mirror of https://github.com/meilisearch/meilisearch.git
synced 2024-11-23 10:37:41 +08:00

Merge branch 'main' of github.com:meilisearch/meilisearch into CNLHC/change_json_error_message

This commit is contained in:
commit 193c666bf9
.github/workflows/publish-binaries.yml (vendored, 4 changed lines)

@@ -6,7 +6,7 @@ name: Publish binaries to release
 jobs:
   publish:
-    name: Publish for ${{ matrix.os }}
+    name: Publish binary for ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     strategy:
       fail-fast: false
@@ -39,7 +39,7 @@ jobs:
         tag: ${{ github.ref }}

   publish-aarch64:
-    name: Publish to GitHub
+    name: Publish binary for aarch64
     runs-on: ${{ matrix.os }}
     continue-on-error: false
     strategy:
.github/workflows/rust.yml (vendored, 19 changed lines)

@@ -36,6 +36,25 @@ jobs:
           command: test
           args: --locked --release

+  # We run tests in debug also, to make sure that the debug_assertions are hit
+  test-debug:
+    name: Run tests in debug
+    runs-on: ubuntu-18.04
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions-rs/toolchain@v1
+        with:
+          profile: minimal
+          toolchain: stable
+          override: true
+      - name: Cache dependencies
+        uses: Swatinem/rust-cache@v1.3.0
+      - name: Run tests in debug
+        uses: actions-rs/cargo@v1
+        with:
+          command: test
+          args: --locked
+
   clippy:
     name: Run Clippy
     runs-on: ubuntu-18.04
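Why a second, non-release test job is worth the CI time: `cargo test --release` compiles with `debug_assertions` off, so assertions written with `debug_assert!` (and integer overflow checks) never execute there. A minimal illustration of the difference (example code, not part of the diff):

// Panics under a debug build, silently takes the fallback path in --release.
fn checked_divide(a: u32, b: u32) -> u32 {
    debug_assert!(b != 0, "division by zero is only caught in debug builds");
    if b == 0 {
        return 0; // release fallback path
    }
    a / b
}

fn main() {
    println!("{}", checked_divide(1, 0));
}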
Cargo.lock (generated, 997 changed lines)
File diff suppressed because it is too large.
@@ -1,4 +1,5 @@
 [workspace]
+resolver = "2"
 members = [
     "meilisearch-http",
     "meilisearch-error",
@@ -3,7 +3,8 @@ status = [
     'Tests on macos-latest',
     'Tests on windows-latest',
     'Run Clippy',
-    'Run Rustfmt'
+    'Run Rustfmt',
+    'Run tests in debug',
 ]
 pr_status = ['Milestone Check']
 # 3 hours timeout
@@ -5,11 +5,11 @@ edition = "2021"

 [dependencies]
 enum-iterator = "0.7.0"
-heed = { git = "https://github.com/Kerollmops/heed", tag = "v0.12.1" }
-sha2 = "0.9.6"
-chrono = { version = "0.4.19", features = ["serde"] }
 meilisearch-error = { path = "../meilisearch-error" }
-serde_json = { version = "1.0.67", features = ["preserve_order"] }
+milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.24.0" }
 rand = "0.8.4"
-serde = { version = "1.0.130", features = ["derive"] }
-thiserror = "1.0.28"
+serde = { version = "1.0.136", features = ["derive"] }
+serde_json = { version = "1.0.79", features = ["preserve_order"] }
+sha2 = "0.10.2"
+thiserror = "1.0.30"
+time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
@@ -10,13 +10,13 @@ pub type Result<T> = std::result::Result<T, AuthControllerError>;
 pub enum AuthControllerError {
     #[error("`{0}` field is mandatory.")]
     MissingParameter(&'static str),
-    #[error("actions field value `{0}` is invalid. It should be an array of string representing action names.")]
+    #[error("`actions` field value `{0}` is invalid. It should be an array of string representing action names.")]
     InvalidApiKeyActions(Value),
-    #[error("indexes field value `{0}` is invalid. It should be an array of string representing index names.")]
+    #[error("`indexes` field value `{0}` is invalid. It should be an array of string representing index names.")]
     InvalidApiKeyIndexes(Value),
-    #[error("expiresAt field value `{0}` is invalid. It should be in ISO-8601 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DDTHH:MM:SS'.")]
+    #[error("`expiresAt` field value `{0}` is invalid. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.")]
     InvalidApiKeyExpiresAt(Value),
-    #[error("description field value `{0}` is invalid. It should be a string or specified as a null value.")]
+    #[error("`description` field value `{0}` is invalid. It should be a string or specified as a null value.")]
     InvalidApiKeyDescription(Value),
     #[error("API key `{0}` not found.")]
     ApiKeyNotFound(String),
@@ -25,7 +25,7 @@ pub enum AuthControllerError {
 }

 internal_error!(
-    AuthControllerError: heed::Error,
+    AuthControllerError: milli::heed::Error,
     std::io::Error,
     serde_json::Error,
    std::str::Utf8Error
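All of the message changes above live in `#[error(...)]` attributes, which thiserror compiles into `Display` implementations, so wrapping field names in backticks only changes the rendered string. A small self-contained sketch of that mechanism (illustrative variant name, assuming the thiserror and serde_json crates):

use serde_json::Value;
use thiserror::Error;

#[derive(Debug, Error)]
pub enum DemoError {
    // `{0}` interpolates the variant's payload into the message.
    #[error("`actions` field value `{0}` is invalid. It should be an array of string representing action names.")]
    InvalidApiKeyActions(Value),
}

fn main() {
    let err = DemoError::InvalidApiKeyActions(Value::from(42));
    assert!(err.to_string().starts_with("`actions` field value `42`"));
}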
@@ -1,10 +1,12 @@
 use crate::action::Action;
 use crate::error::{AuthControllerError, Result};
 use crate::store::{KeyId, KEY_ID_LENGTH};
-use chrono::{DateTime, NaiveDate, NaiveDateTime, Utc};
 use rand::Rng;
 use serde::{Deserialize, Serialize};
 use serde_json::{from_value, Value};
+use time::format_description::well_known::Rfc3339;
+use time::macros::{format_description, time};
+use time::{Date, OffsetDateTime, PrimitiveDateTime};

 #[derive(Debug, Deserialize, Serialize)]
 pub struct Key {
@@ -13,9 +15,12 @@ pub struct Key {
     pub id: KeyId,
     pub actions: Vec<Action>,
     pub indexes: Vec<String>,
-    pub expires_at: Option<DateTime<Utc>>,
-    pub created_at: DateTime<Utc>,
-    pub updated_at: DateTime<Utc>,
+    #[serde(with = "time::serde::rfc3339::option")]
+    pub expires_at: Option<OffsetDateTime>,
+    #[serde(with = "time::serde::rfc3339")]
+    pub created_at: OffsetDateTime,
+    #[serde(with = "time::serde::rfc3339")]
+    pub updated_at: OffsetDateTime,
 }

 impl Key {
@@ -52,8 +57,8 @@ impl Key {
             .map(parse_expiration_date)
             .ok_or(AuthControllerError::MissingParameter("expiresAt"))??;

-        let created_at = Utc::now();
-        let updated_at = Utc::now();
+        let created_at = OffsetDateTime::now_utc();
+        let updated_at = created_at;

         Ok(Self {
             description,
@@ -89,24 +94,26 @@ impl Key {
             self.expires_at = parse_expiration_date(exp)?;
         }

-        self.updated_at = Utc::now();
+        self.updated_at = OffsetDateTime::now_utc();

         Ok(())
     }

     pub(crate) fn default_admin() -> Self {
+        let now = OffsetDateTime::now_utc();
         Self {
             description: Some("Default Admin API Key (Use it for all other operations. Caution! Do not use it on a public frontend)".to_string()),
             id: generate_id(),
             actions: vec![Action::All],
             indexes: vec!["*".to_string()],
             expires_at: None,
-            created_at: Utc::now(),
-            updated_at: Utc::now(),
+            created_at: now,
+            updated_at: now,
         }
     }

     pub(crate) fn default_search() -> Self {
+        let now = OffsetDateTime::now_utc();
         Self {
             description: Some(
                 "Default Search API Key (Use it to search from the frontend)".to_string(),
@@ -115,8 +122,8 @@ impl Key {
             actions: vec![Action::Search],
             indexes: vec!["*".to_string()],
             expires_at: None,
-            created_at: Utc::now(),
-            updated_at: Utc::now(),
+            created_at: now,
+            updated_at: now,
         }
     }
 }
@@ -134,22 +141,34 @@ fn generate_id() -> [u8; KEY_ID_LENGTH] {
     bytes
 }

-fn parse_expiration_date(value: &Value) -> Result<Option<DateTime<Utc>>> {
+fn parse_expiration_date(value: &Value) -> Result<Option<OffsetDateTime>> {
     match value {
-        Value::String(string) => DateTime::parse_from_rfc3339(string)
-            .map(|d| d.into())
+        Value::String(string) => OffsetDateTime::parse(string, &Rfc3339)
             .or_else(|_| {
-                NaiveDateTime::parse_from_str(string, "%Y-%m-%dT%H:%M:%S")
-                    .map(|naive| DateTime::from_utc(naive, Utc))
+                PrimitiveDateTime::parse(
+                    string,
+                    format_description!(
+                        "[year repr:full base:calendar]-[month repr:numerical]-[day]T[hour]:[minute]:[second]"
+                    ),
+                ).map(|datetime| datetime.assume_utc())
             })
             .or_else(|_| {
-                NaiveDate::parse_from_str(string, "%Y-%m-%d")
-                    .map(|naive| DateTime::from_utc(naive.and_hms(0, 0, 0), Utc))
+                PrimitiveDateTime::parse(
+                    string,
+                    format_description!(
+                        "[year repr:full base:calendar]-[month repr:numerical]-[day] [hour]:[minute]:[second]"
+                    ),
+                ).map(|datetime| datetime.assume_utc())
+            })
+            .or_else(|_| {
+                Date::parse(string, format_description!(
+                    "[year repr:full base:calendar]-[month repr:numerical]-[day]"
+                )).map(|date| PrimitiveDateTime::new(date, time!(00:00)).assume_utc())
             })
             .map_err(|_| AuthControllerError::InvalidApiKeyExpiresAt(value.clone()))
             // check if the key is already expired.
             .and_then(|d| {
-                if d > Utc::now() {
+                if d > OffsetDateTime::now_utc() {
                     Ok(d)
                 } else {
                     Err(AuthControllerError::InvalidApiKeyExpiresAt(value.clone()))
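The rewritten parse_expiration_date accepts three shapes: a full RFC 3339 timestamp, a naive datetime that is assumed to be UTC, and a bare date pinned to midnight. A standalone sketch of the same fallback chain (simplified format strings, assuming time 0.3 with the "formatting", "parsing", and "macros" features):

use time::format_description::well_known::Rfc3339;
use time::macros::{format_description, time};
use time::{Date, OffsetDateTime, PrimitiveDateTime};

fn parse_lenient(s: &str) -> Option<OffsetDateTime> {
    // 1. Full RFC 3339 timestamp, e.g. "2999-12-31T23:59:59Z".
    OffsetDateTime::parse(s, &Rfc3339)
        // 2. Naive datetime without an offset; assume UTC.
        .or_else(|_| {
            PrimitiveDateTime::parse(
                s,
                format_description!("[year]-[month]-[day]T[hour]:[minute]:[second]"),
            )
            .map(PrimitiveDateTime::assume_utc)
        })
        // 3. Bare date; midnight UTC.
        .or_else(|_| {
            Date::parse(s, format_description!("[year]-[month]-[day]"))
                .map(|d| PrimitiveDateTime::new(d, time!(00:00)).assume_utc())
        })
        .ok()
}

fn main() {
    assert!(parse_lenient("2999-12-31T23:59:59Z").is_some());
    assert!(parse_lenient("2999-12-31").is_some());
    assert!(parse_lenient("not a date").is_none());
}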
@@ -9,10 +9,10 @@ use std::path::Path;
 use std::str::from_utf8;
 use std::sync::Arc;

-use chrono::Utc;
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
 use sha2::{Digest, Sha256};
+use time::OffsetDateTime;

 pub use action::{actions, Action};
 use error::{AuthControllerError, Result};
@@ -40,18 +40,18 @@ impl AuthController {
         })
     }

-    pub async fn create_key(&self, value: Value) -> Result<Key> {
+    pub fn create_key(&self, value: Value) -> Result<Key> {
         let key = Key::create_from_value(value)?;
         self.store.put_api_key(key)
     }

-    pub async fn update_key(&self, key: impl AsRef<str>, value: Value) -> Result<Key> {
-        let mut key = self.get_key(key).await?;
+    pub fn update_key(&self, key: impl AsRef<str>, value: Value) -> Result<Key> {
+        let mut key = self.get_key(key)?;
         key.update_from_value(value)?;
         self.store.put_api_key(key)
     }

-    pub async fn get_key(&self, key: impl AsRef<str>) -> Result<Key> {
+    pub fn get_key(&self, key: impl AsRef<str>) -> Result<Key> {
         self.store
             .get_api_key(&key)?
             .ok_or_else(|| AuthControllerError::ApiKeyNotFound(key.as_ref().to_string()))
@@ -101,11 +101,11 @@ impl AuthController {
         Ok(filters)
     }

-    pub async fn list_keys(&self) -> Result<Vec<Key>> {
+    pub fn list_keys(&self) -> Result<Vec<Key>> {
         self.store.list_api_keys()
     }

-    pub async fn delete_key(&self, key: impl AsRef<str>) -> Result<()> {
+    pub fn delete_key(&self, key: impl AsRef<str>) -> Result<()> {
         if self.store.delete_api_key(&key)? {
             Ok(())
         } else {
@@ -149,7 +149,7 @@ impl AuthController {
             None => self.store.prefix_first_expiration_date(key, action)?,
         }) {
             // check expiration date.
-            Some(Some(exp)) => Ok(Utc::now() < exp),
+            Some(Some(exp)) => Ok(OffsetDateTime::now_utc() < exp),
             // no expiration date.
             Some(None) => Ok(true),
             // action or index forbidden.
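Every AuthController method loses its `async` qualifier here. None of the bodies ever awaited anything, so the `async` wrapper added no concurrency: an async function with no await points still runs synchronously on the executor thread. Making the methods plain functions lets callers decide where to run the blocking LMDB work. A toy sketch of the shape of the change (illustrative types, not the real API):

struct Store;

impl Store {
    // Stands in for a synchronous heed/LMDB write.
    fn put_api_key(&self, key: String) -> Result<String, String> {
        Ok(key)
    }
}

struct AuthController {
    store: Store,
}

impl AuthController {
    // Before: `pub async fn create_key(...)` with no `.await` inside.
    // After: a plain synchronous method; callers that need async offload it
    // with spawn_blocking (see the route handlers later in this diff).
    pub fn create_key(&self, value: String) -> Result<String, String> {
        self.store.put_api_key(value)
    }
}

fn main() {
    let ctrl = AuthController { store: Store };
    assert!(ctrl.create_key("key-1".into()).is_ok());
}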
@@ -8,9 +8,9 @@ use std::path::Path;
 use std::str;
 use std::sync::Arc;

-use chrono::{DateTime, Utc};
-use heed::types::{ByteSlice, DecodeIgnore, SerdeJson};
-use heed::{Database, Env, EnvOpenOptions, RwTxn};
+use milli::heed::types::{ByteSlice, DecodeIgnore, SerdeJson};
+use milli::heed::{Database, Env, EnvOpenOptions, RwTxn};
+use time::OffsetDateTime;

 use super::error::Result;
 use super::{Action, Key};
@@ -27,7 +27,7 @@ pub type KeyId = [u8; KEY_ID_LENGTH];
 pub struct HeedAuthStore {
     env: Arc<Env>,
     keys: Database<ByteSlice, SerdeJson<Key>>,
-    action_keyid_index_expiration: Database<KeyIdActionCodec, SerdeJson<Option<DateTime<Utc>>>>,
+    action_keyid_index_expiration: Database<KeyIdActionCodec, SerdeJson<Option<OffsetDateTime>>>,
     should_close_on_drop: bool,
 }

@@ -39,7 +39,7 @@ impl Drop for HeedAuthStore {
     }
 }

-pub fn open_auth_store_env(path: &Path) -> heed::Result<heed::Env> {
+pub fn open_auth_store_env(path: &Path) -> milli::heed::Result<milli::heed::Env> {
     let mut options = EnvOpenOptions::new();
     options.map_size(AUTH_STORE_SIZE); // 1GB
     options.max_dbs(2);
@@ -150,7 +150,7 @@ impl HeedAuthStore {
         key: &[u8],
         action: Action,
         index: Option<&[u8]>,
-    ) -> Result<Option<Option<DateTime<Utc>>>> {
+    ) -> Result<Option<Option<OffsetDateTime>>> {
         let rtxn = self.env.read_txn()?;
         match self.get_key_id(key) {
             Some(id) => {
@@ -165,7 +165,7 @@ impl HeedAuthStore {
         &self,
         key: &[u8],
         action: Action,
-    ) -> Result<Option<Option<DateTime<Utc>>>> {
+    ) -> Result<Option<Option<OffsetDateTime>>> {
         let rtxn = self.env.read_txn()?;
         match self.get_key_id(key) {
             Some(id) => {
@@ -203,7 +203,7 @@ impl HeedAuthStore {
 /// optionnally on a spcific index, for a given key.
 pub struct KeyIdActionCodec;

-impl<'a> heed::BytesDecode<'a> for KeyIdActionCodec {
+impl<'a> milli::heed::BytesDecode<'a> for KeyIdActionCodec {
     type DItem = (KeyId, Action, Option<&'a [u8]>);

     fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
@@ -218,7 +218,7 @@ impl<'a> heed::BytesDecode<'a> for KeyIdActionCodec {
     }
 }

-impl<'a> heed::BytesEncode<'a> for KeyIdActionCodec {
+impl<'a> milli::heed::BytesEncode<'a> for KeyIdActionCodec {
     type EItem = (&'a KeyId, &'a Action, Option<&'a [u8]>);

     fn bytes_encode((key_id, action, index): &Self::EItem) -> Option<Cow<[u8]>> {
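The store now reaches heed through `milli::heed` instead of declaring its own `heed` dependency, which matches the Cargo.toml change earlier in this diff and guarantees that the auth crate and milli compile against the exact same heed version. A minimal sketch of that re-export pattern (module names illustrative; in the real crate the inner module is simply `pub use heed;`):

mod milli {
    pub mod heed {
        // Stand-in for the real heed API surface.
        #[derive(Debug)]
        pub struct Env {
            pub map_size: usize,
        }
    }
}

use milli::heed::Env;

fn main() {
    let env = Env { map_size: 1 << 30 }; // 1 GiB, like the AUTH_STORE_SIZE comment
    println!("{env:?}");
}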
@@ -5,11 +5,11 @@ authors = ["marin <postma.marin@protonmail.com>"]
 edition = "2021"

 [dependencies]
-actix-web = { version = "4", default-features = false }
+actix-web = { version = "4.0.1", default-features = false }
 proptest = { version = "1.0.0", optional = true }
 proptest-derive = { version = "0.3.0", optional = true }
-serde = { version = "1.0.130", features = ["derive"] }
-serde_json = "1.0.69"
+serde = { version = "1.0.136", features = ["derive"] }
+serde_json = "1.0.79"

 [features]
 test-traits = ["proptest", "proptest-derive"]
@@ -11,82 +11,82 @@ name = "meilisearch"
 path = "src/main.rs"

 [build-dependencies]
-static-files = { version = "0.2.1", optional = true }
-anyhow = { version = "1.0.43", optional = true }
-cargo_toml = { version = "0.9", optional = true }
+anyhow = { version = "1.0.56", optional = true }
+cargo_toml = { version = "0.11.4", optional = true }
 hex = { version = "0.4.3", optional = true }
-reqwest = { version = "0.11.4", features = ["blocking", "rustls-tls"], default-features = false, optional = true }
-sha-1 = { version = "0.9.8", optional = true }
-tempfile = { version = "3.2.0", optional = true }
-vergen = { version = "5.1.15", default-features = false, features = ["git"] }
+reqwest = { version = "0.11.9", features = ["blocking", "rustls-tls"], default-features = false, optional = true }
+sha-1 = { version = "0.10.0", optional = true }
+static-files = { version = "0.2.3", optional = true }
+tempfile = { version = "3.3.0", optional = true }
+vergen = { version = "7.0.0", default-features = false, features = ["git"] }
 zip = { version = "0.5.13", optional = true }

 [dependencies]
-actix-cors = "0.6"
-actix-web = { version = "4", features = ["rustls"] }
+actix-cors = "0.6.1"
+actix-web = { version = "4.0.1", default-features = false, features = ["macros", "compress-brotli", "compress-gzip", "cookies", "rustls"] }
 actix-web-static-files = { git = "https://github.com/kilork/actix-web-static-files.git", rev = "2d3b6160", optional = true }
-anyhow = { version = "1.0.43", features = ["backtrace"] }
-arc-swap = "1.3.2"
-async-stream = "0.3.2"
-async-trait = "0.1.51"
+anyhow = { version = "1.0.56", features = ["backtrace"] }
+async-stream = "0.3.3"
+async-trait = "0.1.52"
 bstr = "0.2.17"
-byte-unit = { version = "4.0.12", default-features = false, features = ["std", "serde"] }
+byte-unit = { version = "4.0.14", default-features = false, features = ["std", "serde"] }
 bytes = "1.1.0"
-chrono = { version = "0.4.19", features = ["serde"] }
-crossbeam-channel = "0.5.1"
+clap = { version = "3.1.6", features = ["derive", "env"] }
+crossbeam-channel = "0.5.2"
 either = "1.6.1"
 env_logger = "0.9.0"
-flate2 = "1.0.21"
+flate2 = "1.0.22"
 fst = "0.4.7"
-futures = "0.3.17"
-futures-util = "0.3.17"
-heed = { git = "https://github.com/Kerollmops/heed", tag = "v0.12.1" }
-http = "0.2.4"
-indexmap = { version = "1.7.0", features = ["serde-1"] }
-itertools = "0.10.1"
-jsonwebtoken = "7"
+futures = "0.3.21"
+futures-util = "0.3.21"
+http = "0.2.6"
+indexmap = { version = "1.8.0", features = ["serde-1"] }
+itertools = "0.10.3"
+jsonwebtoken = "8.0.1"
 log = "0.4.14"
 meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-error = { path = "../meilisearch-error" }
 meilisearch-lib = { path = "../meilisearch-lib" }
 mime = "0.3.16"
-num_cpus = "1.13.0"
+num_cpus = "1.13.1"
 obkv = "0.2.0"
-once_cell = "1.8.0"
-parking_lot = "0.11.2"
-pin-project = "1.0.8"
+once_cell = "1.10.0"
+parking_lot = "0.12.0"
+pin-project-lite = "0.2.8"
 platform-dirs = "0.3.0"
-rand = "0.8.4"
+rand = "0.8.5"
 rayon = "1.5.1"
-regex = "1.5.4"
-rustls = "0.20.2"
-rustls-pemfile = "0.2"
-segment = { version = "0.1.2", optional = true }
-serde = { version = "1.0.130", features = ["derive"] }
-serde_json = { version = "1.0.67", features = ["preserve_order"] }
-sha2 = "0.9.6"
-siphasher = "0.3.7"
-slice-group-by = "0.2.6"
-static-files = { version = "0.2.1", optional = true }
-clap = { version = "3.0", features = ["derive", "env"] }
-sysinfo = "0.20.2"
-tar = "0.4.37"
-tempfile = "3.2.0"
-thiserror = "1.0.28"
-tokio = { version = "1.11.0", features = ["full"] }
-tokio-stream = "0.1.7"
+regex = "1.5.5"
+rustls = "0.20.4"
+rustls-pemfile = "0.3.0"
+segment = { version = "0.2.0", optional = true }
+serde = { version = "1.0.136", features = ["derive"] }
+serde_json = { version = "1.0.79", features = ["preserve_order"] }
+sha2 = "0.10.2"
+siphasher = "0.3.10"
+slice-group-by = "0.3.0"
+static-files = { version = "0.2.3", optional = true }
+sysinfo = "0.23.5"
+tar = "0.4.38"
+tempfile = "3.3.0"
+thiserror = "1.0.30"
+time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
+tokio = { version = "1.17.0", features = ["full"] }
+tokio-stream = "0.1.8"
 uuid = { version = "0.8.2", features = ["serde"] }
 walkdir = "2.3.2"

 [dev-dependencies]
-actix-rt = "2.2.0"
+actix-rt = "2.7.0"
 assert-json-diff = "2.0.1"
 maplit = "1.0.2"
-paste = "1.0.5"
+paste = "1.0.6"
 serde_url_params = "0.2.1"
 urlencoding = "2.1.0"

 [features]
+default = ["analytics", "mini-dashboard"]
+analytics = ["segment"]
 mini-dashboard = [
     "actix-web-static-files",
     "static-files",
@@ -98,12 +98,10 @@ mini-dashboard = [
     "tempfile",
     "zip",
 ]
-analytics = ["segment"]
-default = ["analytics", "mini-dashboard"]

 [target.'cfg(target_os = "linux")'.dependencies]
-tikv-jemallocator = "0.4.1"
+tikv-jemallocator = "0.4.3"

 [package.metadata.mini-dashboard]
-assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.1.7/build.zip"
-sha1 = "e2feedf271917c4b7b88998eff5aaaea1d3925b9"
+assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.1.9/build.zip"
+sha1 = "b1833c3e5dc6b5d9d519ae4834935ae6c8a47024"
@@ -6,7 +6,6 @@ use std::time::{Duration, Instant};

 use actix_web::http::header::USER_AGENT;
 use actix_web::HttpRequest;
-use chrono::{DateTime, Utc};
 use http::header::CONTENT_TYPE;
 use meilisearch_auth::SearchRules;
 use meilisearch_lib::index::{SearchQuery, SearchResult};
@@ -18,6 +17,7 @@ use segment::message::{Identify, Track, User};
 use segment::{AutoBatcher, Batcher, HttpClient};
 use serde_json::{json, Value};
 use sysinfo::{DiskExt, System, SystemExt};
+use time::OffsetDateTime;
 use tokio::select;
 use tokio::sync::mpsc::{self, Receiver, Sender};
 use uuid::Uuid;
@@ -323,7 +323,7 @@ impl Segment {

 #[derive(Default)]
 pub struct SearchAggregator {
-    timestamp: Option<DateTime<Utc>>,
+    timestamp: Option<OffsetDateTime>,

     // context
     user_agents: HashSet<String>,
@@ -360,7 +360,7 @@ pub struct SearchAggregator {
 impl SearchAggregator {
     pub fn from_query(query: &SearchQuery, request: &HttpRequest) -> Self {
         let mut ret = Self::default();
-        ret.timestamp = Some(chrono::offset::Utc::now());
+        ret.timestamp = Some(OffsetDateTime::now_utc());

         ret.total_received = 1;
         ret.user_agents = extract_user_agents(request).into_iter().collect();
@@ -504,7 +504,7 @@ impl SearchAggregator {

 #[derive(Default)]
 pub struct DocumentsAggregator {
-    timestamp: Option<DateTime<Utc>>,
+    timestamp: Option<OffsetDateTime>,

     // set to true when at least one request was received
     updated: bool,
@@ -524,7 +524,7 @@ impl DocumentsAggregator {
         request: &HttpRequest,
     ) -> Self {
         let mut ret = Self::default();
-        ret.timestamp = Some(chrono::offset::Utc::now());
+        ret.timestamp = Some(OffsetDateTime::now_utc());

         ret.updated = true;
         ret.user_agents = extract_user_agents(request).into_iter().collect();
@@ -5,7 +5,7 @@ pub enum AuthenticationError {
     #[error("The Authorization header is missing. It must use the bearer authorization method.")]
     MissingAuthorizationHeader,
     #[error("The provided API key is invalid.")]
-    InvalidToken(String),
+    InvalidToken,
     // Triggered on configuration error.
     #[error("An internal error has occurred. `Irretrievable state`.")]
     IrretrievableState,
@@ -15,7 +15,7 @@ impl ErrorCode for AuthenticationError {
     fn error_code(&self) -> Code {
         match self {
             AuthenticationError::MissingAuthorizationHeader => Code::MissingAuthorizationHeader,
-            AuthenticationError::InvalidToken(_) => Code::InvalidToken,
+            AuthenticationError::InvalidToken => Code::InvalidToken,
             AuthenticationError::IrretrievableState => Code::Internal,
         }
     }
 }
@@ -2,28 +2,83 @@ mod error;

 use std::marker::PhantomData;
 use std::ops::Deref;
+use std::pin::Pin;

 use actix_web::FromRequest;
 use futures::future::err;
-use futures::future::{ok, Ready};
-use meilisearch_error::ResponseError;
+use futures::Future;
+use meilisearch_error::{Code, ResponseError};

 use error::AuthenticationError;
 use meilisearch_auth::{AuthController, AuthFilter};

-pub struct GuardedData<T, D> {
+pub struct GuardedData<P, D> {
     data: D,
     filters: AuthFilter,
-    _marker: PhantomData<T>,
+    _marker: PhantomData<P>,
 }

-impl<T, D> GuardedData<T, D> {
+impl<P, D> GuardedData<P, D> {
     pub fn filters(&self) -> &AuthFilter {
         &self.filters
     }
+
+    async fn auth_bearer(
+        auth: AuthController,
+        token: String,
+        index: Option<String>,
+        data: Option<D>,
+    ) -> Result<Self, ResponseError>
+    where
+        P: Policy + 'static,
+    {
+        match Self::authenticate(auth, token, index).await? {
+            Some(filters) => match data {
+                Some(data) => Ok(Self {
+                    data,
+                    filters,
+                    _marker: PhantomData,
+                }),
+                None => Err(AuthenticationError::IrretrievableState.into()),
+            },
+            None => Err(AuthenticationError::InvalidToken.into()),
+        }
+    }
+
+    async fn auth_token(auth: AuthController, data: Option<D>) -> Result<Self, ResponseError>
+    where
+        P: Policy + 'static,
+    {
+        match Self::authenticate(auth, String::new(), None).await? {
+            Some(filters) => match data {
+                Some(data) => Ok(Self {
+                    data,
+                    filters,
+                    _marker: PhantomData,
+                }),
+                None => Err(AuthenticationError::IrretrievableState.into()),
+            },
+            None => Err(AuthenticationError::MissingAuthorizationHeader.into()),
+        }
+    }
+
+    async fn authenticate(
+        auth: AuthController,
+        token: String,
+        index: Option<String>,
+    ) -> Result<Option<AuthFilter>, ResponseError>
+    where
+        P: Policy + 'static,
+    {
+        Ok(tokio::task::spawn_blocking(move || {
+            P::authenticate(auth, token.as_ref(), index.as_deref())
+        })
+        .await
+        .map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))?)
+    }
 }

-impl<T, D> Deref for GuardedData<T, D> {
+impl<P, D> Deref for GuardedData<P, D> {
     type Target = D;

     fn deref(&self) -> &Self::Target {
@@ -34,7 +89,7 @@ impl<T, D> Deref for GuardedData<T, D> {
 impl<P: Policy + 'static, D: 'static + Clone> FromRequest for GuardedData<P, D> {
     type Error = ResponseError;

-    type Future = Ready<Result<Self, Self::Error>>;
+    type Future = Pin<Box<dyn Future<Output = Result<Self, Self::Error>>>>;

     fn from_request(
         req: &actix_web::HttpRequest,
@@ -51,40 +106,22 @@ impl<P: Policy + 'static, D: 'static + Clone> FromRequest for GuardedData<P, D>
                         // TODO: find a less hardcoded way?
                         let index = req.match_info().get("index_uid");
                         match type_token.next() {
-                            Some(token) => match P::authenticate(auth, token, index) {
-                                Some(filters) => match req.app_data::<D>().cloned() {
-                                    Some(data) => ok(Self {
-                                        data,
-                                        filters,
-                                        _marker: PhantomData,
-                                    }),
-                                    None => err(AuthenticationError::IrretrievableState.into()),
-                                },
-                                None => {
-                                    let token = token.to_string();
-                                    err(AuthenticationError::InvalidToken(token).into())
-                                }
-                            },
-                            None => {
-                                err(AuthenticationError::InvalidToken("unknown".to_string()).into())
-                            }
+                            Some(token) => Box::pin(Self::auth_bearer(
+                                auth,
+                                token.to_string(),
+                                index.map(String::from),
+                                req.app_data::<D>().cloned(),
+                            )),
+                            None => Box::pin(err(AuthenticationError::InvalidToken.into())),
                         }
                     }
-                    _otherwise => err(AuthenticationError::MissingAuthorizationHeader.into()),
+                    _otherwise => {
+                        Box::pin(err(AuthenticationError::MissingAuthorizationHeader.into()))
+                    }
                 },
-                None => match P::authenticate(auth, "", None) {
-                    Some(filters) => match req.app_data::<D>().cloned() {
-                        Some(data) => ok(Self {
-                            data,
-                            filters,
-                            _marker: PhantomData,
-                        }),
-                        None => err(AuthenticationError::IrretrievableState.into()),
-                    },
-                    None => err(AuthenticationError::MissingAuthorizationHeader.into()),
-                },
+                None => Box::pin(Self::auth_token(auth, req.app_data::<D>().cloned())),
             },
-            None => err(AuthenticationError::IrretrievableState.into()),
+            None => Box::pin(err(AuthenticationError::IrretrievableState.into())),
         }
     }
 }
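The key type change here is the extractor's associated future: `Ready<T>` can only be built from a value that is already computed, while `Pin<Box<dyn Future>>` lets from_request hand back genuinely asynchronous work, which is what allows the new auth_bearer/auth_token helpers to await spawn_blocking. A stripped-down sketch of the pattern (a toy trait standing in for actix-web's FromRequest, assuming only the futures crate):

use std::future::Future;
use std::pin::Pin;

trait Extract {
    type Fut: Future<Output = Result<String, String>>;
    fn extract(token: Option<&str>) -> Self::Fut;
}

struct Guarded;

impl Extract for Guarded {
    // Boxed future: the body may now call async functions.
    type Fut = Pin<Box<dyn Future<Output = Result<String, String>>>>;

    fn extract(token: Option<&str>) -> Self::Fut {
        match token {
            Some(t) => {
                let t = t.to_string();
                Box::pin(async move { authenticate(t).await })
            }
            None => Box::pin(async { Err("missing Authorization header".into()) }),
        }
    }
}

async fn authenticate(token: String) -> Result<String, String> {
    // Stand-in for the policy check the real code runs in spawn_blocking.
    if token == "Bearer secret" {
        Ok("filters".into())
    } else {
        Err("invalid token".into())
    }
}

fn main() {
    // Drive the future with a tiny executor; any async runtime would do.
    let out = futures::executor::block_on(Guarded::extract(Some("Bearer secret")));
    assert_eq!(out, Ok("filters".to_string()));
}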
@@ -94,21 +131,22 @@ pub trait Policy {
 }

 pub mod policies {
-    use chrono::Utc;
-    use jsonwebtoken::{dangerous_insecure_decode, decode, Algorithm, DecodingKey, Validation};
-    use once_cell::sync::Lazy;
+    use jsonwebtoken::{decode, Algorithm, DecodingKey, Validation};
     use serde::{Deserialize, Serialize};
+    use time::OffsetDateTime;

     use crate::extractors::authentication::Policy;
     use meilisearch_auth::{Action, AuthController, AuthFilter, SearchRules};
     // reexport actions in policies in order to be used in routes configuration.
     pub use meilisearch_auth::actions;

-    pub static TENANT_TOKEN_VALIDATION: Lazy<Validation> = Lazy::new(|| Validation {
-        validate_exp: false,
-        algorithms: vec![Algorithm::HS256, Algorithm::HS384, Algorithm::HS512],
-        ..Default::default()
-    });
+    fn tenant_token_validation() -> Validation {
+        let mut validation = Validation::default();
+        validation.validate_exp = false;
+        validation.required_spec_claims.remove("exp");
+        validation.algorithms = vec![Algorithm::HS256, Algorithm::HS384, Algorithm::HS512];
+        validation
+    }

     pub struct MasterPolicy;

@@ -167,12 +205,17 @@ pub mod policies {
             return None;
         }

+        let mut validation = tenant_token_validation();
+        validation.insecure_disable_signature_validation();
+        let dummy_key = DecodingKey::from_secret(b"secret");
+        let token_data = decode::<Claims>(token, &dummy_key, &validation).ok()?;
+
         // get token fields without validating it.
         let Claims {
             search_rules,
             exp,
             api_key_prefix,
-        } = dangerous_insecure_decode::<Claims>(token).ok()?.claims;
+        } = token_data.claims;

         // Check index access if an index restriction is provided.
         if let Some(index) = index {
@@ -183,7 +226,7 @@ pub mod policies {

         // Check if token is expired.
         if let Some(exp) = exp {
-            if Utc::now().timestamp() > exp {
+            if OffsetDateTime::now_utc().unix_timestamp() > exp {
                 return None;
             }
         }
@@ -198,7 +241,7 @@ pub mod policies {
         decode::<Claims>(
             token,
             &DecodingKey::from_secret(key.as_bytes()),
-            &TENANT_TOKEN_VALIDATION,
+            &tenant_token_validation(),
         )
         .ok()?;

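jsonwebtoken 8 removed `dangerous_insecure_decode`, so peeking at a tenant token's claims before knowing which API key signs it is now spelled as a normal decode with signature validation explicitly disabled and a throwaway key. A self-contained sketch of that migration (the claims shape and secrets here are illustrative):

use jsonwebtoken::{decode, Algorithm, DecodingKey, Validation};
use serde::Deserialize;

#[derive(Deserialize)]
struct Claims {
    exp: Option<i64>,
}

fn read_claims_unverified(token: &str) -> Option<Claims> {
    let mut validation = Validation::default();
    validation.validate_exp = false;
    validation.required_spec_claims.remove("exp");
    validation.algorithms = vec![Algorithm::HS256, Algorithm::HS384, Algorithm::HS512];
    // The signature is NOT checked past this point; use only to peek at claims.
    validation.insecure_disable_signature_validation();
    let dummy_key = DecodingKey::from_secret(b"secret");
    decode::<Claims>(token, &dummy_key, &validation)
        .ok()
        .map(|data| data.claims)
}

fn main() {
    use jsonwebtoken::{encode, EncodingKey, Header};
    let token = encode(
        &Header::default(),
        &serde_json::json!({ "exp": 4102444800i64 }),
        &EncodingKey::from_secret(b"another-secret"),
    )
    .unwrap();
    // Decodes even though the keys differ, because the signature isn't verified.
    assert!(read_claims_unverified(&token).is_some());
}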
@@ -1,3 +1,4 @@
 pub mod payload;
 #[macro_use]
 pub mod authentication;
+pub mod sequential_extractor;
meilisearch-http/src/extractors/sequential_extractor.rs (new file, 148 lines)

#![allow(non_snake_case)]
use std::{future::Future, pin::Pin, task::Poll};

use actix_web::{dev::Payload, FromRequest, Handler, HttpRequest};
use pin_project_lite::pin_project;

/// `SeqHandler` is an actix `Handler` that enforces that extractors errors are returned in the
/// same order as they are defined in the wrapped handler. This is needed because, by default, actix
/// resolves the extractors concurrently, whereas we always need the authentication extractor to
/// throw first.
#[derive(Clone)]
pub struct SeqHandler<H>(pub H);

pub struct SeqFromRequest<T>(T);

/// This macro implements `FromRequest` for arbitrary arity handler, except for one, which is
/// useless anyway.
macro_rules! gen_seq {
    ($ty:ident; $($T:ident)+) => {
        pin_project! {
            pub struct $ty<$($T: FromRequest), +> {
                $(
                #[pin]
                $T: ExtractFuture<$T::Future, $T, $T::Error>,
                )+
            }
        }

        impl<$($T: FromRequest), +> Future for $ty<$($T),+> {
            type Output = Result<SeqFromRequest<($($T),+)>, actix_web::Error>;

            fn poll(self: Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> Poll<Self::Output> {
                let mut this = self.project();

                let mut count_fut = 0;
                let mut count_finished = 0;

                $(
                count_fut += 1;
                match this.$T.as_mut().project() {
                    ExtractProj::Future { fut } => match fut.poll(cx) {
                        Poll::Ready(Ok(output)) => {
                            count_finished += 1;
                            let _ = this
                                .$T
                                .as_mut()
                                .project_replace(ExtractFuture::Done { output });
                        }
                        Poll::Ready(Err(error)) => {
                            count_finished += 1;
                            let _ = this
                                .$T
                                .as_mut()
                                .project_replace(ExtractFuture::Error { error });
                        }
                        Poll::Pending => (),
                    },
                    ExtractProj::Done { .. } => count_finished += 1,
                    ExtractProj::Error { .. } => {
                        // short circuit if all previous are finished and we had an error.
                        if count_finished == count_fut {
                            match this.$T.project_replace(ExtractFuture::Empty) {
                                ExtractReplaceProj::Error { error } => {
                                    return Poll::Ready(Err(error.into()))
                                }
                                _ => unreachable!("Invalid future state"),
                            }
                        } else {
                            count_finished += 1;
                        }
                    }
                    ExtractProj::Empty => unreachable!("From request polled after being finished. {}", stringify!($T)),
                }
                )+

                if count_fut == count_finished {
                    let result = (
                        $(
                        match this.$T.project_replace(ExtractFuture::Empty) {
                            ExtractReplaceProj::Done { output } => output,
                            ExtractReplaceProj::Error { error } => return Poll::Ready(Err(error.into())),
                            _ => unreachable!("Invalid future state"),
                        },
                        )+
                    );

                    Poll::Ready(Ok(SeqFromRequest(result)))
                } else {
                    Poll::Pending
                }
            }
        }

        impl<$($T: FromRequest,)+> FromRequest for SeqFromRequest<($($T,)+)> {
            type Error = actix_web::Error;

            type Future = $ty<$($T),+>;

            fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
                $ty {
                    $(
                    $T: ExtractFuture::Future {
                        fut: $T::from_request(req, payload),
                    },
                    )+
                }
            }
        }

        impl<Han, $($T: FromRequest),+> Handler<SeqFromRequest<($($T),+)>> for SeqHandler<Han>
        where
            Han: Handler<($($T),+)>,
        {
            type Output = Han::Output;
            type Future = Han::Future;

            fn call(&self, args: SeqFromRequest<($($T),+)>) -> Self::Future {
                self.0.call(args.0)
            }
        }
    };
}

// Not working for a single argument, but then, it is not really necessary.
// gen_seq! { SeqFromRequestFut1; A }
gen_seq! { SeqFromRequestFut2; A B }
gen_seq! { SeqFromRequestFut3; A B C }
gen_seq! { SeqFromRequestFut4; A B C D }
gen_seq! { SeqFromRequestFut5; A B C D E }
gen_seq! { SeqFromRequestFut6; A B C D E F }

pin_project! {
    #[project = ExtractProj]
    #[project_replace = ExtractReplaceProj]
    enum ExtractFuture<Fut, Res, Err> {
        Future {
            #[pin]
            fut: Fut,
        },
        Done {
            output: Res,
        },
        Error {
            error: Err,
        },
        Empty,
    }
}
@@ -1,10 +1,11 @@
+use meilisearch_lib::heed::Env;
 use walkdir::WalkDir;

 pub trait EnvSizer {
     fn size(&self) -> u64;
 }

-impl EnvSizer for heed::Env {
+impl EnvSizer for Env {
     fn size(&self) -> u64 {
         WalkDir::new(self.path())
             .into_iter()
@@ -32,7 +32,7 @@ pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<MeiliSearch> {

     // enable autobatching?
     let _ = AUTOBATCHING_ENABLED.store(
-        opt.scheduler_options.enable_autobatching,
+        opt.scheduler_options.enable_auto_batching,
         std::sync::atomic::Ordering::Relaxed,
     );

@@ -42,6 +42,7 @@ pub struct Opt {

     /// Do not send analytics to Meili.
     #[cfg(all(not(debug_assertions), feature = "analytics"))]
+    #[serde(skip)] // we can't send true
     #[clap(long, env = "MEILI_NO_ANALYTICS")]
     pub no_analytics: bool,

@@ -148,6 +149,7 @@ pub struct Opt {
     #[clap(skip)]
     pub indexer_options: IndexerOpts,

+    #[serde(flatten)]
     #[clap(flatten)]
     pub scheduler_options: SchedulerConfig,
 }
@@ -256,3 +258,13 @@ fn load_ocsp(filename: &Option<PathBuf>) -> anyhow::Result<Vec<u8>> {

     Ok(ret)
 }
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    #[test]
+    fn test_valid_opt() {
+        assert!(Opt::try_parse_from(Some("")).is_ok());
+    }
+}
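The two serde attributes added to Opt shape how the struct serializes (for example into an analytics payload): `skip` drops the field entirely, and `flatten` inlines the nested config's fields at the top level. A minimal sketch of both behaviors (toy structs, assuming serde and serde_json):

use serde::Serialize;

#[derive(Serialize)]
struct SchedulerConfig {
    enable_auto_batching: bool,
}

#[derive(Serialize)]
struct Opt {
    #[serde(skip)] // never serialized, mirroring `no_analytics`
    no_analytics: bool,
    #[serde(flatten)] // fields appear at the top level, not under a key
    scheduler_options: SchedulerConfig,
}

fn main() {
    let opt = Opt {
        no_analytics: true,
        scheduler_options: SchedulerConfig { enable_auto_batching: false },
    };
    let json = serde_json::to_string(&opt).unwrap();
    assert_eq!(json, r#"{"enable_auto_batching":false}"#);
}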
@@ -1,26 +1,29 @@
 use std::str;

 use actix_web::{web, HttpRequest, HttpResponse};
-use chrono::SecondsFormat;
-use meilisearch_auth::{Action, AuthController, Key};
+use meilisearch_auth::{error::AuthControllerError, Action, AuthController, Key};
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
+use time::OffsetDateTime;

-use crate::extractors::authentication::{policies::*, GuardedData};
-use meilisearch_error::ResponseError;
+use crate::extractors::{
+    authentication::{policies::*, GuardedData},
+    sequential_extractor::SeqHandler,
+};
+use meilisearch_error::{Code, ResponseError};

 pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(
         web::resource("")
-            .route(web::post().to(create_api_key))
-            .route(web::get().to(list_api_keys)),
+            .route(web::post().to(SeqHandler(create_api_key)))
+            .route(web::get().to(SeqHandler(list_api_keys))),
     )
     .service(
         web::resource("/{api_key}")
-            .route(web::get().to(get_api_key))
-            .route(web::patch().to(patch_api_key))
-            .route(web::delete().to(delete_api_key)),
+            .route(web::get().to(SeqHandler(get_api_key)))
+            .route(web::patch().to(SeqHandler(patch_api_key)))
+            .route(web::delete().to(SeqHandler(delete_api_key))),
     );
 }

@@ -29,8 +32,13 @@ pub async fn create_api_key(
     body: web::Json<Value>,
     _req: HttpRequest,
 ) -> Result<HttpResponse, ResponseError> {
-    let key = auth_controller.create_key(body.into_inner()).await?;
-    let res = KeyView::from_key(key, &auth_controller);
+    let v = body.into_inner();
+    let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
+        let key = auth_controller.create_key(v)?;
+        Ok(KeyView::from_key(key, &auth_controller))
+    })
+    .await
+    .map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;

     Ok(HttpResponse::Created().json(res))
 }
@@ -39,11 +47,16 @@ pub async fn list_api_keys(
     auth_controller: GuardedData<MasterPolicy, AuthController>,
     _req: HttpRequest,
 ) -> Result<HttpResponse, ResponseError> {
-    let keys = auth_controller.list_keys().await?;
-    let res: Vec<_> = keys
-        .into_iter()
-        .map(|k| KeyView::from_key(k, &auth_controller))
-        .collect();
+    let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
+        let keys = auth_controller.list_keys()?;
+        let res: Vec<_> = keys
+            .into_iter()
+            .map(|k| KeyView::from_key(k, &auth_controller))
+            .collect();
+        Ok(res)
+    })
+    .await
+    .map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;

     Ok(HttpResponse::Ok().json(KeyListView::from(res)))
 }
@@ -52,8 +65,13 @@ pub async fn get_api_key(
     auth_controller: GuardedData<MasterPolicy, AuthController>,
     path: web::Path<AuthParam>,
 ) -> Result<HttpResponse, ResponseError> {
-    let key = auth_controller.get_key(&path.api_key).await?;
-    let res = KeyView::from_key(key, &auth_controller);
+    let api_key = path.into_inner().api_key;
+    let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
+        let key = auth_controller.get_key(&api_key)?;
+        Ok(KeyView::from_key(key, &auth_controller))
+    })
+    .await
+    .map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;

     Ok(HttpResponse::Ok().json(res))
 }
@@ -63,10 +81,14 @@ pub async fn patch_api_key(
     body: web::Json<Value>,
     path: web::Path<AuthParam>,
 ) -> Result<HttpResponse, ResponseError> {
-    let key = auth_controller
-        .update_key(&path.api_key, body.into_inner())
-        .await?;
-    let res = KeyView::from_key(key, &auth_controller);
+    let api_key = path.into_inner().api_key;
+    let body = body.into_inner();
+    let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
+        let key = auth_controller.update_key(&api_key, body)?;
+        Ok(KeyView::from_key(key, &auth_controller))
+    })
+    .await
+    .map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;

     Ok(HttpResponse::Ok().json(res))
 }
@@ -75,7 +97,10 @@ pub async fn delete_api_key(
     auth_controller: GuardedData<MasterPolicy, AuthController>,
     path: web::Path<AuthParam>,
 ) -> Result<HttpResponse, ResponseError> {
-    auth_controller.delete_key(&path.api_key).await?;
+    let api_key = path.into_inner().api_key;
+    tokio::task::spawn_blocking(move || auth_controller.delete_key(&api_key))
+        .await
+        .map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;

     Ok(HttpResponse::NoContent().finish())
 }
@@ -92,9 +117,12 @@ struct KeyView {
     key: String,
     actions: Vec<Action>,
     indexes: Vec<String>,
-    expires_at: Option<String>,
-    created_at: String,
-    updated_at: String,
+    #[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
+    expires_at: Option<OffsetDateTime>,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    created_at: OffsetDateTime,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    updated_at: OffsetDateTime,
 }

 impl KeyView {
@@ -107,11 +135,9 @@ impl KeyView {
             key: generated_key,
             actions: key.actions,
             indexes: key.indexes,
-            expires_at: key
-                .expires_at
-                .map(|dt| dt.to_rfc3339_opts(SecondsFormat::Secs, true)),
-            created_at: key.created_at.to_rfc3339_opts(SecondsFormat::Secs, true),
-            updated_at: key.updated_at.to_rfc3339_opts(SecondsFormat::Secs, true),
+            expires_at: key.expires_at,
+            created_at: key.created_at,
+            updated_at: key.updated_at,
         }
     }
 }
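Every handler above now funnels the synchronous AuthController call through tokio::task::spawn_blocking, and the double `??` unwraps first the JoinError from the task and then the controller's own Result. A reduced sketch of the pattern (toy types, assuming the tokio runtime):

use tokio::task;

#[derive(Clone)]
struct Controller;

impl Controller {
    // Synchronous, potentially blocking work (stands in for an LMDB read).
    fn get_key(&self, key: &str) -> Result<String, String> {
        Ok(format!("view of {key}"))
    }
}

async fn handler(controller: Controller, api_key: String) -> Result<String, String> {
    let view = task::spawn_blocking(move || controller.get_key(&api_key))
        .await
        // first `?`: the task panicked or was cancelled (JoinError);
        // second `?`: the controller itself returned an error.
        .map_err(|e| e.to_string())??;
    Ok(view)
}

#[tokio::main]
async fn main() {
    assert_eq!(handler(Controller, "k1".into()).await.unwrap(), "view of k1");
}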
@@ -7,10 +7,13 @@ use serde_json::json;

 use crate::analytics::Analytics;
 use crate::extractors::authentication::{policies::*, GuardedData};
+use crate::extractors::sequential_extractor::SeqHandler;

 pub fn configure(cfg: &mut web::ServiceConfig) {
-    cfg.service(web::resource("").route(web::post().to(create_dump)))
-        .service(web::resource("/{dump_uid}/status").route(web::get().to(get_dump_status)));
+    cfg.service(web::resource("").route(web::post().to(SeqHandler(create_dump))))
+        .service(
+            web::resource("/{dump_uid}/status").route(web::get().to(SeqHandler(get_dump_status))),
+        );
 }

 pub async fn create_dump(
@@ -20,6 +20,7 @@ use crate::analytics::Analytics;
 use crate::error::MeilisearchHttpError;
 use crate::extractors::authentication::{policies::*, GuardedData};
 use crate::extractors::payload::Payload;
+use crate::extractors::sequential_extractor::SeqHandler;
 use crate::task::SummarizedTaskView;

 const DEFAULT_RETRIEVE_DOCUMENTS_OFFSET: usize = 0;
@@ -71,17 +72,17 @@ pub struct DocumentParam {
 pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(
         web::resource("")
-            .route(web::get().to(get_all_documents))
-            .route(web::post().to(add_documents))
-            .route(web::put().to(update_documents))
-            .route(web::delete().to(clear_all_documents)),
+            .route(web::get().to(SeqHandler(get_all_documents)))
+            .route(web::post().to(SeqHandler(add_documents)))
+            .route(web::put().to(SeqHandler(update_documents)))
+            .route(web::delete().to(SeqHandler(clear_all_documents))),
     )
     // this route needs to be before the /documents/{document_id} to match properly
-    .service(web::resource("/delete-batch").route(web::post().to(delete_documents)))
+    .service(web::resource("/delete-batch").route(web::post().to(SeqHandler(delete_documents))))
     .service(
         web::resource("/{document_id}")
-            .route(web::get().to(get_document))
-            .route(web::delete().to(delete_document)),
+            .route(web::get().to(SeqHandler(get_document)))
+            .route(web::delete().to(SeqHandler(delete_document))),
     );
 }

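The comment about route order matters because actix-web tries resources in registration order, so the literal `/delete-batch` path must be registered before the `/{document_id}` pattern or it would be captured as a document id. A stripped-down sketch of the same shape (handlers are placeholders):

use actix_web::{web, App, HttpResponse, HttpServer};

async fn handler() -> HttpResponse {
    HttpResponse::Ok().finish()
}

fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(web::resource("").route(web::get().to(handler)))
        // literal segment first, dynamic segment last
        .service(web::resource("/delete-batch").route(web::post().to(handler)))
        .service(web::resource("/{document_id}").route(web::get().to(handler)));
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| App::new().service(web::scope("/documents").configure(configure)))
        .bind(("127.0.0.1", 8080))?
        .run()
        .await
}
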
@@ -1,14 +1,15 @@
 use actix_web::{web, HttpRequest, HttpResponse};
-use chrono::{DateTime, Utc};
 use log::debug;
 use meilisearch_error::ResponseError;
 use meilisearch_lib::index_controller::Update;
 use meilisearch_lib::MeiliSearch;
 use serde::{Deserialize, Serialize};
 use serde_json::json;
+use time::OffsetDateTime;

 use crate::analytics::Analytics;
 use crate::extractors::authentication::{policies::*, GuardedData};
+use crate::extractors::sequential_extractor::SeqHandler;
 use crate::task::SummarizedTaskView;

 pub mod documents;
@@ -20,17 +21,17 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(
         web::resource("")
             .route(web::get().to(list_indexes))
-            .route(web::post().to(create_index)),
+            .route(web::post().to(SeqHandler(create_index))),
     )
     .service(
         web::scope("/{index_uid}")
             .service(
                 web::resource("")
-                    .route(web::get().to(get_index))
-                    .route(web::put().to(update_index))
-                    .route(web::delete().to(delete_index)),
+                    .route(web::get().to(SeqHandler(get_index)))
+                    .route(web::put().to(SeqHandler(update_index)))
+                    .route(web::delete().to(SeqHandler(delete_index))),
             )
-            .service(web::resource("/stats").route(web::get().to(get_index_stats)))
+            .service(web::resource("/stats").route(web::get().to(SeqHandler(get_index_stats))))
             .service(web::scope("/documents").configure(documents::configure))
             .service(web::scope("/search").configure(search::configure))
             .service(web::scope("/tasks").configure(tasks::configure))
@@ -95,9 +96,12 @@ pub struct UpdateIndexRequest {
 pub struct UpdateIndexResponse {
     name: String,
     uid: String,
-    created_at: DateTime<Utc>,
-    updated_at: DateTime<Utc>,
-    primary_key: Option<String>,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    created_at: OffsetDateTime,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    updated_at: OffsetDateTime,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    primary_key: OffsetDateTime,
 }

 pub async fn get_index(
@@ -9,12 +9,13 @@ use serde_json::Value;

 use crate::analytics::{Analytics, SearchAggregator};
 use crate::extractors::authentication::{policies::*, GuardedData};
+use crate::extractors::sequential_extractor::SeqHandler;

 pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(
         web::resource("")
-            .route(web::get().to(search_with_url_query))
-            .route(web::post().to(search_with_post)),
+            .route(web::get().to(SeqHandler(search_with_url_query)))
+            .route(web::post().to(SeqHandler(search_with_post))),
     );
 }

@@ -23,6 +23,7 @@ macro_rules! make_setting_route {

 use crate::analytics::Analytics;
 use crate::extractors::authentication::{policies::*, GuardedData};
+use crate::extractors::sequential_extractor::SeqHandler;
 use crate::task::SummarizedTaskView;
 use meilisearch_error::ResponseError;

@@ -98,9 +99,9 @@ macro_rules! make_setting_route {

         pub fn resources() -> Resource {
             Resource::new($route)
-                .route(web::get().to(get))
-                .route(web::post().to(update))
-                .route(web::delete().to(delete))
+                .route(web::get().to(SeqHandler(get)))
+                .route(web::post().to(SeqHandler(update)))
+                .route(web::delete().to(SeqHandler(delete)))
         }
     }
 };
@@ -226,11 +227,12 @@ make_setting_route!(
 macro_rules! generate_configure {
     ($($mod:ident),*) => {
         pub fn configure(cfg: &mut web::ServiceConfig) {
+            use crate::extractors::sequential_extractor::SeqHandler;
            cfg.service(
                 web::resource("")
-                    .route(web::post().to(update_all))
-                    .route(web::get().to(get_all))
-                    .route(web::delete().to(delete_all)))
+                    .route(web::post().to(SeqHandler(update_all)))
+                    .route(web::get().to(SeqHandler(get_all)))
+                    .route(web::delete().to(SeqHandler(delete_all))))
                 $(.service($mod::resources()))*;
         }
     };
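The `$(.service($mod::resources()))*` repetition above expands to one chained `.service(...)` call per settings module named in the macro invocation. A toy, self-contained illustration of that expansion pattern (the `Router` type and `register_all!` macro are invented):

struct Router(Vec<&'static str>);

impl Router {
    fn service(mut self, name: &'static str) -> Self {
        self.0.push(name);
        self
    }
}

macro_rules! register_all {
    ($router:expr, $($svc:ident),*) => {
        $router$(.service($svc))*
    };
}

fn main() {
    let displayed = "displayed-attributes";
    let searchable = "searchable-attributes";
    // Expands to Router(Vec::new()).service(displayed).service(searchable)
    let router = register_all!(Router(Vec::new()), displayed, searchable);
    println!("{:?}", router.0); // ["displayed-attributes", "searchable-attributes"]
}
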
@@ -1,18 +1,19 @@
 use actix_web::{web, HttpRequest, HttpResponse};
-use chrono::{DateTime, Utc};
 use log::debug;
 use meilisearch_error::ResponseError;
 use meilisearch_lib::MeiliSearch;
 use serde::{Deserialize, Serialize};
 use serde_json::json;
+use time::OffsetDateTime;

 use crate::analytics::Analytics;
 use crate::extractors::authentication::{policies::*, GuardedData};
+use crate::extractors::sequential_extractor::SeqHandler;
 use crate::task::{TaskListView, TaskView};

 pub fn configure(cfg: &mut web::ServiceConfig) {
-    cfg.service(web::resource("").route(web::get().to(get_all_tasks_status)))
-        .service(web::resource("{task_id}").route(web::get().to(get_task_status)));
+    cfg.service(web::resource("").route(web::get().to(SeqHandler(get_all_tasks_status))))
+        .service(web::resource("{task_id}").route(web::get().to(SeqHandler(get_task_status))));
 }

 #[derive(Debug, Serialize)]
@@ -20,9 +21,12 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
 pub struct UpdateIndexResponse {
     name: String,
     uid: String,
-    created_at: DateTime<Utc>,
-    updated_at: DateTime<Utc>,
-    primary_key: Option<String>,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    created_at: OffsetDateTime,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    updated_at: OffsetDateTime,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    primary_key: OffsetDateTime,
 }

 #[derive(Deserialize)]
@@ -1,7 +1,7 @@
 use actix_web::{web, HttpResponse};
-use chrono::{DateTime, Utc};
 use log::debug;
 use serde::{Deserialize, Serialize};
+use time::OffsetDateTime;

 use meilisearch_error::ResponseError;
 use meilisearch_lib::index::{Settings, Unchecked};
@@ -54,8 +54,10 @@ pub struct ProcessedUpdateResult {
     #[serde(rename = "type")]
     pub update_type: UpdateType,
     pub duration: f64, // in seconds
-    pub enqueued_at: DateTime<Utc>,
-    pub processed_at: DateTime<Utc>,
+    #[serde(with = "time::serde::rfc3339")]
+    pub enqueued_at: OffsetDateTime,
+    #[serde(with = "time::serde::rfc3339")]
+    pub processed_at: OffsetDateTime,
 }

 #[derive(Debug, Clone, Serialize, Deserialize)]
@@ -66,8 +68,10 @@ pub struct FailedUpdateResult {
     pub update_type: UpdateType,
     pub error: ResponseError,
     pub duration: f64, // in seconds
-    pub enqueued_at: DateTime<Utc>,
-    pub processed_at: DateTime<Utc>,
+    #[serde(with = "time::serde::rfc3339")]
+    pub enqueued_at: OffsetDateTime,
+    #[serde(with = "time::serde::rfc3339")]
+    pub processed_at: OffsetDateTime,
 }

 #[derive(Debug, Clone, Serialize, Deserialize)]
@@ -76,9 +80,13 @@ pub struct EnqueuedUpdateResult {
     pub update_id: u64,
     #[serde(rename = "type")]
     pub update_type: UpdateType,
-    pub enqueued_at: DateTime<Utc>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub started_processing_at: Option<DateTime<Utc>>,
+    #[serde(with = "time::serde::rfc3339")]
+    pub enqueued_at: OffsetDateTime,
+    #[serde(
+        skip_serializing_if = "Option::is_none",
+        with = "time::serde::rfc3339::option"
+    )]
+    pub started_processing_at: Option<OffsetDateTime>,
 }

 #[derive(Debug, Clone, Serialize, Deserialize)]
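A quick sketch of how the `with = "time::serde::rfc3339::option"` plus `skip_serializing_if` combination above behaves (invented struct, `serde-well-known` feature of `time` assumed):

use serde::Serialize;
use time::OffsetDateTime;

#[derive(Serialize)]
struct Enqueued {
    #[serde(
        skip_serializing_if = "Option::is_none",
        with = "time::serde::rfc3339::option"
    )]
    started_processing_at: Option<OffsetDateTime>,
}

fn main() {
    let none = Enqueued { started_processing_at: None };
    println!("{}", serde_json::to_string(&none).unwrap()); // {}

    let some = Enqueued { started_processing_at: Some(OffsetDateTime::UNIX_EPOCH) };
    // {"started_processing_at":"1970-01-01T00:00:00Z"}
    println!("{}", serde_json::to_string(&some).unwrap());
}
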
@@ -7,11 +7,12 @@ use serde_json::json;

 use crate::analytics::Analytics;
 use crate::extractors::authentication::{policies::*, GuardedData};
+use crate::extractors::sequential_extractor::SeqHandler;
 use crate::task::{TaskListView, TaskView};

 pub fn configure(cfg: &mut web::ServiceConfig) {
-    cfg.service(web::resource("").route(web::get().to(get_tasks)))
-        .service(web::resource("/{task_id}").route(web::get().to(get_task)));
+    cfg.service(web::resource("").route(web::get().to(SeqHandler(get_tasks))))
+        .service(web::resource("/{task_id}").route(web::get().to(SeqHandler(get_task))));
 }

 async fn get_tasks(
@@ -1,4 +1,6 @@
-use chrono::{DateTime, Duration, Utc};
+use std::fmt::Write;
+use std::write;

 use meilisearch_error::ResponseError;
 use meilisearch_lib::index::{Settings, Unchecked};
 use meilisearch_lib::milli::update::IndexDocumentsMethod;
@@ -7,6 +9,7 @@ use meilisearch_lib::tasks::task::{
     DocumentDeletion, Task, TaskContent, TaskEvent, TaskId, TaskResult,
 };
 use serde::{Serialize, Serializer};
+use time::{Duration, OffsetDateTime};

 use crate::AUTOBATCHING_ENABLED;

@@ -79,14 +82,52 @@ enum TaskDetails {
     ClearAll { deleted_documents: Option<u64> },
 }

+/// Serialize a `time::Duration` as a best effort ISO 8601 while waiting for
+/// https://github.com/time-rs/time/issues/378.
+/// This code is a port of the old code of time that was removed in 0.2.
 fn serialize_duration<S: Serializer>(
     duration: &Option<Duration>,
     serializer: S,
 ) -> Result<S::Ok, S::Error> {
     match duration {
         Some(duration) => {
-            let duration_str = duration.to_string();
-            serializer.serialize_str(&duration_str)
+            // technically speaking, negative duration is not valid ISO 8601
+            if duration.is_negative() {
+                return serializer.serialize_none();
+            }
+
+            const SECS_PER_DAY: i64 = Duration::DAY.whole_seconds();
+            let secs = duration.whole_seconds();
+            let days = secs / SECS_PER_DAY;
+            let secs = secs - days * SECS_PER_DAY;
+            let hasdate = days != 0;
+            let nanos = duration.subsec_nanoseconds();
+            let hastime = (secs != 0 || nanos != 0) || !hasdate;
+
+            // all the following unwrap can't fail
+            let mut res = String::new();
+            write!(&mut res, "P").unwrap();
+
+            if hasdate {
+                write!(&mut res, "{}D", days).unwrap();
+            }
+
+            const NANOS_PER_MILLI: i32 = Duration::MILLISECOND.subsec_nanoseconds();
+            const NANOS_PER_MICRO: i32 = Duration::MICROSECOND.subsec_nanoseconds();
+
+            if hastime {
+                if nanos == 0 {
+                    write!(&mut res, "T{}S", secs).unwrap();
+                } else if nanos % NANOS_PER_MILLI == 0 {
+                    write!(&mut res, "T{}.{:03}S", secs, nanos / NANOS_PER_MILLI).unwrap();
+                } else if nanos % NANOS_PER_MICRO == 0 {
+                    write!(&mut res, "T{}.{:06}S", secs, nanos / NANOS_PER_MICRO).unwrap();
+                } else {
+                    write!(&mut res, "T{}.{:09}S", secs, nanos).unwrap();
+                }
+            }
+
+            serializer.serialize_str(&res)
         }
         None => serializer.serialize_none(),
     }
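
For reference, a compact, standalone rendering of the formatting rules above (positive durations only), handy for checking expected output against the `time` crate:

use std::fmt::Write;
use time::Duration;

// Standalone copy of the formatting rules from serialize_duration, for
// positive durations: whole days become a "D" component, the remainder a
// "T...S" component with millisecond/microsecond/nanosecond precision.
fn to_iso8601(duration: Duration) -> String {
    const SECS_PER_DAY: i64 = Duration::DAY.whole_seconds();
    const NANOS_PER_MILLI: i32 = Duration::MILLISECOND.subsec_nanoseconds();
    const NANOS_PER_MICRO: i32 = Duration::MICROSECOND.subsec_nanoseconds();

    let secs = duration.whole_seconds();
    let days = secs / SECS_PER_DAY;
    let secs = secs - days * SECS_PER_DAY;
    let nanos = duration.subsec_nanoseconds();
    let hasdate = days != 0;
    let hastime = (secs != 0 || nanos != 0) || !hasdate;

    let mut res = String::from("P");
    if hasdate {
        write!(&mut res, "{}D", days).unwrap();
    }
    if hastime {
        if nanos == 0 {
            write!(&mut res, "T{}S", secs).unwrap();
        } else if nanos % NANOS_PER_MILLI == 0 {
            write!(&mut res, "T{}.{:03}S", secs, nanos / NANOS_PER_MILLI).unwrap();
        } else if nanos % NANOS_PER_MICRO == 0 {
            write!(&mut res, "T{}.{:06}S", secs, nanos / NANOS_PER_MICRO).unwrap();
        } else {
            write!(&mut res, "T{}.{:09}S", secs, nanos).unwrap();
        }
    }
    res
}

fn main() {
    assert_eq!(to_iso8601(Duration::seconds(42)), "PT42S");
    assert_eq!(to_iso8601(Duration::days(2)), "P2D");
    assert_eq!(to_iso8601(Duration::milliseconds(1500)), "PT1.500S");
    assert_eq!(to_iso8601(Duration::microseconds(1)), "PT0.000001S");
}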
@@ -106,9 +147,12 @@ pub struct TaskView {
     error: Option<ResponseError>,
     #[serde(serialize_with = "serialize_duration")]
     duration: Option<Duration>,
-    enqueued_at: DateTime<Utc>,
-    started_at: Option<DateTime<Utc>>,
-    finished_at: Option<DateTime<Utc>>,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    enqueued_at: OffsetDateTime,
+    #[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
+    started_at: Option<OffsetDateTime>,
+    #[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
+    finished_at: Option<OffsetDateTime>,
     #[serde(skip_serializing_if = "Option::is_none")]
     batch_uid: Option<Option<BatchId>>,
 }
@@ -302,7 +346,8 @@ pub struct SummarizedTaskView {
     status: TaskStatus,
     #[serde(rename = "type")]
     task_type: TaskType,
-    enqueued_at: DateTime<Utc>,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    enqueued_at: OffsetDateTime,
 }

 impl From<Task> for SummarizedTaskView {
@@ -257,7 +257,7 @@ async fn error_add_api_key_missing_parameter() {
         "message": "`indexes` field is mandatory.",
         "code": "missing_parameter",
         "type": "invalid_request",
-        "link":"https://docs.meilisearch.com/errors#missing_parameter"
+        "link": "https://docs.meilisearch.com/errors#missing_parameter"
     });

     assert_eq!(response, expected_response);
@@ -275,7 +275,7 @@ async fn error_add_api_key_missing_parameter() {
         "message": "`actions` field is mandatory.",
         "code": "missing_parameter",
         "type": "invalid_request",
-        "link":"https://docs.meilisearch.com/errors#missing_parameter"
+        "link": "https://docs.meilisearch.com/errors#missing_parameter"
     });

     assert_eq!(response, expected_response);
@@ -293,7 +293,7 @@ async fn error_add_api_key_missing_parameter() {
         "message": "`expiresAt` field is mandatory.",
         "code": "missing_parameter",
         "type": "invalid_request",
-        "link":"https://docs.meilisearch.com/errors#missing_parameter"
+        "link": "https://docs.meilisearch.com/errors#missing_parameter"
     });

     assert_eq!(response, expected_response);
@@ -316,7 +316,7 @@ async fn error_add_api_key_invalid_parameters_description() {
     let (response, code) = server.add_api_key(content).await;

     let expected_response = json!({
-        "message": r#"description field value `{"name":"products"}` is invalid. It should be a string or specified as a null value."#,
+        "message": r#"`description` field value `{"name":"products"}` is invalid. It should be a string or specified as a null value."#,
         "code": "invalid_api_key_description",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#invalid_api_key_description"
@@ -342,7 +342,7 @@ async fn error_add_api_key_invalid_parameters_indexes() {
     let (response, code) = server.add_api_key(content).await;

     let expected_response = json!({
-        "message": r#"indexes field value `{"name":"products"}` is invalid. It should be an array of string representing index names."#,
+        "message": r#"`indexes` field value `{"name":"products"}` is invalid. It should be an array of string representing index names."#,
         "code": "invalid_api_key_indexes",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#invalid_api_key_indexes"
@@ -366,7 +366,7 @@ async fn error_add_api_key_invalid_parameters_actions() {
     let (response, code) = server.add_api_key(content).await;

     let expected_response = json!({
-        "message": r#"actions field value `{"name":"products"}` is invalid. It should be an array of string representing action names."#,
+        "message": r#"`actions` field value `{"name":"products"}` is invalid. It should be an array of string representing action names."#,
         "code": "invalid_api_key_actions",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"
@@ -386,7 +386,7 @@ async fn error_add_api_key_invalid_parameters_actions() {
     let (response, code) = server.add_api_key(content).await;

     let expected_response = json!({
-        "message": r#"actions field value `["doc.add"]` is invalid. It should be an array of string representing action names."#,
+        "message": r#"`actions` field value `["doc.add"]` is invalid. It should be an array of string representing action names."#,
         "code": "invalid_api_key_actions",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"
@@ -412,7 +412,7 @@ async fn error_add_api_key_invalid_parameters_expires_at() {
     let (response, code) = server.add_api_key(content).await;

     let expected_response = json!({
-        "message": r#"expiresAt field value `{"name":"products"}` is invalid. It should be in ISO-8601 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DDTHH:MM:SS'."#,
+        "message": r#"`expiresAt` field value `{"name":"products"}` is invalid. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'."#,
         "code": "invalid_api_key_expires_at",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#invalid_api_key_expires_at"
@@ -438,7 +438,7 @@ async fn error_add_api_key_invalid_parameters_expires_at_in_the_past() {
     let (response, code) = server.add_api_key(content).await;

     let expected_response = json!({
-        "message": r#"expiresAt field value `"2010-11-13T00:00:00Z"` is invalid. It should be in ISO-8601 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DDTHH:MM:SS'."#,
+        "message": r#"`expiresAt` field value `"2010-11-13T00:00:00Z"` is invalid. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'."#,
         "code": "invalid_api_key_expires_at",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#invalid_api_key_expires_at"
@@ -1213,7 +1213,7 @@ async fn error_patch_api_key_indexes_invalid_parameters() {
     let (response, code) = server.patch_api_key(&key, content).await;

     let expected_response = json!({
-        "message": "description field value `13` is invalid. It should be a string or specified as a null value.",
+        "message": "`description` field value `13` is invalid. It should be a string or specified as a null value.",
         "code": "invalid_api_key_description",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#invalid_api_key_description"
@@ -1230,7 +1230,7 @@ async fn error_patch_api_key_indexes_invalid_parameters() {
     let (response, code) = server.patch_api_key(&key, content).await;

     let expected_response = json!({
-        "message": "indexes field value `13` is invalid. It should be an array of string representing index names.",
+        "message": "`indexes` field value `13` is invalid. It should be an array of string representing index names.",
         "code": "invalid_api_key_indexes",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#invalid_api_key_indexes"
@@ -1246,7 +1246,7 @@ async fn error_patch_api_key_indexes_invalid_parameters() {
     let (response, code) = server.patch_api_key(&key, content).await;

     let expected_response = json!({
-        "message": "actions field value `13` is invalid. It should be an array of string representing action names.",
+        "message": "`actions` field value `13` is invalid. It should be an array of string representing action names.",
         "code": "invalid_api_key_actions",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"
@@ -1262,7 +1262,7 @@ async fn error_patch_api_key_indexes_invalid_parameters() {
     let (response, code) = server.patch_api_key(&key, content).await;

     let expected_response = json!({
-        "message": "expiresAt field value `13` is invalid. It should be in ISO-8601 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DDTHH:MM:SS'.",
+        "message": "`expiresAt` field value `13` is invalid. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.",
         "code": "invalid_api_key_expires_at",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#invalid_api_key_expires_at"
@@ -1,9 +1,10 @@
 use crate::common::Server;
-use chrono::{Duration, Utc};
+use ::time::format_description::well_known::Rfc3339;
 use maplit::{hashmap, hashset};
 use once_cell::sync::Lazy;
 use serde_json::{json, Value};
 use std::collections::{HashMap, HashSet};
+use time::{Duration, OffsetDateTime};

 pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'static str>>> =
     Lazy::new(|| {
@@ -76,7 +77,7 @@ async fn error_access_expired_key() {
     let content = json!({
         "indexes": ["products"],
         "actions": ALL_ACTIONS.clone(),
-        "expiresAt": (Utc::now() + Duration::seconds(1)),
+        "expiresAt": (OffsetDateTime::now_utc() + Duration::seconds(1)).format(&Rfc3339).unwrap(),
     });

     let (response, code) = server.add_api_key(content).await;
@@ -106,7 +107,7 @@ async fn error_access_unauthorized_index() {
     let content = json!({
         "indexes": ["sales"],
         "actions": ALL_ACTIONS.clone(),
-        "expiresAt": Utc::now() + Duration::hours(1),
+        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
     });

     let (response, code) = server.add_api_key(content).await;
@@ -137,7 +138,7 @@ async fn error_access_unauthorized_action() {
     let content = json!({
         "indexes": ["products"],
         "actions": [],
-        "expiresAt": Utc::now() + Duration::hours(1),
+        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
     });

     let (response, code) = server.add_api_key(content).await;
@@ -174,7 +175,7 @@ async fn access_authorized_restricted_index() {
     let content = json!({
         "indexes": ["products"],
         "actions": [],
-        "expiresAt": Utc::now() + Duration::hours(1),
+        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
     });

     let (response, code) = server.add_api_key(content).await;
@@ -213,7 +214,7 @@ async fn access_authorized_no_index_restriction() {
     let content = json!({
         "indexes": ["*"],
         "actions": [],
-        "expiresAt": Utc::now() + Duration::hours(1),
+        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
     });

     let (response, code) = server.add_api_key(content).await;
@@ -263,7 +264,7 @@ async fn access_authorized_stats_restricted_index() {
     let content = json!({
         "indexes": ["products"],
         "actions": ["stats.get"],
-        "expiresAt": Utc::now() + Duration::hours(1),
+        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
     });
     let (response, code) = server.add_api_key(content).await;
     assert_eq!(code, 201);
@@ -303,7 +304,7 @@ async fn access_authorized_stats_no_index_restriction() {
     let content = json!({
         "indexes": ["*"],
         "actions": ["stats.get"],
-        "expiresAt": Utc::now() + Duration::hours(1),
+        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
     });
     let (response, code) = server.add_api_key(content).await;
     assert_eq!(code, 201);
@@ -343,7 +344,7 @@ async fn list_authorized_indexes_restricted_index() {
     let content = json!({
         "indexes": ["products"],
         "actions": ["indexes.get"],
-        "expiresAt": Utc::now() + Duration::hours(1),
+        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
     });
     let (response, code) = server.add_api_key(content).await;
     assert_eq!(code, 201);
@@ -384,7 +385,7 @@ async fn list_authorized_indexes_no_index_restriction() {
     let content = json!({
         "indexes": ["*"],
         "actions": ["indexes.get"],
-        "expiresAt": Utc::now() + Duration::hours(1),
+        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
     });
     let (response, code) = server.add_api_key(content).await;
     assert_eq!(code, 201);
@@ -424,7 +425,7 @@ async fn list_authorized_tasks_restricted_index() {
     let content = json!({
         "indexes": ["products"],
         "actions": ["tasks.get"],
-        "expiresAt": Utc::now() + Duration::hours(1),
+        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
     });
     let (response, code) = server.add_api_key(content).await;
     assert_eq!(code, 201);
@@ -464,7 +465,7 @@ async fn list_authorized_tasks_no_index_restriction() {
     let content = json!({
         "indexes": ["*"],
         "actions": ["tasks.get"],
-        "expiresAt": Utc::now() + Duration::hours(1),
+        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
     });
     let (response, code) = server.add_api_key(content).await;
     assert_eq!(code, 201);
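These tests now build `expiresAt` values as RFC 3339 strings. The expression on its own, as a runnable sketch (assumes the `time` crate with its `formatting` feature):

use time::{format_description::well_known::Rfc3339, Duration, OffsetDateTime};

fn main() {
    // An expiry one hour from now, formatted the way the tests build expiresAt.
    let expires_at = (OffsetDateTime::now_utc() + Duration::hours(1))
        .format(&Rfc3339)
        .unwrap();
    println!("{}", expires_at); // e.g. 2022-02-14T16:00:00.123456789Z
}
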
@ -1,9 +1,10 @@
|
|||||||
use crate::common::Server;
|
use crate::common::Server;
|
||||||
use chrono::{Duration, Utc};
|
use ::time::format_description::well_known::Rfc3339;
|
||||||
use maplit::hashmap;
|
use maplit::hashmap;
|
||||||
use once_cell::sync::Lazy;
|
use once_cell::sync::Lazy;
|
||||||
use serde_json::{json, Value};
|
use serde_json::{json, Value};
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
|
use time::{Duration, OffsetDateTime};
|
||||||
|
|
||||||
use super::authorization::{ALL_ACTIONS, AUTHORIZATIONS};
|
use super::authorization::{ALL_ACTIONS, AUTHORIZATIONS};
|
||||||
|
|
||||||
@ -63,22 +64,22 @@ static ACCEPTED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
|
|||||||
json!({
|
json!({
|
||||||
"indexes": ["*"],
|
"indexes": ["*"],
|
||||||
"actions": ["*"],
|
"actions": ["*"],
|
||||||
"expiresAt": Utc::now() + Duration::days(1)
|
"expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
|
||||||
}),
|
}),
|
||||||
json!({
|
json!({
|
||||||
"indexes": ["*"],
|
"indexes": ["*"],
|
||||||
"actions": ["search"],
|
"actions": ["search"],
|
||||||
"expiresAt": Utc::now() + Duration::days(1)
|
"expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
|
||||||
}),
|
}),
|
||||||
json!({
|
json!({
|
||||||
"indexes": ["sales"],
|
"indexes": ["sales"],
|
||||||
"actions": ["*"],
|
"actions": ["*"],
|
||||||
"expiresAt": Utc::now() + Duration::days(1)
|
"expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
|
||||||
}),
|
}),
|
||||||
json!({
|
json!({
|
||||||
"indexes": ["sales"],
|
"indexes": ["sales"],
|
||||||
"actions": ["search"],
|
"actions": ["search"],
|
||||||
"expiresAt": Utc::now() + Duration::days(1)
|
"expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
|
||||||
}),
|
}),
|
||||||
]
|
]
|
||||||
});
|
});
|
||||||
@ -89,23 +90,23 @@ static REFUSED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
|
|||||||
json!({
|
json!({
|
||||||
"indexes": ["*"],
|
"indexes": ["*"],
|
||||||
"actions": ALL_ACTIONS.iter().cloned().filter(|a| *a != "search" && *a != "*").collect::<Vec<_>>(),
|
"actions": ALL_ACTIONS.iter().cloned().filter(|a| *a != "search" && *a != "*").collect::<Vec<_>>(),
|
||||||
"expiresAt": Utc::now() + Duration::days(1)
|
"expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
|
||||||
}),
|
}),
|
||||||
json!({
|
json!({
|
||||||
"indexes": ["sales"],
|
"indexes": ["sales"],
|
||||||
"actions": ALL_ACTIONS.iter().cloned().filter(|a| *a != "search" && *a != "*").collect::<Vec<_>>(),
|
"actions": ALL_ACTIONS.iter().cloned().filter(|a| *a != "search" && *a != "*").collect::<Vec<_>>(),
|
||||||
"expiresAt": Utc::now() + Duration::days(1)
|
"expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
|
||||||
}),
|
}),
|
||||||
// bad index
|
// bad index
|
||||||
json!({
|
json!({
|
||||||
"indexes": ["products"],
|
"indexes": ["products"],
|
||||||
"actions": ["*"],
|
"actions": ["*"],
|
||||||
"expiresAt": Utc::now() + Duration::days(1)
|
"expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
|
||||||
}),
|
}),
|
||||||
json!({
|
json!({
|
||||||
"indexes": ["products"],
|
"indexes": ["products"],
|
||||||
"actions": ["search"],
|
"actions": ["search"],
|
||||||
"expiresAt": Utc::now() + Duration::days(1)
|
"expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
|
||||||
}),
|
}),
|
||||||
]
|
]
|
||||||
});
|
});
|
||||||
@ -204,19 +205,19 @@ async fn search_authorized_simple_token() {
|
|||||||
let tenant_tokens = vec![
|
let tenant_tokens = vec![
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!({"*": {}}),
|
"searchRules" => json!({"*": {}}),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!(["*"]),
|
"searchRules" => json!(["*"]),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!({"sales": {}}),
|
"searchRules" => json!({"sales": {}}),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!(["sales"]),
|
"searchRules" => json!(["sales"]),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!({"*": {}}),
|
"searchRules" => json!({"*": {}}),
|
||||||
@ -253,19 +254,19 @@ async fn search_authorized_filter_token() {
|
|||||||
let tenant_tokens = vec![
|
let tenant_tokens = vec![
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!({"*": {"filter": "color = blue"}}),
|
"searchRules" => json!({"*": {"filter": "color = blue"}}),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!({"sales": {"filter": "color = blue"}}),
|
"searchRules" => json!({"sales": {"filter": "color = blue"}}),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!({"*": {"filter": ["color = blue"]}}),
|
"searchRules" => json!({"*": {"filter": ["color = blue"]}}),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!({"sales": {"filter": ["color = blue"]}}),
|
"searchRules" => json!({"sales": {"filter": ["color = blue"]}}),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
// filter on sales should override filters on *
|
// filter on sales should override filters on *
|
||||||
hashmap! {
|
hashmap! {
|
||||||
@ -273,28 +274,28 @@ async fn search_authorized_filter_token() {
|
|||||||
"*": {"filter": "color = green"},
|
"*": {"filter": "color = green"},
|
||||||
"sales": {"filter": "color = blue"}
|
"sales": {"filter": "color = blue"}
|
||||||
}),
|
}),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!({
|
"searchRules" => json!({
|
||||||
"*": {},
|
"*": {},
|
||||||
"sales": {"filter": "color = blue"}
|
"sales": {"filter": "color = blue"}
|
||||||
}),
|
}),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!({
|
"searchRules" => json!({
|
||||||
"*": {"filter": "color = green"},
|
"*": {"filter": "color = green"},
|
||||||
"sales": {"filter": ["color = blue"]}
|
"sales": {"filter": ["color = blue"]}
|
||||||
}),
|
}),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!({
|
"searchRules" => json!({
|
||||||
"*": {},
|
"*": {},
|
||||||
"sales": {"filter": ["color = blue"]}
|
"sales": {"filter": ["color = blue"]}
|
||||||
}),
|
}),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
];
|
];
|
||||||
|
|
||||||
@ -307,19 +308,19 @@ async fn filter_search_authorized_filter_token() {
|
|||||||
let tenant_tokens = vec![
|
let tenant_tokens = vec![
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!({"*": {"filter": "color = blue"}}),
|
"searchRules" => json!({"*": {"filter": "color = blue"}}),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!({"sales": {"filter": "color = blue"}}),
|
"searchRules" => json!({"sales": {"filter": "color = blue"}}),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!({"*": {"filter": ["color = blue"]}}),
|
"searchRules" => json!({"*": {"filter": ["color = blue"]}}),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!({"sales": {"filter": ["color = blue"]}}),
|
"searchRules" => json!({"sales": {"filter": ["color = blue"]}}),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
// filter on sales should override filters on *
|
// filter on sales should override filters on *
|
||||||
hashmap! {
|
hashmap! {
|
||||||
@ -327,28 +328,28 @@ async fn filter_search_authorized_filter_token() {
|
|||||||
"*": {"filter": "color = green"},
|
"*": {"filter": "color = green"},
|
||||||
"sales": {"filter": "color = blue"}
|
"sales": {"filter": "color = blue"}
|
||||||
}),
|
}),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!({
|
"searchRules" => json!({
|
||||||
"*": {},
|
"*": {},
|
||||||
"sales": {"filter": "color = blue"}
|
"sales": {"filter": "color = blue"}
|
||||||
}),
|
}),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!({
|
"searchRules" => json!({
|
||||||
"*": {"filter": "color = green"},
|
"*": {"filter": "color = green"},
|
||||||
"sales": {"filter": ["color = blue"]}
|
"sales": {"filter": ["color = blue"]}
|
||||||
}),
|
}),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!({
|
"searchRules" => json!({
|
||||||
"*": {},
|
"*": {},
|
||||||
"sales": {"filter": ["color = blue"]}
|
"sales": {"filter": ["color = blue"]}
|
||||||
}),
|
}),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
];
|
];
|
||||||
|
|
||||||
@ -361,27 +362,27 @@ async fn error_search_token_forbidden_parent_key() {
|
|||||||
let tenant_tokens = vec![
|
let tenant_tokens = vec![
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!({"*": {}}),
|
"searchRules" => json!({"*": {}}),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!({"*": Value::Null}),
|
"searchRules" => json!({"*": Value::Null}),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!(["*"]),
|
"searchRules" => json!(["*"]),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!({"sales": {}}),
|
"searchRules" => json!({"sales": {}}),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!({"sales": Value::Null}),
|
"searchRules" => json!({"sales": Value::Null}),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!(["sales"]),
|
"searchRules" => json!(["sales"]),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
];
|
];
|
||||||
|
|
||||||
@ -395,11 +396,11 @@ async fn error_search_forbidden_token() {
|
|||||||
// bad index
|
// bad index
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!({"products": {}}),
|
"searchRules" => json!({"products": {}}),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!(["products"]),
|
"searchRules" => json!(["products"]),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!({"products": {}}),
|
"searchRules" => json!({"products": {}}),
|
||||||
@ -416,27 +417,27 @@ async fn error_search_forbidden_token() {
|
|||||||
// expired token
|
// expired token
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!({"*": {}}),
|
"searchRules" => json!({"*": {}}),
|
||||||
"exp" => json!((Utc::now() - Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!({"*": Value::Null}),
|
"searchRules" => json!({"*": Value::Null}),
|
||||||
"exp" => json!((Utc::now() - Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!(["*"]),
|
"searchRules" => json!(["*"]),
|
||||||
"exp" => json!((Utc::now() - Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!({"sales": {}}),
|
"searchRules" => json!({"sales": {}}),
|
||||||
"exp" => json!((Utc::now() - Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!({"sales": Value::Null}),
|
"searchRules" => json!({"sales": Value::Null}),
|
||||||
"exp" => json!((Utc::now() - Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
hashmap! {
|
hashmap! {
|
||||||
"searchRules" => json!(["sales"]),
|
"searchRules" => json!(["sales"]),
|
||||||
"exp" => json!((Utc::now() - Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
|
||||||
},
|
},
|
||||||
];
|
];
|
||||||
|
|
||||||
@ -452,7 +453,7 @@ async fn error_access_forbidden_routes() {
|
|||||||
let content = json!({
|
let content = json!({
|
||||||
"indexes": ["*"],
|
"indexes": ["*"],
|
||||||
"actions": ["*"],
|
"actions": ["*"],
|
||||||
"expiresAt": (Utc::now() + Duration::hours(1)),
|
"expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
|
||||||
});
|
});
|
||||||
|
|
||||||
let (response, code) = server.add_api_key(content).await;
|
let (response, code) = server.add_api_key(content).await;
|
||||||
@ -463,7 +464,7 @@ async fn error_access_forbidden_routes() {
|
|||||||
|
|
||||||
let tenant_token = hashmap! {
|
let tenant_token = hashmap! {
|
||||||
"searchRules" => json!(["*"]),
|
"searchRules" => json!(["*"]),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
};
|
};
|
||||||
let web_token = generate_tenant_token(&key, tenant_token);
|
let web_token = generate_tenant_token(&key, tenant_token);
|
||||||
server.use_api_key(&web_token);
|
server.use_api_key(&web_token);
|
||||||
@ -487,7 +488,7 @@ async fn error_access_expired_parent_key() {
|
|||||||
let content = json!({
|
let content = json!({
|
||||||
"indexes": ["*"],
|
"indexes": ["*"],
|
||||||
"actions": ["*"],
|
"actions": ["*"],
|
||||||
"expiresAt": (Utc::now() + Duration::seconds(1)),
|
"expiresAt": (OffsetDateTime::now_utc() + Duration::seconds(1)).format(&Rfc3339).unwrap(),
|
||||||
});
|
});
|
||||||
|
|
||||||
let (response, code) = server.add_api_key(content).await;
|
let (response, code) = server.add_api_key(content).await;
|
||||||
@ -498,7 +499,7 @@ async fn error_access_expired_parent_key() {
|
|||||||
|
|
||||||
let tenant_token = hashmap! {
|
let tenant_token = hashmap! {
|
||||||
"searchRules" => json!(["*"]),
|
"searchRules" => json!(["*"]),
|
||||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||||
};
|
};
|
||||||
let web_token = generate_tenant_token(&key, tenant_token);
|
let web_token = generate_tenant_token(&key, tenant_token);
|
||||||
server.use_api_key(&web_token);
|
server.use_api_key(&web_token);
|
||||||
@@ -529,7 +530,7 @@ async fn error_access_modified_token() {
     let content = json!({
         "indexes": ["*"],
        "actions": ["*"],
-        "expiresAt": (Utc::now() + Duration::hours(1)),
+        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
     });

     let (response, code) = server.add_api_key(content).await;
@@ -540,7 +541,7 @@ async fn error_access_modified_token() {

     let tenant_token = hashmap! {
         "searchRules" => json!(["products"]),
-        "exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
+        "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
     };
     let web_token = generate_tenant_token(&key, tenant_token);
     server.use_api_key(&web_token);
@@ -554,7 +555,7 @@ async fn error_access_modified_token() {

     let tenant_token = hashmap! {
         "searchRules" => json!(["*"]),
-        "exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
+        "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
     };

     let alt = generate_tenant_token(&key, tenant_token);
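Every hunk above makes the same substitution: chrono's `Utc::now()` and `.timestamp()` become the time crate's `OffsetDateTime::now_utc()` and `.unix_timestamp()`, and RFC 3339 strings are produced explicitly through the well-known `Rfc3339` format. A minimal standalone sketch of the two expiry values these tests build (assuming time 0.3 with the "formatting" feature, which the Cargo.toml hunk further below enables):

use time::{format_description::well_known::Rfc3339, Duration, OffsetDateTime};

fn main() {
    let expiry = OffsetDateTime::now_utc() + Duration::hours(1);
    // RFC 3339 string for the API key's "expiresAt" field, e.g. "2022-03-21T14:45:12Z"
    let expires_at = expiry.format(&Rfc3339).unwrap();
    // Seconds since the Unix epoch for the tenant token's "exp" claim
    let exp = expiry.unix_timestamp();
    println!("{} {}", expires_at, exp);
}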
@@ -1,8 +1,8 @@
 use crate::common::{GetAllDocumentsOptions, Server};
 use actix_web::test;
-use chrono::DateTime;
 use meilisearch_http::{analytics, create_app};
 use serde_json::{json, Value};
+use time::{format_description::well_known::Rfc3339, OffsetDateTime};

 /// This is the basic usage of our API and every other tests uses the content-type application/json
 #[actix_rt::test]
@@ -568,9 +568,9 @@ async fn add_documents_no_index_creation() {
     assert_eq!(response["details"]["indexedDocuments"], 1);

     let processed_at =
-        DateTime::parse_from_rfc3339(response["finishedAt"].as_str().unwrap()).unwrap();
+        OffsetDateTime::parse(response["finishedAt"].as_str().unwrap(), &Rfc3339).unwrap();
     let enqueued_at =
-        DateTime::parse_from_rfc3339(response["enqueuedAt"].as_str().unwrap()).unwrap();
+        OffsetDateTime::parse(response["enqueuedAt"].as_str().unwrap(), &Rfc3339).unwrap();
     assert!(processed_at > enqueued_at);

     // index was created, and primary key was infered.
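The parsing side of the migration is symmetric: `DateTime::parse_from_rfc3339(s)` becomes `OffsetDateTime::parse(s, &Rfc3339)`. A small sketch of the pattern (the timestamps are made-up examples; assumes the time crate's "parsing" feature):

use time::{format_description::well_known::Rfc3339, OffsetDateTime};

fn main() {
    let enqueued_at = OffsetDateTime::parse("2022-03-21T13:45:12Z", &Rfc3339).unwrap();
    let finished_at = OffsetDateTime::parse("2022-03-21T13:45:13Z", &Rfc3339).unwrap();
    // OffsetDateTime is Ord, so the tests can compare the two instants directly.
    assert!(finished_at > enqueued_at);
}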
@@ -1,6 +1,6 @@
 use crate::common::Server;
-use chrono::DateTime;
 use serde_json::json;
+use time::{format_description::well_known::Rfc3339, OffsetDateTime};

 #[actix_rt::test]
 async fn update_primary_key() {
@@ -25,8 +25,10 @@ async fn update_primary_key() {
     assert!(response.get("createdAt").is_some());
     assert!(response.get("updatedAt").is_some());

-    let created_at = DateTime::parse_from_rfc3339(response["createdAt"].as_str().unwrap()).unwrap();
-    let updated_at = DateTime::parse_from_rfc3339(response["updatedAt"].as_str().unwrap()).unwrap();
+    let created_at =
+        OffsetDateTime::parse(response["createdAt"].as_str().unwrap(), &Rfc3339).unwrap();
+    let updated_at =
+        OffsetDateTime::parse(response["updatedAt"].as_str().unwrap(), &Rfc3339).unwrap();
     assert!(created_at < updated_at);

     assert_eq!(response["primaryKey"], "primary");
@@ -1,4 +1,5 @@
 use serde_json::json;
+use time::{format_description::well_known::Rfc3339, OffsetDateTime};

 use crate::common::Server;

@@ -57,11 +58,15 @@ async fn stats() {

     index.wait_task(1).await;

+    let timestamp = OffsetDateTime::now_utc();
     let (response, code) = server.stats().await;

     assert_eq!(code, 200);
     assert!(response["databaseSize"].as_u64().unwrap() > 0);
-    assert!(response.get("lastUpdate").is_some());
+    let last_update =
+        OffsetDateTime::parse(response["lastUpdate"].as_str().unwrap(), &Rfc3339).unwrap();
+    assert!(last_update - timestamp < time::Duration::SECOND);

     assert_eq!(response["indexes"]["test"]["numberOfDocuments"], 2);
     assert!(response["indexes"]["test"]["isIndexing"] == false);
     assert_eq!(response["indexes"]["test"]["fieldDistribution"]["id"], 2);
@@ -1,6 +1,7 @@
 use crate::common::Server;
-use chrono::{DateTime, Utc};
 use serde_json::json;
+use time::format_description::well_known::Rfc3339;
+use time::OffsetDateTime;

 #[actix_rt::test]
 async fn error_get_task_unexisting_index() {
@@ -98,7 +99,8 @@ macro_rules! assert_valid_summarized_task {
         assert_eq!($response["status"], "enqueued");
         assert_eq!($response["type"], $task_type);
         let date = $response["enqueuedAt"].as_str().expect("missing date");
-        date.parse::<DateTime<Utc>>().unwrap();
+
+        OffsetDateTime::parse(date, &Rfc3339).unwrap();
     }};
 }

@@ -6,62 +6,60 @@ edition = "2021"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
-actix-web = { version = "4", default-features = false }
+actix-web = { version = "4.0.1", default-features = false }
-anyhow = { version = "1.0.43", features = ["backtrace"] }
+anyhow = { version = "1.0.56", features = ["backtrace"] }
-async-stream = "0.3.2"
+async-stream = "0.3.3"
-async-trait = "0.1.51"
+async-trait = "0.1.52"
-byte-unit = { version = "4.0.12", default-features = false, features = ["std"] }
+atomic_refcell = "0.1.8"
+byte-unit = { version = "4.0.14", default-features = false, features = ["std"] }
 bytes = "1.1.0"
-chrono = { version = "0.4.19", features = ["serde"] }
+clap = { version = "3.1.6", features = ["derive", "env"] }
+crossbeam-channel = "0.5.2"
 csv = "1.1.6"
-crossbeam-channel = "0.5.1"
+derivative = "2.2.0"
 either = "1.6.1"
-flate2 = "1.0.21"
+flate2 = "1.0.22"
+fs_extra = "1.2.0"
 fst = "0.4.7"
-futures = "0.3.17"
+futures = "0.3.21"
-futures-util = "0.3.17"
+futures-util = "0.3.21"
-heed = { git = "https://github.com/Kerollmops/heed", tag = "v0.12.1" }
+http = "0.2.6"
-http = "0.2.4"
+indexmap = { version = "1.8.0", features = ["serde-1"] }
-indexmap = { version = "1.7.0", features = ["serde-1"] }
+itertools = "0.10.3"
-itertools = "0.10.1"
 lazy_static = "1.4.0"
 log = "0.4.14"
-meilisearch-error = { path = "../meilisearch-error" }
 meilisearch-auth = { path = "../meilisearch-auth" }
-milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.22.1" }
+meilisearch-error = { path = "../meilisearch-error" }
+milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.24.0" }
 mime = "0.3.16"
-num_cpus = "1.13.0"
+num_cpus = "1.13.1"
-once_cell = "1.8.0"
+obkv = "0.2.0"
-parking_lot = "0.11.2"
+once_cell = "1.10.0"
-rand = "0.8.4"
+parking_lot = "0.12.0"
+rand = "0.8.5"
 rayon = "1.5.1"
-regex = "1.5.4"
+regex = "1.5.5"
-rustls = "0.19.1"
+reqwest = { version = "0.11.9", features = ["json", "rustls-tls"], default-features = false, optional = true }
-serde = { version = "1.0.130", features = ["derive"] }
+rustls = "0.20.4"
-serde_json = { version = "1.0.67", features = ["preserve_order"] }
+serde = { version = "1.0.136", features = ["derive"] }
-siphasher = "0.3.7"
+serde_json = { version = "1.0.79", features = ["preserve_order"] }
-slice-group-by = "0.2.6"
+siphasher = "0.3.10"
-clap = { version = "3.0", features = ["derive", "env"] }
+slice-group-by = "0.3.0"
-tar = "0.4.37"
+sysinfo = "0.23.5"
-tempfile = "3.2.0"
+tar = "0.4.38"
-thiserror = "1.0.28"
+tempfile = "3.3.0"
-tokio = { version = "1.11.0", features = ["full"] }
+thiserror = "1.0.30"
+time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
+tokio = { version = "1.17.0", features = ["full"] }
 uuid = { version = "0.8.2", features = ["serde"] }
 walkdir = "2.3.2"
-obkv = "0.2.0"
+whoami = { version = "1.2.1", optional = true }
-pin-project = "1.0.8"
-whoami = { version = "1.1.3", optional = true }
-reqwest = { version = "0.11.4", features = ["json", "rustls-tls"], default-features = false, optional = true }
-sysinfo = "0.20.2"
-derivative = "2.2.0"
-fs_extra = "1.2.0"
-atomic_refcell = "0.1.8"

 [dev-dependencies]
-actix-rt = "2.2.0"
+actix-rt = "2.7.0"
-mockall = "0.10.2"
-paste = "1.0.5"
-nelson = { git = "https://github.com/MarinPostma/nelson.git", rev = "675f13885548fb415ead8fbb447e9e6d9314000a"}
 meilisearch-error = { path = "../meilisearch-error", features = ["test-traits"] }
+mockall = "0.11.0"
+nelson = { git = "https://github.com/MarinPostma/nelson.git", rev = "675f13885548fb415ead8fbb447e9e6d9314000a"}
+paste = "1.0.6"
 proptest = "1.0.0"
 proptest-derive = "0.3.0"
@@ -3,9 +3,9 @@ use std::io::{BufReader, Seek, SeekFrom, Write};
 use std::path::Path;

 use anyhow::Context;
-use heed::{EnvOpenOptions, RoTxn};
 use indexmap::IndexMap;
 use milli::documents::DocumentBatchReader;
+use milli::heed::{EnvOpenOptions, RoTxn};
 use milli::update::{IndexDocumentsConfig, IndexerConfig};
 use serde::{Deserialize, Serialize};

@@ -21,7 +21,7 @@ pub enum IndexError {

 internal_error!(
     IndexError: std::io::Error,
-    heed::Error,
+    milli::heed::Error,
     fst::Error,
     serde_json::Error,
     update_file_store::UpdateFileStoreError,
@@ -5,12 +5,12 @@ use std::ops::Deref;
 use std::path::Path;
 use std::sync::Arc;

-use chrono::{DateTime, Utc};
-use heed::{EnvOpenOptions, RoTxn};
+use milli::heed::{EnvOpenOptions, RoTxn};
 use milli::update::{IndexerConfig, Setting};
 use milli::{obkv_to_json, FieldDistribution, FieldId};
 use serde::{Deserialize, Serialize};
 use serde_json::{Map, Value};
+use time::OffsetDateTime;
 use uuid::Uuid;

 use crate::EnvSizer;
@@ -24,8 +24,10 @@ pub type Document = Map<String, Value>;
 #[derive(Debug, Serialize, Deserialize, Clone)]
 #[serde(rename_all = "camelCase")]
 pub struct IndexMeta {
-    pub created_at: DateTime<Utc>,
-    pub updated_at: DateTime<Utc>,
+    #[serde(with = "time::serde::rfc3339")]
+    pub created_at: OffsetDateTime,
+    #[serde(with = "time::serde::rfc3339")]
+    pub updated_at: OffsetDateTime,
     pub primary_key: Option<String>,
 }

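`OffsetDateTime` has no serde representation that matches the old chrono JSON out of the box, so each timestamp field now opts into the RFC 3339 adapter. A hypothetical struct mirroring the `IndexMeta` change (assumes the "serde-well-known" time feature enabled in the Cargo.toml hunk above, plus serde_json for the demo):

use serde::{Deserialize, Serialize};
use time::OffsetDateTime;

#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct Meta {
    // Serialized as "createdAt": "2022-03-21T13:45:12Z", matching the old chrono output.
    #[serde(with = "time::serde::rfc3339")]
    created_at: OffsetDateTime,
}

fn main() {
    let meta = Meta { created_at: OffsetDateTime::now_utc() };
    println!("{}", serde_json::to_string(&meta).unwrap());
}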
@@ -35,7 +37,7 @@ impl IndexMeta {
         Self::new_txn(index, &txn)
     }

-    pub fn new_txn(index: &Index, txn: &heed::RoTxn) -> Result<Self> {
+    pub fn new_txn(index: &Index, txn: &milli::heed::RoTxn) -> Result<Self> {
         let created_at = index.created_at(txn)?;
         let updated_at = index.updated_at(txn)?;
         let primary_key = index.primary_key(txn)?.map(String::from);
@@ -248,7 +250,7 @@ impl Index {

     fn fields_to_display<S: AsRef<str>>(
         &self,
-        txn: &heed::RoTxn,
+        txn: &milli::heed::RoTxn,
         attributes_to_retrieve: &Option<Vec<S>>,
         fields_ids_map: &milli::FieldsIdsMap,
     ) -> Result<Vec<FieldId>> {
@@ -276,7 +278,7 @@ impl Index {
         let _txn = self.write_txn()?;
         self.inner
             .env
-            .copy_to_path(dst, heed::CompactionOption::Enabled)?;
+            .copy_to_path(dst, milli::heed::CompactionOption::Enabled)?;
         Ok(())
     }
 }
@@ -176,7 +176,7 @@ pub struct Facets {
 impl Index {
     fn update_primary_key_txn<'a, 'b>(
         &'a self,
-        txn: &mut heed::RwTxn<'a, 'b>,
+        txn: &mut milli::heed::RwTxn<'a, 'b>,
         primary_key: String,
     ) -> Result<IndexMeta> {
         let mut builder = milli::update::Settings::new(txn, self, self.indexer_config.as_ref());
@@ -3,9 +3,10 @@ use std::path::{Path, PathBuf};
 use std::sync::Arc;

 use async_stream::stream;
-use chrono::Utc;
 use futures::{lock::Mutex, stream::StreamExt};
 use log::{error, trace};
+use time::macros::format_description;
+use time::OffsetDateTime;
 use tokio::sync::{mpsc, oneshot, RwLock};

 use super::error::{DumpActorError, Result};
@@ -29,7 +30,11 @@ pub struct DumpActor {

 /// Generate uid from creation date
 fn generate_uid() -> String {
-    Utc::now().format("%Y%m%d-%H%M%S%3f").to_string()
+    OffsetDateTime::now_utc()
+        .format(format_description!(
+            "[year repr:full][month repr:numerical][day padding:zero]-[hour padding:zero][minute padding:zero][second padding:zero][subsecond digits:3]"
+        ))
+        .unwrap()
 }

 impl DumpActor {
@@ -154,3 +159,33 @@ impl DumpActor {
         }
     }
 }

+#[cfg(test)]
+mod test {
+    use super::*;
+
+    #[test]
+    fn test_generate_uid() {
+        let current = OffsetDateTime::now_utc();
+
+        let uid = generate_uid();
+        let (date, time) = uid.split_once('-').unwrap();
+
+        let date = time::Date::parse(
+            date,
+            &format_description!("[year repr:full][month repr:numerical][day padding:zero]"),
+        )
+        .unwrap();
+        let time = time::Time::parse(
+            time,
+            &format_description!(
+                "[hour padding:zero][minute padding:zero][second padding:zero][subsecond digits:3]"
+            ),
+        )
+        .unwrap();
+        let datetime = time::PrimitiveDateTime::new(date, time);
+        let datetime = datetime.assume_utc();
+
+        assert!(current - datetime < time::Duration::SECOND);
+    }
+}
@@ -1,8 +1,8 @@
 use anyhow::bail;
-use chrono::{DateTime, Utc};
 use meilisearch_error::Code;
 use milli::update::IndexDocumentsMethod;
 use serde::{Deserialize, Serialize};
+use time::OffsetDateTime;
 use uuid::Uuid;

 use crate::index::{Settings, Unchecked};
@@ -51,7 +51,8 @@ pub enum UpdateMeta {
 pub struct Enqueued {
     pub update_id: u64,
     pub meta: UpdateMeta,
-    pub enqueued_at: DateTime<Utc>,
+    #[serde(with = "time::serde::rfc3339")]
+    pub enqueued_at: OffsetDateTime,
     pub content: Option<Uuid>,
 }

@@ -59,7 +60,8 @@ pub struct Enqueued {
 #[serde(rename_all = "camelCase")]
 pub struct Processed {
     pub success: UpdateResult,
-    pub processed_at: DateTime<Utc>,
+    #[serde(with = "time::serde::rfc3339")]
+    pub processed_at: OffsetDateTime,
     #[serde(flatten)]
     pub from: Processing,
 }
@@ -69,7 +71,8 @@ pub struct Processed {
 pub struct Processing {
     #[serde(flatten)]
     pub from: Enqueued,
-    pub started_processing_at: DateTime<Utc>,
+    #[serde(with = "time::serde::rfc3339")]
+    pub started_processing_at: OffsetDateTime,
 }

 #[derive(Debug, Serialize, Deserialize, Clone)]
@@ -77,7 +80,8 @@ pub struct Processing {
 pub struct Aborted {
     #[serde(flatten)]
     pub from: Enqueued,
-    pub aborted_at: DateTime<Utc>,
+    #[serde(with = "time::serde::rfc3339")]
+    pub aborted_at: OffsetDateTime,
 }

 #[derive(Debug, Serialize, Deserialize)]
@@ -86,7 +90,8 @@ pub struct Failed {
     #[serde(flatten)]
     pub from: Processing,
     pub error: ResponseError,
-    pub failed_at: DateTime<Utc>,
+    #[serde(with = "time::serde::rfc3339")]
+    pub failed_at: OffsetDateTime,
 }

 #[derive(Debug, Serialize, Deserialize)]
@@ -1,7 +1,7 @@
-use chrono::{DateTime, Utc};
 use meilisearch_error::{Code, ResponseError};
 use milli::update::IndexDocumentsMethod;
 use serde::{Deserialize, Serialize};
+use time::OffsetDateTime;
 use uuid::Uuid;

 use crate::index::{Settings, Unchecked};
@@ -107,7 +107,8 @@ pub enum UpdateMeta {
 pub struct Enqueued {
     pub update_id: u64,
     pub meta: Update,
-    pub enqueued_at: DateTime<Utc>,
+    #[serde(with = "time::serde::rfc3339")]
+    pub enqueued_at: OffsetDateTime,
 }

 impl Enqueued {
@@ -122,7 +123,8 @@ impl Enqueued {
 #[serde(rename_all = "camelCase")]
 pub struct Processed {
     pub success: v2::UpdateResult,
-    pub processed_at: DateTime<Utc>,
+    #[serde(with = "time::serde::rfc3339")]
+    pub processed_at: OffsetDateTime,
     #[serde(flatten)]
     pub from: Processing,
 }
@@ -144,7 +146,8 @@ impl Processed {
 pub struct Processing {
     #[serde(flatten)]
     pub from: Enqueued,
-    pub started_processing_at: DateTime<Utc>,
+    #[serde(with = "time::serde::rfc3339")]
+    pub started_processing_at: OffsetDateTime,
 }

 impl Processing {
@@ -163,7 +166,8 @@ pub struct Failed {
     pub from: Processing,
     pub msg: String,
     pub code: Code,
-    pub failed_at: DateTime<Utc>,
+    #[serde(with = "time::serde::rfc3339")]
+    pub failed_at: OffsetDateTime,
 }

 impl Failed {
@@ -18,7 +18,7 @@ pub enum DumpActorError {
 }

 internal_error!(
-    DumpActorError: heed::Error,
+    DumpActorError: milli::heed::Error,
     std::io::Error,
     tokio::task::JoinError,
     tokio::sync::oneshot::error::RecvError,
@@ -1,9 +1,9 @@
 use std::path::Path;
 use std::sync::Arc;

-use heed::EnvOpenOptions;
 use log::info;
 use meilisearch_auth::AuthController;
+use milli::heed::EnvOpenOptions;

 use crate::analytics;
 use crate::index_controller::dump_actor::Metadata;
@@ -3,9 +3,9 @@ use std::path::{Path, PathBuf};
 use std::sync::Arc;

 use anyhow::bail;
-use chrono::{DateTime, Utc};
 use log::{info, trace};
 use serde::{Deserialize, Serialize};
+use time::OffsetDateTime;

 pub use actor::DumpActor;
 pub use handle_impl::*;
@@ -40,7 +40,8 @@ pub struct Metadata {
     db_version: String,
     index_db_size: usize,
     update_db_size: usize,
-    dump_date: DateTime<Utc>,
+    #[serde(with = "time::serde::rfc3339")]
+    dump_date: OffsetDateTime,
 }

 impl Metadata {
@@ -49,7 +50,7 @@ impl Metadata {
             db_version: env!("CARGO_PKG_VERSION").to_string(),
             index_db_size,
             update_db_size,
-            dump_date: Utc::now(),
+            dump_date: OffsetDateTime::now_utc(),
         }
     }
 }
@@ -144,7 +145,7 @@ impl MetadataVersion {
         }
     }

-    pub fn dump_date(&self) -> Option<&DateTime<Utc>> {
+    pub fn dump_date(&self) -> Option<&OffsetDateTime> {
         match self {
             MetadataVersion::V1(_) => None,
             MetadataVersion::V2(meta) | MetadataVersion::V3(meta) | MetadataVersion::V4(meta) => {
@@ -169,9 +170,13 @@ pub struct DumpInfo {
     pub status: DumpStatus,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub error: Option<String>,
-    started_at: DateTime<Utc>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    finished_at: Option<DateTime<Utc>>,
+    #[serde(with = "time::serde::rfc3339")]
+    started_at: OffsetDateTime,
+    #[serde(
+        skip_serializing_if = "Option::is_none",
+        with = "time::serde::rfc3339::option"
+    )]
+    finished_at: Option<OffsetDateTime>,
 }

 impl DumpInfo {
@@ -180,19 +185,19 @@ impl DumpInfo {
             uid,
             status,
             error: None,
-            started_at: Utc::now(),
+            started_at: OffsetDateTime::now_utc(),
             finished_at: None,
         }
     }

     pub fn with_error(&mut self, error: String) {
         self.status = DumpStatus::Failed;
-        self.finished_at = Some(Utc::now());
+        self.finished_at = Some(OffsetDateTime::now_utc());
         self.error = Some(error);
     }

     pub fn done(&mut self) {
-        self.finished_at = Some(Utc::now());
+        self.finished_at = Some(OffsetDateTime::now_utc());
         self.status = DumpStatus::Done;
     }

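For optional timestamps like `finished_at`, the adapter is `time::serde::rfc3339::option`, combined with `skip_serializing_if` so a still-running dump omits the field rather than emitting null. A reduced sketch of the pattern (hypothetical struct name; serde_json only for the demo):

use serde::Serialize;
use time::OffsetDateTime;

#[derive(Serialize)]
struct Info {
    #[serde(
        skip_serializing_if = "Option::is_none",
        with = "time::serde::rfc3339::option"
    )]
    finished_at: Option<OffsetDateTime>,
}

fn main() {
    // Prints {} because the None field is skipped entirely.
    println!("{}", serde_json::to_string(&Info { finished_at: None }).unwrap());
}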
@@ -8,11 +8,11 @@ use std::time::Duration;

 use actix_web::error::PayloadError;
 use bytes::Bytes;
-use chrono::{DateTime, Utc};
 use futures::Stream;
 use futures::StreamExt;
 use milli::update::IndexDocumentsMethod;
 use serde::{Deserialize, Serialize};
+use time::OffsetDateTime;
 use tokio::sync::{mpsc, RwLock};
 use tokio::task::spawn_blocking;
 use tokio::time::sleep;
@@ -48,8 +48,8 @@ pub type Payload = Box<
     dyn Stream<Item = std::result::Result<Bytes, PayloadError>> + Send + Sync + 'static + Unpin,
 >;

-pub fn open_meta_env(path: &Path, size: usize) -> heed::Result<heed::Env> {
-    let mut options = heed::EnvOpenOptions::new();
+pub fn open_meta_env(path: &Path, size: usize) -> milli::heed::Result<milli::heed::Env> {
+    let mut options = milli::heed::EnvOpenOptions::new();
     options.map_size(size);
     options.max_dbs(20);
     options.open(path)
@@ -114,7 +115,8 @@ impl fmt::Display for DocumentAdditionFormat {
 #[serde(rename_all = "camelCase")]
 pub struct Stats {
     pub database_size: u64,
-    pub last_update: Option<DateTime<Utc>>,
+    #[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
+    pub last_update: Option<OffsetDateTime>,
     pub indexes: BTreeMap<String, IndexStats>,
 }

@ -582,7 +583,7 @@ where
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub async fn get_all_stats(&self, search_rules: &SearchRules) -> Result<Stats> {
|
pub async fn get_all_stats(&self, search_rules: &SearchRules) -> Result<Stats> {
|
||||||
let mut last_task: Option<DateTime<_>> = None;
|
let mut last_task: Option<OffsetDateTime> = None;
|
||||||
let mut indexes = BTreeMap::new();
|
let mut indexes = BTreeMap::new();
|
||||||
let mut database_size = 0;
|
let mut database_size = 0;
|
||||||
let processing_tasks = self.scheduler.read().await.get_processing_tasks().await?;
|
let processing_tasks = self.scheduler.read().await.get_processing_tasks().await?;
|
||||||
@@ -45,7 +45,7 @@ impl From<OneshotRecvError> for IndexResolverError {
 }

 internal_error!(
-    IndexResolverError: heed::Error,
+    IndexResolverError: milli::heed::Error,
     uuid::Error,
     std::io::Error,
     tokio::task::JoinError,
@@ -4,8 +4,8 @@ use std::io::{BufRead, BufReader, Write};
 use std::path::{Path, PathBuf};
 use std::sync::Arc;

-use heed::types::{SerdeBincode, Str};
-use heed::{CompactionOption, Database, Env};
+use milli::heed::types::{SerdeBincode, Str};
+use milli::heed::{CompactionOption, Database, Env};
 use serde::{Deserialize, Serialize};
 use uuid::Uuid;

@@ -56,7 +56,7 @@ impl Drop for HeedMetaStore {
 }

 impl HeedMetaStore {
-    pub fn new(env: Arc<heed::Env>) -> Result<Self> {
+    pub fn new(env: Arc<milli::heed::Env>) -> Result<Self> {
         let db = env.create_database(Some("uuids"))?;
         Ok(Self { env, db })
     }
@@ -153,7 +153,7 @@ impl HeedMetaStore {
         Ok(())
     }

-    pub fn load_dump(src: impl AsRef<Path>, env: Arc<heed::Env>) -> Result<()> {
+    pub fn load_dump(src: impl AsRef<Path>, env: Arc<milli::heed::Env>) -> Result<()> {
         let src_indexes = src.as_ref().join(UUIDS_DB_PATH).join("data.jsonl");
         let indexes = File::open(&src_indexes)?;
         let mut indexes = BufReader::new(indexes);
@@ -6,14 +6,14 @@ use std::convert::{TryFrom, TryInto};
 use std::path::Path;
 use std::sync::Arc;

-use chrono::Utc;
 use error::{IndexResolverError, Result};
-use heed::Env;
 use index_store::{IndexStore, MapIndexStore};
 use meilisearch_error::ResponseError;
 use meta_store::{HeedMetaStore, IndexMetaStore};
+use milli::heed::Env;
 use milli::update::{DocumentDeletionResult, IndexerConfig};
 use serde::{Deserialize, Serialize};
+use time::OffsetDateTime;
 use tokio::sync::oneshot;
 use tokio::task::spawn_blocking;
 use uuid::Uuid;
@@ -39,7 +39,7 @@ pub fn create_index_resolver(
     path: impl AsRef<Path>,
     index_size: usize,
     indexer_opts: &IndexerOpts,
-    meta_env: Arc<heed::Env>,
+    meta_env: Arc<milli::heed::Env>,
     file_store: UpdateFileStore,
 ) -> anyhow::Result<HardStateIndexResolver> {
     let uuid_store = HeedMetaStore::new(meta_env)?;
@@ -115,18 +115,19 @@ where
             self.process_document_addition_batch(batch).await
         } else {
             if let Some(task) = batch.tasks.first_mut() {
-                task.events.push(TaskEvent::Processing(Utc::now()));
+                task.events
+                    .push(TaskEvent::Processing(OffsetDateTime::now_utc()));

                 match self.process_task(task).await {
                     Ok(success) => {
                         task.events.push(TaskEvent::Succeded {
                             result: success,
-                            timestamp: Utc::now(),
+                            timestamp: OffsetDateTime::now_utc(),
                         });
                     }
                     Err(err) => task.events.push(TaskEvent::Failed {
                         error: err.into(),
-                        timestamp: Utc::now(),
+                        timestamp: OffsetDateTime::now_utc(),
                     }),
                 }
             }
@@ -225,7 +226,7 @@ where

         // If the index doesn't exist and we are not allowed to create it with the first
         // task, we must fails the whole batch.
-        let now = Utc::now();
+        let now = OffsetDateTime::now_utc();
         let index = match index {
             Ok(index) => index,
             Err(e) => {
@@ -253,17 +254,17 @@ where

         let event = match result {
             Ok(Ok(result)) => TaskEvent::Succeded {
-                timestamp: Utc::now(),
+                timestamp: OffsetDateTime::now_utc(),
                 result: TaskResult::DocumentAddition {
                     indexed_documents: result.indexed_documents,
                 },
             },
             Ok(Err(e)) => TaskEvent::Failed {
-                timestamp: Utc::now(),
+                timestamp: OffsetDateTime::now_utc(),
                 error: e.into(),
             },
             Err(e) => TaskEvent::Failed {
-                timestamp: Utc::now(),
+                timestamp: OffsetDateTime::now_utc(),
                 error: IndexResolverError::from(e).into(),
             },
         };
@@ -524,7 +525,7 @@ mod test {
         };
         if primary_key.is_some() {
             mocker.when::<String, IndexResult<IndexMeta>>("update_primary_key")
-                .then(move |_| Ok(IndexMeta{ created_at: Utc::now(), updated_at: Utc::now(), primary_key: None }));
+                .then(move |_| Ok(IndexMeta{ created_at: OffsetDateTime::now_utc(), updated_at: OffsetDateTime::now_utc(), primary_key: None }));
         }
         mocker.when::<(IndexDocumentsMethod, Option<String>, UpdateFileStore, IntoIter<Uuid>), IndexResult<DocumentAdditionResult>>("update_documents")
             .then(move |(_, _, _, _)| result());
@@ -569,7 +570,7 @@ mod test {
                 | TaskContent::IndexCreation { primary_key } => {
                     if primary_key.is_some() {
                         let result = move || if !index_op_fails {
-                            Ok(IndexMeta{ created_at: Utc::now(), updated_at: Utc::now(), primary_key: None })
+                            Ok(IndexMeta{ created_at: OffsetDateTime::now_utc(), updated_at: OffsetDateTime::now_utc(), primary_key: None })
                         } else {
                             // return this error because it's easy to generate...
                             Err(IndexError::DocumentNotFound("a doc".into()))
@@ -640,7 +641,7 @@ mod test {
         let update_file_store = UpdateFileStore::mock(mocker);
         let index_resolver = IndexResolver::new(uuid_store, index_store, update_file_store);

-        let batch = Batch { id: 1, created_at: Utc::now(), tasks: vec![task.clone()] };
+        let batch = Batch { id: 1, created_at: OffsetDateTime::now_utc(), tasks: vec![task.clone()] };
         let result = index_resolver.process_batch(batch).await;

         // Test for some expected output scenarios:
@@ -13,8 +13,8 @@ mod update_file_store;
 use std::path::Path;

 pub use index_controller::MeiliSearch;

 pub use milli;
+pub use milli::heed;

 mod compression;
 pub mod document_formats;
@@ -25,7 +25,7 @@ pub trait EnvSizer {
     fn size(&self) -> u64;
 }

-impl EnvSizer for heed::Env {
+impl EnvSizer for milli::heed::Env {
     fn size(&self) -> u64 {
         WalkDir::new(self.path())
             .into_iter()
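Re-exporting `milli::heed` from the crate root is what lets every plain `heed::...` path in this commit migrate to `milli::heed::...`: the workspace now shares the single heed version milli itself builds against instead of a separately pinned git dependency. A sketch of how a consumer can lean on the re-export (the `open` helper is hypothetical; `EnvOpenOptions` and `milli::heed::Result` both appear in the hunks of this commit, and the crate is assumed to be consumed as `meilisearch_lib`):

use meilisearch_lib::heed::{Env, EnvOpenOptions, Result};

// Hypothetical helper: open an LMDB environment through the re-exported heed,
// so the caller never names a heed version of its own.
fn open(path: &std::path::Path) -> Result<Env> {
    let mut options = EnvOpenOptions::new();
    options.map_size(100 * 1024 * 1024); // arbitrary 100 MiB example
    options.max_dbs(20);
    options.open(path)
}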
@@ -48,24 +48,24 @@ pub struct IndexerOpts {
 pub struct SchedulerConfig {
     /// enable the autobatching experimental feature
     #[clap(long, hide = true)]
-    pub enable_autobatching: bool,
+    pub enable_auto_batching: bool,

     // The maximum number of updates of the same type that can be batched together.
     // If unspecified, this is unlimited. A value of 0 is interpreted as 1.
-    #[clap(long, requires = "enable-autobatching", hide = true)]
+    #[clap(long, requires = "enable-auto-batching", hide = true)]
     pub max_batch_size: Option<usize>,

     // The maximum number of documents in a document batch. Since batches must contain at least one
     // update for the scheduler to make progress, the number of documents in a batch will be at
     // least the number of documents of its first update.
-    #[clap(long, requires = "enable-autobatching", hide = true)]
+    #[clap(long, requires = "enable-auto-batching", hide = true)]
     pub max_documents_per_batch: Option<usize>,

     /// Debounce duration in seconds
     ///
     /// When a new task is enqueued, the scheduler waits for `debounce_duration_sec` seconds for new updates before
     /// starting to process a batch of updates.
-    #[clap(long, requires = "enable-autobatching", hide = true)]
+    #[clap(long, requires = "enable-auto-batching", hide = true)]
     pub debounce_duration_sec: Option<u64>,
 }

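The field rename from `enable_autobatching` to `enable_auto_batching` also forces every `requires = "..."` string to change, because clap 3's derive turns the field name into the kebab-case argument name that `requires` refers to. A reduced sketch of that relationship (hypothetical struct, clap 3.x as pinned in the Cargo.toml hunk above):

use clap::Parser;

#[derive(Parser, Debug)]
struct Config {
    /// enable the autobatching experimental feature
    #[clap(long, hide = true)]
    enable_auto_batching: bool,

    // Rejected by clap unless --enable-auto-batching is also passed.
    #[clap(long, requires = "enable-auto-batching", hide = true)]
    max_batch_size: Option<usize>,
}

fn main() {
    let config = Config::parse_from(["test", "--enable-auto-batching", "--max-batch-size", "10"]);
    println!("{:?}", config.max_batch_size);
}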
@@ -149,7 +149,7 @@ impl SnapshotJob {
         let env = open_meta_env(&self.src_path, self.meta_env_size)?;

         let dst = path.join("data.mdb");
-        env.copy_to_path(dst, heed::CompactionOption::Enabled)?;
+        env.copy_to_path(dst, milli::heed::CompactionOption::Enabled)?;

         Ok(())
     }
@@ -180,12 +180,12 @@ impl SnapshotJob {

         let dst = dst.join("data.mdb");

-        let mut options = heed::EnvOpenOptions::new();
+        let mut options = milli::heed::EnvOpenOptions::new();
         options.map_size(self.index_size);
         let index = milli::Index::new(options, entry.path())?;
         index
             .env
-            .copy_to_path(dst, heed::CompactionOption::Enabled)?;
+            .copy_to_path(dst, milli::heed::CompactionOption::Enabled)?;
     }

     Ok(())
@@ -198,7 +198,7 @@ impl SnapshotJob {
         let dst = dst.join("data.mdb");

         let env = open_auth_store_env(&auth_path)?;
-        env.copy_to_path(dst, heed::CompactionOption::Enabled)?;
+        env.copy_to_path(dst, milli::heed::CompactionOption::Enabled)?;

         Ok(())
     }
@@ -1,4 +1,4 @@
-use chrono::{DateTime, Utc};
+use time::OffsetDateTime;

 use super::task::Task;

@@ -7,7 +7,7 @@ pub type BatchId = u64;
 #[derive(Debug)]
 pub struct Batch {
     pub id: BatchId,
-    pub created_at: DateTime<Utc>,
+    pub created_at: OffsetDateTime,
     pub tasks: Vec<Task>,
 }

@@ -16,7 +16,7 @@ pub enum TaskError {
 }

 internal_error!(
-    TaskError: heed::Error,
+    TaskError: milli::heed::Error,
     JoinError,
     std::io::Error,
     serde_json::Error,
@@ -6,8 +6,8 @@ use std::sync::Arc;
 use std::time::Duration;

 use atomic_refcell::AtomicRefCell;
-use chrono::Utc;
 use milli::update::IndexDocumentsMethod;
+use time::OffsetDateTime;
 use tokio::sync::{watch, RwLock};

 use crate::options::SchedulerConfig;
@@ -218,7 +218,7 @@ impl Scheduler {
         let debounce_time = config.debounce_duration_sec;

         // Disable autobatching
-        if !config.enable_autobatching {
+        if !config.enable_auto_batching {
             config.max_batch_size = Some(1);
         }

@@ -357,7 +357,7 @@ impl Scheduler {
                 tasks.iter_mut().for_each(|t| {
                     t.events.push(TaskEvent::Batched {
                         batch_id: id,
-                        timestamp: Utc::now(),
+                        timestamp: OffsetDateTime::now_utc(),
                     })
                 });

@@ -365,7 +365,7 @@ impl Scheduler {

                 let batch = Batch {
                     id,
-                    created_at: Utc::now(),
+                    created_at: OffsetDateTime::now_utc(),
                     tasks,
                 };

@@ -1,9 +1,9 @@
 use std::path::PathBuf;

-use chrono::{DateTime, Utc};
 use meilisearch_error::ResponseError;
 use milli::update::{DocumentAdditionResult, IndexDocumentsMethod};
 use serde::{Deserialize, Serialize};
+use time::OffsetDateTime;
 use tokio::sync::oneshot;
 use uuid::Uuid;

@@ -36,22 +36,33 @@ impl From<DocumentAdditionResult> for TaskResult {
 #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
 #[cfg_attr(test, derive(proptest_derive::Arbitrary))]
 pub enum TaskEvent {
-    Created(#[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))] DateTime<Utc>),
+    Created(
+        #[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))]
+        #[serde(with = "time::serde::rfc3339")]
+        OffsetDateTime,
+    ),
     Batched {
         #[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))]
-        timestamp: DateTime<Utc>,
+        #[serde(with = "time::serde::rfc3339")]
+        timestamp: OffsetDateTime,
         batch_id: BatchId,
     },
-    Processing(#[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))] DateTime<Utc>),
+    Processing(
+        #[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))]
+        #[serde(with = "time::serde::rfc3339")]
+        OffsetDateTime,
+    ),
     Succeded {
         result: TaskResult,
         #[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))]
-        timestamp: DateTime<Utc>,
+        #[serde(with = "time::serde::rfc3339")]
+        timestamp: OffsetDateTime,
     },
     Failed {
         error: ResponseError,
         #[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))]
-        timestamp: DateTime<Utc>,
+        #[serde(with = "time::serde::rfc3339")]
+        timestamp: OffsetDateTime,
     },
 }

@@ -165,7 +176,7 @@ mod test {
         ]
     }

-    pub(super) fn datetime_strategy() -> impl Strategy<Value = DateTime<Utc>> {
+    pub(super) fn datetime_strategy() -> impl Strategy<Value = OffsetDateTime> {
-        Just(Utc::now())
+        Just(OffsetDateTime::now_utc())
     }
 }
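proptest cannot derive an arbitrary `OffsetDateTime`, so the `Arbitrary` derive on `TaskEvent` keeps pointing every timestamp at a custom strategy; only the strategy's value type changes. A sketch of that pattern in isolation (hypothetical test name):

use proptest::prelude::*;
use time::OffsetDateTime;

// Degenerate strategy: every generated value is "now", which is all the
// serialization round-trip tests need from a timestamp.
fn datetime_strategy() -> impl Strategy<Value = OffsetDateTime> {
    Just(OffsetDateTime::now_utc())
}

proptest! {
    #[test]
    fn generated_timestamps_are_utc(ts in datetime_strategy()) {
        prop_assert_eq!(ts.offset(), time::UtcOffset::UTC);
    }
}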
@@ -5,9 +5,9 @@ use std::io::{BufWriter, Write};
 use std::path::Path;
 use std::sync::Arc;

-use chrono::Utc;
-use heed::{Env, RwTxn};
 use log::debug;
+use milli::heed::{Env, RwTxn};
+use time::OffsetDateTime;

 use super::error::TaskError;
 use super::task::{Task, TaskContent, TaskId};
@@ -61,7 +61,7 @@ impl Clone for TaskStore {
 }

 impl TaskStore {
-    pub fn new(env: Arc<heed::Env>) -> Result<Self> {
+    pub fn new(env: Arc<milli::heed::Env>) -> Result<Self> {
         let store = Arc::new(Store::new(env)?);
         Ok(Self { store })
     }
@@ -72,7 +72,7 @@ impl TaskStore {
         let task = tokio::task::spawn_blocking(move || -> Result<Task> {
             let mut txn = store.wtxn()?;
             let next_task_id = store.next_task_id(&mut txn)?;
-            let created_at = TaskEvent::Created(Utc::now());
+            let created_at = TaskEvent::Created(OffsetDateTime::now_utc());
             let task = Task {
                 id: next_task_id,
                 index_uid,
@@ -248,7 +248,7 @@ pub mod test {
     }

     impl MockTaskStore {
-        pub fn new(env: Arc<heed::Env>) -> Result<Self> {
+        pub fn new(env: Arc<milli::heed::Env>) -> Result<Self> {
             Ok(Self::Real(TaskStore::new(env)?))
         }

@@ -1,5 +1,5 @@
 #[allow(clippy::upper_case_acronyms)]
-type BEU64 = heed::zerocopy::U64<heed::byteorder::BE>;
+type BEU64 = milli::heed::zerocopy::U64<milli::heed::byteorder::BE>;

 const UID_TASK_IDS: &str = "uid_task_id";
 const TASKS: &str = "tasks";
@@ -12,8 +12,8 @@ use std::ops::Range;
 use std::result::Result as StdResult;
 use std::sync::Arc;

-use heed::types::{ByteSlice, OwnedType, SerdeJson, Unit};
-use heed::{BytesDecode, BytesEncode, Database, Env, RoTxn, RwTxn};
+use milli::heed::types::{ByteSlice, OwnedType, SerdeJson, Unit};
+use milli::heed::{BytesDecode, BytesEncode, Database, Env, RoTxn, RwTxn};

 use crate::tasks::task::{Task, TaskId};

@@ -73,7 +73,7 @@ impl Store {
     /// be in an invalid state, with dangling processing tasks.
     /// You want to patch all un-finished tasks and put them in your pending
     /// queue with the `reset_and_return_unfinished_update` method.
-    pub fn new(env: Arc<heed::Env>) -> Result<Self> {
+    pub fn new(env: Arc<milli::heed::Env>) -> Result<Self> {
         let uids_task_ids = env.create_database(Some(UID_TASK_IDS))?;
         let tasks = env.create_database(Some(TASKS))?;

@@ -130,7 +130,7 @@ impl Store {
         let range = from..limit
             .map(|limit| (limit as u64).saturating_add(from))
             .unwrap_or(u64::MAX);
-        let iter: Box<dyn Iterator<Item = StdResult<_, heed::Error>>> = match filter {
+        let iter: Box<dyn Iterator<Item = StdResult<_, milli::heed::Error>>> = match filter {
             Some(
                 ref filter @ TaskFilter {
                     indexes: Some(_), ..
@@ -150,7 +150,7 @@ impl Store {
             ),
         };

-        let apply_fitler = |task: &StdResult<_, heed::Error>| match task {
+        let apply_fitler = |task: &StdResult<_, milli::heed::Error>| match task {
             Ok(ref t) => filter
                 .as_ref()
                 .and_then(|filter| filter.filter_fn.as_ref())
@@ -162,7 +162,7 @@ impl Store {
         let tasks = iter
             .filter(apply_fitler)
             .take(limit.unwrap_or(usize::MAX))
-            .try_fold::<_, _, StdResult<_, heed::Error>>(Vec::new(), |mut v, task| {
+            .try_fold::<_, _, StdResult<_, milli::heed::Error>>(Vec::new(), |mut v, task| {
                 v.push(task?);
                 Ok(v)
             })?;
@@ -172,7 +172,7 @@ impl Store {

     fn compute_candidates(
         &self,
-        txn: &heed::RoTxn,
+        txn: &milli::heed::RoTxn,
         filter: &TaskFilter,
         range: Range<TaskId>,
     ) -> Result<BinaryHeap<TaskId>> {
@@ -188,10 +188,10 @@ impl Store {
             self.uids_task_ids
                 .remap_key_type::<ByteSlice>()
                 .rev_prefix_iter(txn, &index_uid)?
-                .map(|entry| -> StdResult<_, heed::Error> {
+                .map(|entry| -> StdResult<_, milli::heed::Error> {
                     let (key, _) = entry?;
-                    let (_, id) =
-                        IndexUidTaskIdCodec::bytes_decode(key).ok_or(heed::Error::Decoding)?;
+                    let (_, id) = IndexUidTaskIdCodec::bytes_decode(key)
+                        .ok_or(milli::heed::Error::Decoding)?;
                     Ok(id)
                 })
                 .skip_while(|entry| {
@@ -212,7 +212,7 @@ impl Store {
                     // if we encounter an error we returns true to collect it later
                     .unwrap_or(true)
                 })
-                .try_for_each::<_, StdResult<(), heed::Error>>(|id| {
+                .try_for_each::<_, StdResult<(), milli::heed::Error>>(|id| {
                     candidates.push(id?);
                     Ok(())
                 })?;
@@ -225,8 +225,8 @@ impl Store {

 #[cfg(test)]
 pub mod test {
-    use heed::EnvOpenOptions;
     use itertools::Itertools;
+    use milli::heed::EnvOpenOptions;
     use nelson::Mocker;
     use proptest::collection::vec;
     use proptest::prelude::*;
@@ -244,10 +244,10 @@ pub mod test {
         Fake(Mocker),
     }

-    pub struct TmpEnv(TempDir, Arc<heed::Env>);
+    pub struct TmpEnv(TempDir, Arc<milli::heed::Env>);

     impl TmpEnv {
-        pub fn env(&self) -> Arc<heed::Env> {
+        pub fn env(&self) -> Arc<milli::heed::Env> {
             self.1.clone()
         }
     }
@@ -264,7 +264,7 @@ pub mod test {
     }

     impl MockStore {
-        pub fn new(env: Arc<heed::Env>) -> Result<Self> {
+        pub fn new(env: Arc<milli::heed::Env>) -> Result<Self> {
             Ok(Self::Real(Store::new(env)?))
         }

@@ -1,7 +1,7 @@
 use std::sync::Arc;
 use std::time::Duration;

-use chrono::Utc;
+use time::OffsetDateTime;
 use tokio::sync::{watch, RwLock};
 use tokio::time::interval_at;

@@ -63,7 +63,8 @@ where
         match pending {
             Pending::Batch(mut batch) => {
                 for task in &mut batch.tasks {
-                    task.events.push(TaskEvent::Processing(Utc::now()));
+                    task.events
+                        .push(TaskEvent::Processing(OffsetDateTime::now_utc()));
                 }

                 batch.tasks = {