Mirror of https://github.com/meilisearch/meilisearch.git (synced 2024-11-22 18:17:39 +08:00)
Merge #2096

2096: feat(auth): Tenant token r=Kerollmops a=ManyTheFish

Make Meilisearch support JWT authentication signed with Meilisearch API keys using the HS256, HS384, or HS512 algorithms.

Related spec: [specifications#89](https://github.com/meilisearch/specifications/pull/89) ([rendered](https://github.com/meilisearch/specifications/blob/scoped-api-keys/text/0089-tenant-tokens.md))

Fix #1991

Co-authored-by: ManyTheFish <many@meilisearch.com>
This commit is contained in commit 622c15e825.
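The tenant token described in this PR is a standard JWT signed with an existing Meilisearch API key; its claims carry the search rules, an optional expiry, and a prefix of the parent key (the test helper added in this diff uses the first eight characters). Below is a minimal client-side sketch of producing such a token with the jsonwebtoken crate; the parent key value is hypothetical and the claim names mirror the `Claims` struct introduced in this diff.

use chrono::Utc;
use jsonwebtoken::{encode, EncodingKey, Header};
use serde::Serialize;
use serde_json::json;

// Claim names follow the `Claims` struct added by this PR (camelCase on the wire).
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct TenantClaims {
    search_rules: serde_json::Value,
    exp: Option<i64>,
    api_key_prefix: String,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Hypothetical parent API key; in practice this is a key returned by the /keys route.
    let parent_api_key = "0123456789abcdef0123456789abcdef";

    let claims = TenantClaims {
        // Restrict the token to the `sales` index and force a filter on every search.
        search_rules: json!({ "sales": { "filter": "color = blue" } }),
        // Optional Unix timestamp; `None` would make the token non-expiring.
        exp: Some(Utc::now().timestamp() + 3600),
        // First eight characters of the parent key, used server-side to look up the key id.
        api_key_prefix: parent_api_key[..8].to_string(),
    };

    // Signed with the full parent API key; Header::default() uses HS256,
    // and the server also accepts HS384 and HS512.
    let token = encode(
        &Header::default(),
        &claims,
        &EncodingKey::from_secret(parent_api_key.as_bytes()),
    )?;
    println!("Authorization: Bearer {}", token);
    Ok(())
}

The resulting string is sent as a regular `Authorization: Bearer` header; per the policy added in this diff, only the search action accepts such a token.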
Cargo.lock (generated, 62 changed lines)
@@ -47,7 +47,7 @@ dependencies = [
"actix-tls",
"actix-utils",
"ahash 0.7.6",
"base64",
"base64 0.13.0",
"bitflags",
"brotli",
"bytes",

@@ -386,6 +386,12 @@ dependencies = [
"rustc-demangle",
]

[[package]]
name = "base64"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff"

[[package]]
name = "base64"
version = "0.13.0"

@@ -1521,6 +1527,20 @@ dependencies = [
"wasm-bindgen",
]

[[package]]
name = "jsonwebtoken"
version = "7.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "afabcc15e437a6484fc4f12d0fd63068fe457bf93f1c148d3d9649c60b103f32"
dependencies = [
"base64 0.12.3",
"pem",
"ring",
"serde",
"serde_json",
"simple_asn1",
]

[[package]]
name = "language-tags"
version = "0.3.2"

@@ -1715,6 +1735,7 @@ dependencies = [
"http",
"indexmap",
"itertools",
"jsonwebtoken",
"log",
"maplit",
"meilisearch-auth",

@@ -2038,6 +2059,17 @@ dependencies = [
"winapi",
]

[[package]]
name = "num-bigint"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "090c7f9998ee0ff65aa5b723e4009f7b217707f1fb5ea551329cc4d6231fb304"
dependencies = [
"autocfg",
"num-integer",
"num-traits",
]

[[package]]
name = "num-integer"
version = "0.1.44"

@@ -2183,6 +2215,17 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ec91767ecc0a0bbe558ce8c9da33c068066c57ecc8bb8477ef8c1ad3ef77c27"

[[package]]
name = "pem"
version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd56cbd21fea48d0c440b41cd69c589faacade08c992d9a54e471b79d0fd13eb"
dependencies = [
"base64 0.13.0",
"once_cell",
"regex",
]

[[package]]
name = "percent-encoding"
version = "2.1.0"

@@ -2543,7 +2586,7 @@ version = "0.11.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87f242f1488a539a79bac6dbe7c8609ae43b7914b7736210f239a37cccb32525"
dependencies = [
"base64",
"base64 0.13.0",
"bytes",
"encoding_rs",
"futures-core",

@@ -2641,7 +2684,7 @@ version = "0.19.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35edb675feee39aec9c99fa5ff985081995a06d594114ae14cbe797ad7b7a6d7"
dependencies = [
"base64",
"base64 0.13.0",
"log",
"ring",
"sct 0.6.1",

@@ -2666,7 +2709,7 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5eebeaeb360c87bfb72e84abdb3447159c0eaececf1bef2aecd65a8be949d1c9"
dependencies = [
"base64",
"base64 0.13.0",
]

[[package]]

@@ -2848,6 +2891,17 @@ dependencies = [
"libc",
]

[[package]]
name = "simple_asn1"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "692ca13de57ce0613a363c8c2f1de925adebc81b04c923ac60c5488bb44abe4b"
dependencies = [
"chrono",
"num-bigint",
"num-traits",
]

[[package]]
name = "siphasher"
version = "0.3.9"
@@ -4,11 +4,13 @@ pub mod error;
mod key;
mod store;

use std::collections::{HashMap, HashSet};
use std::path::Path;
use std::str::from_utf8;
use std::sync::Arc;

use chrono::Utc;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use sha2::{Digest, Sha256};

@@ -54,7 +56,11 @@ impl AuthController {
.ok_or_else(|| AuthControllerError::ApiKeyNotFound(key.as_ref().to_string()))
}

pub fn get_key_filters(&self, key: impl AsRef<str>) -> Result<AuthFilter> {
pub fn get_key_filters(
&self,
key: impl AsRef<str>,
search_rules: Option<SearchRules>,
) -> Result<AuthFilter> {
let mut filters = AuthFilter::default();
if self
.master_key

@@ -67,7 +73,22 @@ impl AuthController {
.ok_or_else(|| AuthControllerError::ApiKeyNotFound(key.as_ref().to_string()))?;

if !key.indexes.iter().any(|i| i.as_str() == "*") {
filters.indexes = Some(key.indexes);
filters.search_rules = match search_rules {
// Intersect search_rules with parent key authorized indexes.
Some(search_rules) => SearchRules::Map(
key.indexes
.into_iter()
.filter_map(|index| {
search_rules
.get_index_search_rules(&index)
.map(|index_search_rules| (index, Some(index_search_rules)))
})
.collect(),
),
None => SearchRules::Set(key.indexes.into_iter().collect()),
};
} else if let Some(search_rules) = search_rules {
filters.search_rules = search_rules;
}

filters.allow_index_creation = key

@@ -97,50 +118,149 @@ impl AuthController {
self.master_key.as_ref()
}

pub fn authenticate(&self, token: &[u8], action: Action, index: Option<&[u8]>) -> Result<bool> {
if let Some(master_key) = &self.master_key {
if let Some((id, exp)) = self
/// Generate a valid key from a key id using the current master key.
/// Returns None if no master key has been set.
pub fn generate_key(&self, id: &str) -> Option<String> {
self.master_key
.as_ref()
.map(|master_key| generate_key(master_key.as_bytes(), id))
}

/// Check if the provided key is authorized to make a specific action
/// without checking if the key is valid.
pub fn is_key_authorized(
&self,
key: &[u8],
action: Action,
index: Option<&str>,
) -> Result<bool> {
match self
.store
// check if the key has access to all indexes.
.get_expiration_date(token, action, None)?
.get_expiration_date(key, action, None)?
.or(match index {
// else check if the key has access to the requested index.
Some(index) => self.store.get_expiration_date(token, action, Some(index))?,
// or to any index if no index has been requested.
None => self.store.prefix_first_expiration_date(token, action)?,
})
{
let id = from_utf8(&id)?;
if exp.map_or(true, |exp| Utc::now() < exp)
&& generate_key(master_key.as_bytes(), id).as_bytes() == token
{
return Ok(true);
Some(index) => {
self.store
.get_expiration_date(key, action, Some(index.as_bytes()))?
}
// or to any index if no index has been requested.
None => self.store.prefix_first_expiration_date(key, action)?,
}) {
// check expiration date.
Some(Some(exp)) => Ok(Utc::now() < exp),
// no expiration date.
Some(None) => Ok(true),
// action or index forbidden.
None => Ok(false),
}
}

/// Check if the provided key is valid
/// without checking if the key is authorized to make a specific action.
pub fn is_key_valid(&self, key: &[u8]) -> Result<bool> {
if let Some(id) = self.store.get_key_id(key) {
let id = from_utf8(&id)?;
if let Some(generated) = self.generate_key(id) {
return Ok(generated.as_bytes() == key);
}
}

Ok(false)
}

/// Check if the provided key is valid
/// and is authorized to make a specific action.
pub fn authenticate(&self, key: &[u8], action: Action, index: Option<&str>) -> Result<bool> {
if self.is_key_authorized(key, action, index)? {
self.is_key_valid(key)
} else {
Ok(false)
}
}
}

pub struct AuthFilter {
pub indexes: Option<Vec<String>>,
pub search_rules: SearchRules,
pub allow_index_creation: bool,
}

impl Default for AuthFilter {
fn default() -> Self {
Self {
indexes: None,
search_rules: SearchRules::default(),
allow_index_creation: true,
}
}
}

pub fn generate_key(master_key: &[u8], uid: &str) -> String {
let key = [uid.as_bytes(), master_key].concat();
/// Transparent wrapper around a list of allowed indexes with the search rules to apply for each.
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(untagged)]
pub enum SearchRules {
Set(HashSet<String>),
Map(HashMap<String, Option<IndexSearchRules>>),
}

impl Default for SearchRules {
fn default() -> Self {
Self::Set(Some("*".to_string()).into_iter().collect())
}
}

impl SearchRules {
pub fn is_index_authorized(&self, index: &str) -> bool {
match self {
Self::Set(set) => set.contains("*") || set.contains(index),
Self::Map(map) => map.contains_key("*") || map.contains_key(index),
}
}

pub fn get_index_search_rules(&self, index: &str) -> Option<IndexSearchRules> {
match self {
Self::Set(set) => {
if set.contains("*") || set.contains(index) {
Some(IndexSearchRules::default())
} else {
None
}
}
Self::Map(map) => map
.get(index)
.or_else(|| map.get("*"))
.map(|isr| isr.clone().unwrap_or_default()),
}
}
}

impl IntoIterator for SearchRules {
type Item = (String, IndexSearchRules);
type IntoIter = Box<dyn Iterator<Item = Self::Item>>;

fn into_iter(self) -> Self::IntoIter {
match self {
Self::Set(array) => {
Box::new(array.into_iter().map(|i| (i, IndexSearchRules::default())))
}
Self::Map(map) => {
Box::new(map.into_iter().map(|(i, isr)| (i, isr.unwrap_or_default())))
}
}
}
}

/// Contains the rules to apply on the top of the search query for a specific index.
///
/// filter: search filter to apply in addition to query filters.
#[derive(Debug, Serialize, Deserialize, Default, Clone)]
pub struct IndexSearchRules {
pub filter: Option<serde_json::Value>,
}

fn generate_key(master_key: &[u8], keyid: &str) -> String {
let key = [keyid.as_bytes(), master_key].concat();
let sha = Sha256::digest(&key);
format!("{}{:x}", uid, sha)
format!("{}{:x}", keyid, sha)
}

fn generate_default_keys(store: &HeedAuthStore) -> Result<()> {
@@ -103,18 +103,18 @@ impl HeedAuthStore {

pub fn get_api_key(&self, key: impl AsRef<str>) -> Result<Option<Key>> {
let rtxn = self.env.read_txn()?;
match try_split_array_at::<_, KEY_ID_LENGTH>(key.as_ref().as_bytes()) {
Some((id, _)) => self.keys.get(&rtxn, id).map_err(|e| e.into()),
match self.get_key_id(key.as_ref().as_bytes()) {
Some(id) => self.keys.get(&rtxn, &id).map_err(|e| e.into()),
None => Ok(None),
}
}

pub fn delete_api_key(&self, key: impl AsRef<str>) -> Result<bool> {
let mut wtxn = self.env.write_txn()?;
let existing = match try_split_array_at(key.as_ref().as_bytes()) {
Some((id, _)) => {
let existing = self.keys.delete(&mut wtxn, id)?;
self.delete_key_from_inverted_db(&mut wtxn, id)?;
let existing = match self.get_key_id(key.as_ref().as_bytes()) {
Some(id) => {
let existing = self.keys.delete(&mut wtxn, &id)?;
self.delete_key_from_inverted_db(&mut wtxn, &id)?;
existing
}
None => false,

@@ -140,15 +140,12 @@ impl HeedAuthStore {
key: &[u8],
action: Action,
index: Option<&[u8]>,
) -> Result<Option<(KeyId, Option<DateTime<Utc>>)>> {
) -> Result<Option<Option<DateTime<Utc>>>> {
let rtxn = self.env.read_txn()?;
match try_split_array_at::<_, KEY_ID_LENGTH>(key) {
Some((id, _)) => {
let tuple = (id, &action, index);
Ok(self
.action_keyid_index_expiration
.get(&rtxn, &tuple)?
.map(|expiration| (*id, expiration)))
match self.get_key_id(key) {
Some(id) => {
let tuple = (&id, &action, index);
Ok(self.action_keyid_index_expiration.get(&rtxn, &tuple)?)
}
None => Ok(None),
}

@@ -158,22 +155,26 @@ impl HeedAuthStore {
&self,
key: &[u8],
action: Action,
) -> Result<Option<(KeyId, Option<DateTime<Utc>>)>> {
) -> Result<Option<Option<DateTime<Utc>>>> {
let rtxn = self.env.read_txn()?;
match try_split_array_at::<_, KEY_ID_LENGTH>(key) {
Some((id, _)) => {
let tuple = (id, &action, None);
match self.get_key_id(key) {
Some(id) => {
let tuple = (&id, &action, None);
Ok(self
.action_keyid_index_expiration
.prefix_iter(&rtxn, &tuple)?
.next()
.transpose()?
.map(|(_, expiration)| (*id, expiration)))
.map(|(_, expiration)| expiration))
}
None => Ok(None),
}
}

pub fn get_key_id(&self, key: &[u8]) -> Option<KeyId> {
try_split_array_at::<_, KEY_ID_LENGTH>(key).map(|(id, _)| *id)
}

fn delete_key_from_inverted_db(&self, wtxn: &mut RwTxn, key: &KeyId) -> Result<()> {
let mut iter = self
.action_keyid_index_expiration
@@ -44,6 +44,7 @@ heed = { git = "https://github.com/Kerollmops/heed", tag = "v0.12.1" }
http = "0.2.4"
indexmap = { version = "1.7.0", features = ["serde-1"] }
itertools = "0.10.1"
jsonwebtoken = "7"
log = "0.4.14"
meilisearch-auth = { path = "../meilisearch-auth" }
meilisearch-error = { path = "../meilisearch-error" }
@@ -8,6 +8,7 @@ use actix_web::http::header::USER_AGENT;
use actix_web::HttpRequest;
use chrono::{DateTime, Utc};
use http::header::CONTENT_TYPE;
use meilisearch_auth::SearchRules;
use meilisearch_lib::index::{SearchQuery, SearchResult};
use meilisearch_lib::index_controller::Stats;
use meilisearch_lib::MeiliSearch;

@@ -280,7 +281,7 @@ impl Segment {
}

async fn tick(&mut self, meilisearch: MeiliSearch) {
if let Ok(stats) = meilisearch.get_all_stats(&None).await {
if let Ok(stats) = meilisearch.get_all_stats(&SearchRules::default()).await {
let _ = self
.batcher
.push(Identify {
@@ -50,8 +50,8 @@ impl<P: Policy + 'static, D: 'static + Clone> FromRequest for GuardedData<P, D>
Some("Bearer") => {
// TODO: find a less hardcoded way?
let index = req.match_info().get("index_uid");
let token = type_token.next().unwrap_or("unknown");
match P::authenticate(auth, token, index) {
match type_token.next() {
Some(token) => match P::authenticate(auth, token, index) {
Some(filters) => match req.app_data::<D>().cloned() {
Some(data) => ok(Self {
data,

@@ -64,6 +64,10 @@ impl<P: Policy + 'static, D: 'static + Clone> FromRequest for GuardedData<P, D>
let token = token.to_string();
err(AuthenticationError::InvalidToken(token).into())
}
},
None => {
err(AuthenticationError::InvalidToken("unknown".to_string()).into())
}
}
}
_otherwise => err(AuthenticationError::MissingAuthorizationHeader.into()),

@@ -90,11 +94,22 @@ pub trait Policy {
}

pub mod policies {
use chrono::Utc;
use jsonwebtoken::{dangerous_insecure_decode, decode, Algorithm, DecodingKey, Validation};
use once_cell::sync::Lazy;
use serde::{Deserialize, Serialize};

use crate::extractors::authentication::Policy;
use meilisearch_auth::{Action, AuthController, AuthFilter};
use meilisearch_auth::{Action, AuthController, AuthFilter, SearchRules};
// reexport actions in policies in order to be used in routes configuration.
pub use meilisearch_auth::actions;

pub static TENANT_TOKEN_VALIDATION: Lazy<Validation> = Lazy::new(|| Validation {
validate_exp: false,
algorithms: vec![Algorithm::HS256, Algorithm::HS384, Algorithm::HS512],
..Default::default()
});

pub struct MasterPolicy;

impl Policy for MasterPolicy {

@@ -126,15 +141,81 @@ pub mod policies {
return Some(AuthFilter::default());
}

// authenticate if token is allowed.
if let Some(action) = Action::from_repr(A) {
let index = index.map(|i| i.as_bytes());
// Tenant token
if let Some(filters) = ActionPolicy::<A>::authenticate_tenant_token(&auth, token, index)
{
return Some(filters);
} else if let Some(action) = Action::from_repr(A) {
// API key
if let Ok(true) = auth.authenticate(token.as_bytes(), action, index) {
return auth.get_key_filters(token).ok();
return auth.get_key_filters(token, None).ok();
}
}

None
}
}

impl<const A: u8> ActionPolicy<A> {
fn authenticate_tenant_token(
auth: &AuthController,
token: &str,
index: Option<&str>,
) -> Option<AuthFilter> {
// Only search action can be accessed by a tenant token.
if A != actions::SEARCH {
return None;
}

// get token fields without validating it.
let Claims {
search_rules,
exp,
api_key_prefix,
} = dangerous_insecure_decode::<Claims>(token).ok()?.claims;

// Check index access if an index restriction is provided.
if let Some(index) = index {
if !search_rules.is_index_authorized(index) {
return None;
}
}

// Check if token is expired.
if let Some(exp) = exp {
if Utc::now().timestamp() > exp {
return None;
}
}

// check if parent key is authorized to do the action.
if auth
.is_key_authorized(api_key_prefix.as_bytes(), Action::Search, index)
.ok()?
{
// Check if tenant token is valid.
let key = auth.generate_key(&api_key_prefix)?;
decode::<Claims>(
token,
&DecodingKey::from_secret(key.as_bytes()),
&TENANT_TOKEN_VALIDATION,
)
.ok()?;

return auth
.get_key_filters(api_key_prefix, Some(search_rules))
.ok();
}

None
}
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct Claims {
search_rules: SearchRules,
exp: Option<i64>,
api_key_prefix: String,
}
}
@@ -3,7 +3,7 @@ use std::str;
use actix_web::{web, HttpRequest, HttpResponse};
use chrono::SecondsFormat;

use meilisearch_auth::{generate_key, Action, AuthController, Key};
use meilisearch_auth::{Action, AuthController, Key};
use serde::{Deserialize, Serialize};
use serde_json::Value;

@@ -30,7 +30,7 @@ pub async fn create_api_key(
_req: HttpRequest,
) -> Result<HttpResponse, ResponseError> {
let key = auth_controller.create_key(body.into_inner()).await?;
let res = KeyView::from_key(key, auth_controller.get_master_key());
let res = KeyView::from_key(key, &auth_controller);

Ok(HttpResponse::Created().json(res))
}

@@ -42,7 +42,7 @@ pub async fn list_api_keys(
let keys = auth_controller.list_keys().await?;
let res: Vec<_> = keys
.into_iter()
.map(|k| KeyView::from_key(k, auth_controller.get_master_key()))
.map(|k| KeyView::from_key(k, &auth_controller))
.collect();

Ok(HttpResponse::Ok().json(KeyListView::from(res)))

@@ -52,9 +52,8 @@ pub async fn get_api_key(
auth_controller: GuardedData<MasterPolicy, AuthController>,
path: web::Path<AuthParam>,
) -> Result<HttpResponse, ResponseError> {
// keep 8 first characters that are the ID of the API key.
let key = auth_controller.get_key(&path.api_key).await?;
let res = KeyView::from_key(key, auth_controller.get_master_key());
let res = KeyView::from_key(key, &auth_controller);

Ok(HttpResponse::Ok().json(res))
}

@@ -65,10 +64,9 @@ pub async fn patch_api_key(
path: web::Path<AuthParam>,
) -> Result<HttpResponse, ResponseError> {
let key = auth_controller
// keep 8 first characters that are the ID of the API key.
.update_key(&path.api_key, body.into_inner())
.await?;
let res = KeyView::from_key(key, auth_controller.get_master_key());
let res = KeyView::from_key(key, &auth_controller);

Ok(HttpResponse::Ok().json(res))
}

@@ -77,7 +75,6 @@ pub async fn delete_api_key(
auth_controller: GuardedData<MasterPolicy, AuthController>,
path: web::Path<AuthParam>,
) -> Result<HttpResponse, ResponseError> {
// keep 8 first characters that are the ID of the API key.
auth_controller.delete_key(&path.api_key).await?;

Ok(HttpResponse::NoContent().finish())

@@ -101,12 +98,9 @@ struct KeyView {
}

impl KeyView {
fn from_key(key: Key, master_key: Option<&String>) -> Self {
fn from_key(key: Key, auth: &AuthController) -> Self {
let key_id = str::from_utf8(&key.id).unwrap();
let generated_key = match master_key {
Some(master_key) => generate_key(master_key.as_bytes(), key_id),
None => generate_key(&[], key_id),
};
let generated_key = auth.generate_key(key_id).unwrap_or_default();

KeyView {
description: key.description,
@@ -41,14 +41,13 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
pub async fn list_indexes(
data: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, MeiliSearch>,
) -> Result<HttpResponse, ResponseError> {
let filters = data.filters();
let mut indexes = data.list_indexes().await?;
if let Some(indexes_filter) = filters.indexes.as_ref() {
indexes = indexes
let search_rules = &data.filters().search_rules;
let indexes: Vec<_> = data
.list_indexes()
.await?
.into_iter()
.filter(|i| indexes_filter.contains(&i.uid))
.filter(|i| search_rules.is_index_authorized(&i.uid))
.collect();
}

debug!("returns: {:?}", indexes);
Ok(HttpResponse::Ok().json(indexes))
@@ -1,5 +1,6 @@
use actix_web::{web, HttpRequest, HttpResponse};
use log::debug;
use meilisearch_auth::IndexSearchRules;
use meilisearch_error::ResponseError;
use meilisearch_lib::index::{default_crop_length, SearchQuery, DEFAULT_SEARCH_LIMIT};
use meilisearch_lib::MeiliSearch;

@@ -79,6 +80,26 @@ impl From<SearchQueryGet> for SearchQuery {
}
}

/// Incorporate search rules in search query
fn add_search_rules(query: &mut SearchQuery, rules: IndexSearchRules) {
query.filter = match (query.filter.take(), rules.filter) {
(None, rules_filter) => rules_filter,
(filter, None) => filter,
(Some(filter), Some(rules_filter)) => {
let filter = match filter {
Value::Array(filter) => filter,
filter => vec![filter],
};
let rules_filter = match rules_filter {
Value::Array(rules_filter) => rules_filter,
rules_filter => vec![rules_filter],
};

Some(Value::Array([filter, rules_filter].concat()))
}
}
}

// TODO: TAMO: split on :asc, and :desc, instead of doing some weird things

/// Transform the sort query parameter into something that matches the post expected format.

@@ -113,11 +134,21 @@ pub async fn search_with_url_query(
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
debug!("called with params: {:?}", params);
let query: SearchQuery = params.into_inner().into();
let mut query: SearchQuery = params.into_inner().into();

let index_uid = path.into_inner();
// Tenant token search_rules.
if let Some(search_rules) = meilisearch
.filters()
.search_rules
.get_index_search_rules(&index_uid)
{
add_search_rules(&mut query, search_rules);
}

let mut aggregate = SearchAggregator::from_query(&query, &req);

let search_result = meilisearch.search(path.into_inner(), query).await;
let search_result = meilisearch.search(index_uid, query).await;
if let Ok(ref search_result) = search_result {
aggregate.succeed(search_result);
}

@@ -140,12 +171,22 @@ pub async fn search_with_post(
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
let query = params.into_inner();
let mut query = params.into_inner();
debug!("search called with params: {:?}", query);

let index_uid = path.into_inner();
// Tenant token search_rules.
if let Some(search_rules) = meilisearch
.filters()
.search_rules
.get_index_search_rules(&index_uid)
{
add_search_rules(&mut query, search_rules);
}

let mut aggregate = SearchAggregator::from_query(&query, &req);

let search_result = meilisearch.search(path.into_inner(), query).await;
let search_result = meilisearch.search(index_uid, query).await;
if let Ok(ref search_result) = search_result {
aggregate.succeed(search_result);
}
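To make the `add_search_rules` helper above concrete: when both the incoming query and the tenant token rule define a filter, the two are concatenated into a single top-level array, which the engine evaluates as a conjunction. A small illustrative sketch (the filter values are made up):

use serde_json::{json, Value};

fn main() {
    // Filter sent by the client and filter coming from the tenant token rule.
    let query_filter = json!("color = yellow");
    let rule_filter = json!("color = blue");

    // Equivalent of the (Some(filter), Some(rules_filter)) arm above:
    // both values end up in one array, i.e. "query filter AND rule filter".
    let merged = Value::Array(vec![query_filter, rule_filter]);
    assert_eq!(merged, json!(["color = yellow", "color = blue"]));
}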
@@ -127,9 +127,9 @@ pub async fn running() -> HttpResponse {
async fn get_stats(
meilisearch: GuardedData<ActionPolicy<{ actions::STATS_GET }>, MeiliSearch>,
) -> Result<HttpResponse, ResponseError> {
let filters = meilisearch.filters();
let search_rules = &meilisearch.filters().search_rules;

let response = meilisearch.get_all_stats(&filters.indexes).await?;
let response = meilisearch.get_all_stats(search_rules).await?;

debug!("returns: {:?}", response);
Ok(HttpResponse::Ok().json(response))
@@ -25,13 +25,16 @@ async fn get_tasks(
Some(&req),
);

let filters = meilisearch.filters().indexes.as_ref().map(|indexes| {
let search_rules = &meilisearch.filters().search_rules;
let filters = if search_rules.is_index_authorized("*") {
None
} else {
let mut filters = TaskFilter::default();
for index in indexes {
filters.filter_index(index.to_string());
for (index, _policy) in search_rules.clone() {
filters.filter_index(index);
}
filters
});
Some(filters)
};

let tasks: TaskListView = meilisearch
.list_tasks(filters, None, None)

@@ -56,13 +59,16 @@ async fn get_task(
Some(&req),
);

let filters = meilisearch.filters().indexes.as_ref().map(|indexes| {
let search_rules = &meilisearch.filters().search_rules;
let filters = if search_rules.is_index_authorized("*") {
None
} else {
let mut filters = TaskFilter::default();
for index in indexes {
filters.filter_index(index.to_string());
for (index, _policy) in search_rules.clone() {
filters.filter_index(index);
}
filters
});
Some(filters)
};

let task: TaskView = meilisearch
.get_task(task_id.into_inner(), filters)
@@ -5,7 +5,7 @@ use once_cell::sync::Lazy;
use serde_json::{json, Value};
use std::collections::{HashMap, HashSet};

static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'static str>>> =
pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'static str>>> =
Lazy::new(|| {
hashmap! {
("POST", "/indexes/products/search") => hashset!{"search", "*"},

@@ -49,7 +49,7 @@ static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'stat
}
});

static ALL_ACTIONS: Lazy<HashSet<&'static str>> = Lazy::new(|| {
pub static ALL_ACTIONS: Lazy<HashSet<&'static str>> = Lazy::new(|| {
AUTHORIZATIONS
.values()
.cloned()
@@ -1,6 +1,7 @@
mod api_keys;
mod authorization;
mod payload;
mod tenant_token;

use crate::common::Server;
use actix_web::http::StatusCode;
meilisearch-http/tests/auth/tenant_token.rs (new file, 574 lines)
@@ -0,0 +1,574 @@
|
||||
use crate::common::Server;
|
||||
use chrono::{Duration, Utc};
|
||||
use maplit::hashmap;
|
||||
use once_cell::sync::Lazy;
|
||||
use serde_json::{json, Value};
|
||||
use std::collections::HashMap;
|
||||
|
||||
use super::authorization::{ALL_ACTIONS, AUTHORIZATIONS};
|
||||
|
||||
fn generate_tenant_token(parent_key: impl AsRef<str>, mut body: HashMap<&str, Value>) -> String {
|
||||
use jsonwebtoken::{encode, EncodingKey, Header};
|
||||
|
||||
let key_id = &parent_key.as_ref()[..8];
|
||||
body.insert("apiKeyPrefix", json!(key_id));
|
||||
encode(
|
||||
&Header::default(),
|
||||
&body,
|
||||
&EncodingKey::from_secret(parent_key.as_ref().as_bytes()),
|
||||
)
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
|
||||
json!([
|
||||
{
|
||||
"title": "Shazam!",
|
||||
"id": "287947",
|
||||
"color": ["green", "blue"]
|
||||
},
|
||||
{
|
||||
"title": "Captain Marvel",
|
||||
"id": "299537",
|
||||
"color": ["yellow", "blue"]
|
||||
},
|
||||
{
|
||||
"title": "Escape Room",
|
||||
"id": "522681",
|
||||
"color": ["yellow", "red"]
|
||||
},
|
||||
{
|
||||
"title": "How to Train Your Dragon: The Hidden World",
|
||||
"id": "166428",
|
||||
"color": ["green", "red"]
|
||||
},
|
||||
{
|
||||
"title": "Glass",
|
||||
"id": "450465",
|
||||
"color": ["blue", "red"]
|
||||
}
|
||||
])
|
||||
});
|
||||
|
||||
static INVALID_RESPONSE: Lazy<Value> = Lazy::new(|| {
|
||||
json!({"message": "The provided API key is invalid.",
|
||||
"code": "invalid_api_key",
|
||||
"type": "auth",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_api_key"
|
||||
})
|
||||
});
|
||||
|
||||
static ACCEPTED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
|
||||
vec![
|
||||
json!({
|
||||
"indexes": ["*"],
|
||||
"actions": ["*"],
|
||||
"expiresAt": Utc::now() + Duration::days(1)
|
||||
}),
|
||||
json!({
|
||||
"indexes": ["*"],
|
||||
"actions": ["search"],
|
||||
"expiresAt": Utc::now() + Duration::days(1)
|
||||
}),
|
||||
json!({
|
||||
"indexes": ["sales"],
|
||||
"actions": ["*"],
|
||||
"expiresAt": Utc::now() + Duration::days(1)
|
||||
}),
|
||||
json!({
|
||||
"indexes": ["sales"],
|
||||
"actions": ["search"],
|
||||
"expiresAt": Utc::now() + Duration::days(1)
|
||||
}),
|
||||
]
|
||||
});
|
||||
|
||||
static REFUSED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
|
||||
vec![
|
||||
// no search action
|
||||
json!({
|
||||
"indexes": ["*"],
|
||||
"actions": ALL_ACTIONS.iter().cloned().filter(|a| *a != "search" && *a != "*").collect::<Vec<_>>(),
|
||||
"expiresAt": Utc::now() + Duration::days(1)
|
||||
}),
|
||||
json!({
|
||||
"indexes": ["sales"],
|
||||
"actions": ALL_ACTIONS.iter().cloned().filter(|a| *a != "search" && *a != "*").collect::<Vec<_>>(),
|
||||
"expiresAt": Utc::now() + Duration::days(1)
|
||||
}),
|
||||
// bad index
|
||||
json!({
|
||||
"indexes": ["products"],
|
||||
"actions": ["*"],
|
||||
"expiresAt": Utc::now() + Duration::days(1)
|
||||
}),
|
||||
json!({
|
||||
"indexes": ["products"],
|
||||
"actions": ["search"],
|
||||
"expiresAt": Utc::now() + Duration::days(1)
|
||||
}),
|
||||
]
|
||||
});
|
||||
|
||||
macro_rules! compute_autorized_search {
|
||||
($tenant_tokens:expr, $filter:expr, $expected_count:expr) => {
|
||||
let mut server = Server::new_auth().await;
|
||||
server.use_api_key("MASTER_KEY");
|
||||
let index = server.index("sales");
|
||||
let documents = DOCUMENTS.clone();
|
||||
index.add_documents(documents, None).await;
|
||||
index.wait_task(0).await;
|
||||
index
|
||||
.update_settings(json!({"filterableAttributes": ["color"]}))
|
||||
.await;
|
||||
index.wait_task(1).await;
|
||||
drop(index);
|
||||
|
||||
for key_content in ACCEPTED_KEYS.iter() {
|
||||
server.use_api_key("MASTER_KEY");
|
||||
let (response, code) = server.add_api_key(key_content.clone()).await;
|
||||
assert_eq!(code, 201);
|
||||
let key = response["key"].as_str().unwrap();
|
||||
|
||||
for tenant_token in $tenant_tokens.iter() {
|
||||
let web_token = generate_tenant_token(&key, tenant_token.clone());
|
||||
server.use_api_key(&web_token);
|
||||
let index = server.index("sales");
|
||||
index
|
||||
.search(json!({ "filter": $filter }), |response, code| {
|
||||
assert_eq!(
|
||||
code, 200,
|
||||
"{} using tenant_token: {:?} generated with parent_key: {:?}",
|
||||
response, tenant_token, key_content
|
||||
);
|
||||
assert_eq!(
|
||||
response["hits"].as_array().unwrap().len(),
|
||||
$expected_count,
|
||||
"{} using tenant_token: {:?} generated with parent_key: {:?}",
|
||||
response,
|
||||
tenant_token,
|
||||
key_content
|
||||
);
|
||||
})
|
||||
.await;
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! compute_forbidden_search {
|
||||
($tenant_tokens:expr, $parent_keys:expr) => {
|
||||
let mut server = Server::new_auth().await;
|
||||
server.use_api_key("MASTER_KEY");
|
||||
let index = server.index("sales");
|
||||
let documents = DOCUMENTS.clone();
|
||||
index.add_documents(documents, None).await;
|
||||
index.wait_task(0).await;
|
||||
drop(index);
|
||||
|
||||
for key_content in $parent_keys.iter() {
|
||||
server.use_api_key("MASTER_KEY");
|
||||
let (response, code) = server.add_api_key(key_content.clone()).await;
|
||||
assert_eq!(code, 201, "{:?}", response);
|
||||
let key = response["key"].as_str().unwrap();
|
||||
|
||||
for tenant_token in $tenant_tokens.iter() {
|
||||
let web_token = generate_tenant_token(&key, tenant_token.clone());
|
||||
server.use_api_key(&web_token);
|
||||
let index = server.index("sales");
|
||||
index
|
||||
.search(json!({}), |response, code| {
|
||||
assert_eq!(
|
||||
response,
|
||||
INVALID_RESPONSE.clone(),
|
||||
"{} using tenant_token: {:?} generated with parent_key: {:?}",
|
||||
response,
|
||||
tenant_token,
|
||||
key_content
|
||||
);
|
||||
assert_eq!(
|
||||
code, 403,
|
||||
"{} using tenant_token: {:?} generated with parent_key: {:?}",
|
||||
response, tenant_token, key_content
|
||||
);
|
||||
})
|
||||
.await;
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
async fn search_authorized_simple_token() {
|
||||
let tenant_tokens = vec![
|
||||
hashmap! {
|
||||
"searchRules" => json!({"*": {}}),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!(["*"]),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": {}}),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!(["sales"]),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"*": {}}),
|
||||
"exp" => Value::Null
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"*": Value::Null}),
|
||||
"exp" => Value::Null
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!(["*"]),
|
||||
"exp" => Value::Null
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": {}}),
|
||||
"exp" => Value::Null
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": Value::Null}),
|
||||
"exp" => Value::Null
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!(["sales"]),
|
||||
"exp" => Value::Null
|
||||
},
|
||||
];
|
||||
|
||||
compute_autorized_search!(tenant_tokens, {}, 5);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
async fn search_authorized_filter_token() {
|
||||
let tenant_tokens = vec![
|
||||
hashmap! {
|
||||
"searchRules" => json!({"*": {"filter": "color = blue"}}),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": {"filter": "color = blue"}}),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"*": {"filter": ["color = blue"]}}),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": {"filter": ["color = blue"]}}),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
// filter on sales should override filters on *
|
||||
hashmap! {
|
||||
"searchRules" => json!({
|
||||
"*": {"filter": "color = green"},
|
||||
"sales": {"filter": "color = blue"}
|
||||
}),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({
|
||||
"*": {},
|
||||
"sales": {"filter": "color = blue"}
|
||||
}),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({
|
||||
"*": {"filter": "color = green"},
|
||||
"sales": {"filter": ["color = blue"]}
|
||||
}),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({
|
||||
"*": {},
|
||||
"sales": {"filter": ["color = blue"]}
|
||||
}),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
];
|
||||
|
||||
compute_autorized_search!(tenant_tokens, {}, 3);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
async fn filter_search_authorized_filter_token() {
|
||||
let tenant_tokens = vec![
|
||||
hashmap! {
|
||||
"searchRules" => json!({"*": {"filter": "color = blue"}}),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": {"filter": "color = blue"}}),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"*": {"filter": ["color = blue"]}}),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": {"filter": ["color = blue"]}}),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
// filter on sales should override filters on *
|
||||
hashmap! {
|
||||
"searchRules" => json!({
|
||||
"*": {"filter": "color = green"},
|
||||
"sales": {"filter": "color = blue"}
|
||||
}),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({
|
||||
"*": {},
|
||||
"sales": {"filter": "color = blue"}
|
||||
}),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({
|
||||
"*": {"filter": "color = green"},
|
||||
"sales": {"filter": ["color = blue"]}
|
||||
}),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({
|
||||
"*": {},
|
||||
"sales": {"filter": ["color = blue"]}
|
||||
}),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
];
|
||||
|
||||
compute_autorized_search!(tenant_tokens, "color = yellow", 1);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
async fn error_search_token_forbidden_parent_key() {
|
||||
let tenant_tokens = vec![
|
||||
hashmap! {
|
||||
"searchRules" => json!({"*": {}}),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"*": Value::Null}),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!(["*"]),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": {}}),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": Value::Null}),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!(["sales"]),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
];
|
||||
|
||||
compute_forbidden_search!(tenant_tokens, REFUSED_KEYS);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
async fn error_search_forbidden_token() {
|
||||
let tenant_tokens = vec![
|
||||
// bad index
|
||||
hashmap! {
|
||||
"searchRules" => json!({"products": {}}),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!(["products"]),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"products": {}}),
|
||||
"exp" => Value::Null
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"products": Value::Null}),
|
||||
"exp" => Value::Null
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!(["products"]),
|
||||
"exp" => Value::Null
|
||||
},
|
||||
// expired token
|
||||
hashmap! {
|
||||
"searchRules" => json!({"*": {}}),
|
||||
"exp" => json!((Utc::now() - Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"*": Value::Null}),
|
||||
"exp" => json!((Utc::now() - Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!(["*"]),
|
||||
"exp" => json!((Utc::now() - Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": {}}),
|
||||
"exp" => json!((Utc::now() - Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": Value::Null}),
|
||||
"exp" => json!((Utc::now() - Duration::hours(1)).timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!(["sales"]),
|
||||
"exp" => json!((Utc::now() - Duration::hours(1)).timestamp())
|
||||
},
|
||||
];
|
||||
|
||||
compute_forbidden_search!(tenant_tokens, ACCEPTED_KEYS);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
async fn error_access_forbidden_routes() {
|
||||
let mut server = Server::new_auth().await;
|
||||
server.use_api_key("MASTER_KEY");
|
||||
|
||||
let content = json!({
|
||||
"indexes": ["*"],
|
||||
"actions": ["*"],
|
||||
"expiresAt": (Utc::now() + Duration::hours(1)),
|
||||
});
|
||||
|
||||
let (response, code) = server.add_api_key(content).await;
|
||||
assert_eq!(code, 201);
|
||||
assert!(response["key"].is_string());
|
||||
|
||||
let key = response["key"].as_str().unwrap();
|
||||
|
||||
let tenant_token = hashmap! {
|
||||
"searchRules" => json!(["*"]),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
};
|
||||
let web_token = generate_tenant_token(&key, tenant_token);
|
||||
server.use_api_key(&web_token);
|
||||
|
||||
for ((method, route), actions) in AUTHORIZATIONS.iter() {
|
||||
if !actions.contains("search") {
|
||||
let (response, code) = server.dummy_request(method, route).await;
|
||||
assert_eq!(response, INVALID_RESPONSE.clone());
|
||||
assert_eq!(code, 403);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
async fn error_access_expired_parent_key() {
|
||||
use std::{thread, time};
|
||||
let mut server = Server::new_auth().await;
|
||||
server.use_api_key("MASTER_KEY");
|
||||
|
||||
let content = json!({
|
||||
"indexes": ["*"],
|
||||
"actions": ["*"],
|
||||
"expiresAt": (Utc::now() + Duration::seconds(1)),
|
||||
});
|
||||
|
||||
let (response, code) = server.add_api_key(content).await;
|
||||
assert_eq!(code, 201);
|
||||
assert!(response["key"].is_string());
|
||||
|
||||
let key = response["key"].as_str().unwrap();
|
||||
|
||||
let tenant_token = hashmap! {
|
||||
"searchRules" => json!(["*"]),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
};
|
||||
let web_token = generate_tenant_token(&key, tenant_token);
|
||||
server.use_api_key(&web_token);
|
||||
|
||||
// test search request while parent_key is not expired
|
||||
let (response, code) = server
|
||||
.dummy_request("POST", "/indexes/products/search")
|
||||
.await;
|
||||
assert_ne!(response, INVALID_RESPONSE.clone());
|
||||
assert_ne!(code, 403);
|
||||
|
||||
// wait until the key is expired.
|
||||
thread::sleep(time::Duration::new(1, 0));
|
||||
|
||||
let (response, code) = server
|
||||
.dummy_request("POST", "/indexes/products/search")
|
||||
.await;
|
||||
assert_eq!(response, INVALID_RESPONSE.clone());
|
||||
assert_eq!(code, 403);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
async fn error_access_modified_token() {
|
||||
let mut server = Server::new_auth().await;
|
||||
server.use_api_key("MASTER_KEY");
|
||||
|
||||
let content = json!({
|
||||
"indexes": ["*"],
|
||||
"actions": ["*"],
|
||||
"expiresAt": (Utc::now() + Duration::hours(1)),
|
||||
});
|
||||
|
||||
let (response, code) = server.add_api_key(content).await;
|
||||
assert_eq!(code, 201);
|
||||
assert!(response["key"].is_string());
|
||||
|
||||
let key = response["key"].as_str().unwrap();
|
||||
|
||||
let tenant_token = hashmap! {
|
||||
"searchRules" => json!(["products"]),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
};
|
||||
let web_token = generate_tenant_token(&key, tenant_token);
|
||||
server.use_api_key(&web_token);
|
||||
|
||||
// test search request while web_token is valid
|
||||
let (response, code) = server
|
||||
.dummy_request("POST", "/indexes/products/search")
|
||||
.await;
|
||||
assert_ne!(response, INVALID_RESPONSE.clone());
|
||||
assert_ne!(code, 403);
|
||||
|
||||
let tenant_token = hashmap! {
|
||||
"searchRules" => json!(["*"]),
|
||||
"exp" => json!((Utc::now() + Duration::hours(1)).timestamp())
|
||||
};
|
||||
|
||||
let alt = generate_tenant_token(&key, tenant_token);
|
||||
let altered_token = [
|
||||
web_token.split('.').next().unwrap(),
|
||||
alt.split('.').nth(1).unwrap(),
|
||||
web_token.split('.').nth(2).unwrap(),
|
||||
]
|
||||
.join(".");
|
||||
|
||||
server.use_api_key(&altered_token);
|
||||
let (response, code) = server
|
||||
.dummy_request("POST", "/indexes/products/search")
|
||||
.await;
|
||||
assert_eq!(response, INVALID_RESPONSE.clone());
|
||||
assert_eq!(code, 403);
|
||||
}
|
@@ -1,3 +1,4 @@
use meilisearch_auth::SearchRules;
use std::collections::BTreeMap;
use std::fmt;
use std::io::Cursor;

@@ -559,17 +560,14 @@ where
Ok(stats)
}

pub async fn get_all_stats(&self, index_filter: &Option<Vec<String>>) -> Result<Stats> {
pub async fn get_all_stats(&self, search_rules: &SearchRules) -> Result<Stats> {
let mut last_task: Option<DateTime<_>> = None;
let mut indexes = BTreeMap::new();
let mut database_size = 0;
let processing_task = self.task_store.get_processing_task().await?;

for (index_uid, index) in self.index_resolver.list().await? {
if index_filter
.as_ref()
.map_or(false, |filter| !filter.contains(&index_uid))
{
if !search_rules.is_index_authorized(&index_uid) {
continue;
}