diff --git a/.github/workflows/publish-binaries.yml b/.github/workflows/publish-binaries.yml
index 971cbe7ac..c7744e35e 100644
--- a/.github/workflows/publish-binaries.yml
+++ b/.github/workflows/publish-binaries.yml
@@ -9,6 +9,7 @@ jobs:
     name: Publish for ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     strategy:
+      fail-fast: false
       matrix:
         os: [ubuntu-18.04, macos-latest, windows-latest]
         include:
diff --git a/.github/workflows/publish-docker-latest.yml b/.github/workflows/publish-docker-latest.yml
index e88bed39d..59cbf9123 100644
--- a/.github/workflows/publish-docker-latest.yml
+++ b/.github/workflows/publish-docker-latest.yml
@@ -7,7 +7,7 @@ name: Publish latest image to Docker Hub
 
 jobs:
   docker-latest:
-    runs-on: self-hosted
+    runs-on: docker
     steps:
       - name: Set up QEMU
         uses: docker/setup-qemu-action@v1
diff --git a/.github/workflows/publish-docker-tag.yml b/.github/workflows/publish-docker-tag.yml
index 05ecb5c58..eca3d1d25 100644
--- a/.github/workflows/publish-docker-tag.yml
+++ b/.github/workflows/publish-docker-tag.yml
@@ -8,7 +8,7 @@ name: Publish tagged image to Docker Hub
 
 jobs:
   docker-tag:
-    runs-on: self-hosted
+    runs-on: docker
     steps:
       - name: Set up QEMU
         uses: docker/setup-qemu-action@v1
diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml
index 21ef08b76..a98e29f44 100644
--- a/.github/workflows/rust.yml
+++ b/.github/workflows/rust.yml
@@ -11,6 +11,7 @@ on:
 
 env:
   CARGO_TERM_COLOR: always
+  RUST_BACKTRACE: 1
 
 jobs:
   tests:
diff --git a/Cargo.lock b/Cargo.lock
index e3635645d..94097618e 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1749,6 +1749,7 @@ dependencies = [
  "itertools",
  "lazy_static",
  "log",
+ "meilisearch-auth",
  "meilisearch-error",
  "milli",
  "mime",
diff --git a/Dockerfile b/Dockerfile
index 0431b5f26..ff88d5bc6 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -35,18 +35,12 @@ RUN $HOME/.cargo/bin/cargo build --release
 # Run
 FROM alpine:3.14
 
-ARG USER=meili
-ENV HOME /home/${USER}
 ENV MEILI_HTTP_ADDR 0.0.0.0:7700
 ENV MEILI_SERVER_PROVIDER docker
 
-# download runtime deps as root and create ${USER}
 RUN apk update --quiet \
-    && apk add -q --no-cache libgcc tini curl \
-    && adduser -D ${USER}
-WORKDIR ${HOME}
-USER ${USER}
-# copy file as ${USER} to ${HOME}
+    && apk add -q --no-cache libgcc tini curl
+
 COPY --from=compiler /meilisearch/target/release/meilisearch .
 
 EXPOSE 7700/tcp
diff --git a/meilisearch-auth/src/action.rs b/meilisearch-auth/src/action.rs
index 59f108fc3..7ffe9b908 100644
--- a/meilisearch-auth/src/action.rs
+++ b/meilisearch-auth/src/action.rs
@@ -14,8 +14,8 @@ pub enum Action {
     DocumentsGet = actions::DOCUMENTS_GET,
     #[serde(rename = "documents.delete")]
     DocumentsDelete = actions::DOCUMENTS_DELETE,
-    #[serde(rename = "indexes.add")]
-    IndexesAdd = actions::INDEXES_ADD,
+    #[serde(rename = "indexes.create")]
+    IndexesAdd = actions::INDEXES_CREATE,
     #[serde(rename = "indexes.get")]
     IndexesGet = actions::INDEXES_GET,
     #[serde(rename = "indexes.update")]
@@ -47,7 +47,7 @@ impl Action {
             DOCUMENTS_ADD => Some(Self::DocumentsAdd),
             DOCUMENTS_GET => Some(Self::DocumentsGet),
             DOCUMENTS_DELETE => Some(Self::DocumentsDelete),
-            INDEXES_ADD => Some(Self::IndexesAdd),
+            INDEXES_CREATE => Some(Self::IndexesAdd),
             INDEXES_GET => Some(Self::IndexesGet),
             INDEXES_UPDATE => Some(Self::IndexesUpdate),
             INDEXES_DELETE => Some(Self::IndexesDelete),
@@ -70,7 +70,7 @@ impl Action {
             Self::DocumentsAdd => DOCUMENTS_ADD,
             Self::DocumentsGet => DOCUMENTS_GET,
             Self::DocumentsDelete => DOCUMENTS_DELETE,
-            Self::IndexesAdd => INDEXES_ADD,
+            Self::IndexesAdd => INDEXES_CREATE,
             Self::IndexesGet => INDEXES_GET,
             Self::IndexesUpdate => INDEXES_UPDATE,
             Self::IndexesDelete => INDEXES_DELETE,
@@ -90,7 +90,7 @@ pub mod actions {
     pub const DOCUMENTS_ADD: u8 = 2;
     pub const DOCUMENTS_GET: u8 = 3;
     pub const DOCUMENTS_DELETE: u8 = 4;
-    pub const INDEXES_ADD: u8 = 5;
+    pub const INDEXES_CREATE: u8 = 5;
     pub const INDEXES_GET: u8 = 6;
     pub const INDEXES_UPDATE: u8 = 7;
     pub const INDEXES_DELETE: u8 = 8;
diff --git a/meilisearch-auth/src/dump.rs b/meilisearch-auth/src/dump.rs
new file mode 100644
index 000000000..f93221ed6
--- /dev/null
+++ b/meilisearch-auth/src/dump.rs
@@ -0,0 +1,40 @@
+use std::fs::File;
+use std::io::BufRead;
+use std::io::BufReader;
+use std::io::Write;
+use std::path::Path;
+
+use crate::{AuthController, HeedAuthStore, Result};
+
+const KEYS_PATH: &str = "keys";
+
+impl AuthController {
+    pub fn dump(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> Result<()> {
+        let store = HeedAuthStore::new(&src)?;
+
+        let keys_file_path = dst.as_ref().join(KEYS_PATH);
+
+        let keys = store.list_api_keys()?;
+        let mut keys_file = File::create(&keys_file_path)?;
+        for key in keys {
+            serde_json::to_writer(&mut keys_file, &key)?;
+            keys_file.write_all(b"\n")?;
+        }
+
+        Ok(())
+    }
+
+    pub fn load_dump(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> Result<()> {
+        let store = HeedAuthStore::new(&dst)?;
+
+        let keys_file_path = src.as_ref().join(KEYS_PATH);
+
+        let mut reader = BufReader::new(File::open(&keys_file_path)?).lines();
+        while let Some(key) = reader.next().transpose()? {
+            let key = serde_json::from_str(&key)?;
+            store.put_api_key(key)?;
+        }
+
+        Ok(())
+    }
+}
diff --git a/meilisearch-auth/src/error.rs b/meilisearch-auth/src/error.rs
index 24ea88ff6..8fa6b8430 100644
--- a/meilisearch-auth/src/error.rs
+++ b/meilisearch-auth/src/error.rs
@@ -24,7 +24,12 @@ pub enum AuthControllerError {
     Internal(Box<dyn std::error::Error + Send + Sync + 'static>),
 }
 
-internal_error!(AuthControllerError: heed::Error, std::io::Error);
+internal_error!(
+    AuthControllerError: heed::Error,
+    std::io::Error,
+    serde_json::Error,
+    std::str::Utf8Error
+);
 
 impl ErrorCode for AuthControllerError {
     fn error_code(&self) -> Code {
diff --git a/meilisearch-auth/src/key.rs b/meilisearch-auth/src/key.rs
index 358630a40..51b9016ad 100644
--- a/meilisearch-auth/src/key.rs
+++ b/meilisearch-auth/src/key.rs
@@ -1,7 +1,7 @@
 use crate::action::Action;
 use crate::error::{AuthControllerError, Result};
 use crate::store::{KeyId, KEY_ID_LENGTH};
-use chrono::{DateTime, Utc};
+use chrono::{DateTime, NaiveDate, NaiveDateTime, Utc};
 use rand::Rng;
 use serde::{Deserialize, Serialize};
 use serde_json::{from_value, Value};
@@ -48,11 +48,8 @@ impl Key {
 
         let expires_at = value
             .get("expiresAt")
-            .map(|exp| {
-                from_value(exp.clone())
-                    .map_err(|_| AuthControllerError::InvalidApiKeyExpiresAt(exp.clone()))
-            })
-            .transpose()?;
+            .map(parse_expiration_date)
+            .ok_or(AuthControllerError::MissingParameter("expiresAt"))??;
 
         let created_at = Utc::now();
         let updated_at = Utc::now();
@@ -88,9 +85,7 @@ impl Key {
         }
 
         if let Some(exp) = value.get("expiresAt") {
-            let exp = from_value(exp.clone())
-                .map_err(|_| AuthControllerError::InvalidApiKeyExpiresAt(exp.clone()));
-            self.expires_at = exp?;
+            self.expires_at = parse_expiration_date(exp)?;
         }
 
         self.updated_at = Utc::now();
@@ -137,3 +132,30 @@ fn generate_id() -> [u8; KEY_ID_LENGTH] {
 
     bytes
 }
+
+fn parse_expiration_date(value: &Value) -> Result<Option<DateTime<Utc>>> {
+    match value {
+        Value::String(string) => DateTime::parse_from_rfc3339(string)
+            .map(|d| d.into())
+            .or_else(|_| {
+                NaiveDateTime::parse_from_str(string, "%Y-%m-%dT%H:%M:%S")
+                    .map(|naive| DateTime::from_utc(naive, Utc))
+            })
+            .or_else(|_| {
+                NaiveDate::parse_from_str(string, "%Y-%m-%d")
+                    .map(|naive| DateTime::from_utc(naive.and_hms(0, 0, 0), Utc))
+            })
+            .map_err(|_| AuthControllerError::InvalidApiKeyExpiresAt(value.clone()))
+            // check if the key is already expired.
+            .and_then(|d| {
+                if d > Utc::now() {
+                    Ok(d)
+                } else {
+                    Err(AuthControllerError::InvalidApiKeyExpiresAt(value.clone()))
+                }
+            })
+            .map(Option::Some),
+        Value::Null => Ok(None),
+        _otherwise => Err(AuthControllerError::InvalidApiKeyExpiresAt(value.clone())),
+    }
+}
diff --git a/meilisearch-auth/src/lib.rs b/meilisearch-auth/src/lib.rs
index de8a053c9..ccd5cfca2 100644
--- a/meilisearch-auth/src/lib.rs
+++ b/meilisearch-auth/src/lib.rs
@@ -1,4 +1,5 @@
 mod action;
+mod dump;
 pub mod error;
 mod key;
 mod store;
@@ -68,6 +69,11 @@ impl AuthController {
             if !key.indexes.iter().any(|i| i.as_str() == "*") {
                 filters.indexes = Some(key.indexes);
             }
+
+            filters.allow_index_creation = key
+                .actions
+                .iter()
+                .any(|&action| action == Action::IndexesAdd || action == Action::All);
         }
 
         Ok(filters)
@@ -104,7 +110,7 @@ impl AuthController {
             None => self.store.prefix_first_expiration_date(token, action)?,
         }) {
-            let id = from_utf8(&id).map_err(|e| AuthControllerError::Internal(Box::new(e)))?;
+            let id = from_utf8(&id)?;
             if exp.map_or(true, |exp| Utc::now() < exp)
                 && generate_key(master_key.as_bytes(), id).as_bytes() == token
             {
@@ -117,9 +123,18 @@ impl AuthController {
     }
 }
 
-#[derive(Default)]
 pub struct AuthFilter {
     pub indexes: Option<Vec<String>>,
+    pub allow_index_creation: bool,
+}
+
+impl Default for AuthFilter {
+    fn default() -> Self {
+        Self {
+            indexes: None,
+            allow_index_creation: true,
+        }
+    }
 }
 
 pub fn generate_key(master_key: &[u8], uid: &str) -> String {
diff --git a/meilisearch-auth/src/store.rs b/meilisearch-auth/src/store.rs
index 7672efbca..061b4add3 100644
--- a/meilisearch-auth/src/store.rs
+++ b/meilisearch-auth/src/store.rs
@@ -1,5 +1,6 @@
 use enum_iterator::IntoEnumIterator;
 use std::borrow::Cow;
+use std::cmp::Reverse;
 use std::convert::TryFrom;
 use std::convert::TryInto;
 use std::fs::create_dir_all;
@@ -130,6 +131,7 @@ impl HeedAuthStore {
             let (_, content) = result?;
             list.push(content);
         }
+        list.sort_unstable_by_key(|k| Reverse(k.created_at));
 
         Ok(list)
     }
diff --git a/meilisearch-http/Cargo.toml b/meilisearch-http/Cargo.toml
index a4a6ef5a1..0f4e8fa54 100644
--- a/meilisearch-http/Cargo.toml
+++ b/meilisearch-http/Cargo.toml
@@ -67,7 +67,7 @@ serde_json = { version = "1.0.67", features = ["preserve_order"] }
 sha2 = "0.9.6"
 siphasher = "0.3.7"
 slice-group-by = "0.2.6"
-structopt = "0.3.23"
+structopt = "0.3.25"
 sysinfo = "0.20.2"
 tar = "0.4.37"
 tempfile = "3.2.0"
@@ -103,5 +103,5 @@ default = ["analytics", "mini-dashboard"]
 tikv-jemallocator = "0.4.1"
 
 [package.metadata.mini-dashboard]
-assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.1.5/build.zip"
-sha1 = "1d955ea91b7691bd6fc207cb39866b82210783f0"
+assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.1.7/build.zip"
+sha1 = "e2feedf271917c4b7b88998eff5aaaea1d3925b9"
diff --git a/meilisearch-http/src/analytics/segment_analytics.rs b/meilisearch-http/src/analytics/segment_analytics.rs
index 4a5cb302c..86be0f432 100644
--- a/meilisearch-http/src/analytics/segment_analytics.rs
+++ b/meilisearch-http/src/analytics/segment_analytics.rs
@@ -77,7 +77,7 @@ impl SegmentAnalytics {
 
         let user = User::UserId { user_id };
         let mut batcher = AutoBatcher::new(client, Batcher::new(None), SEGMENT_API_KEY.to_string());
 
-        // If Meilisearch is Launched for the first time:
+        // If MeiliSearch is Launched for the first time:
         // 1. Send an event Launched associated to the user `total_launch`.
         // 2. Batch an event Launched with the real instance-id and send it in one hour.
if first_time_run { diff --git a/meilisearch-http/src/extractors/authentication/mod.rs b/meilisearch-http/src/extractors/authentication/mod.rs index af747ac3a..2c960578a 100644 --- a/meilisearch-http/src/extractors/authentication/mod.rs +++ b/meilisearch-http/src/extractors/authentication/mod.rs @@ -32,7 +32,7 @@ impl Deref for GuardedData { } impl FromRequest for GuardedData { - type Config = AuthConfig; + type Config = (); type Error = ResponseError; @@ -42,49 +42,44 @@ impl FromRequest for GuardedData req: &actix_web::HttpRequest, _payload: &mut actix_web::dev::Payload, ) -> Self::Future { - match req.app_data::() { - Some(config) => match config { - AuthConfig::NoAuth => match req.app_data::().cloned() { - Some(data) => ok(Self { - data, - filters: AuthFilter::default(), - _marker: PhantomData, - }), - None => err(AuthenticationError::IrretrievableState.into()), + match req.app_data::().cloned() { + Some(auth) => match req + .headers() + .get("Authorization") + .map(|type_token| type_token.to_str().unwrap_or_default().splitn(2, ' ')) + { + Some(mut type_token) => match type_token.next() { + Some("Bearer") => { + // TODO: find a less hardcoded way? + let index = req.match_info().get("index_uid"); + let token = type_token.next().unwrap_or("unknown"); + match P::authenticate(auth, token, index) { + Some(filters) => match req.app_data::().cloned() { + Some(data) => ok(Self { + data, + filters, + _marker: PhantomData, + }), + None => err(AuthenticationError::IrretrievableState.into()), + }, + None => { + let token = token.to_string(); + err(AuthenticationError::InvalidToken(token).into()) + } + } + } + _otherwise => err(AuthenticationError::MissingAuthorizationHeader.into()), }, - AuthConfig::Auth => match req.app_data::().cloned() { - Some(auth) => match req - .headers() - .get("Authorization") - .map(|type_token| type_token.to_str().unwrap_or_default().splitn(2, ' ')) - { - Some(mut type_token) => match type_token.next() { - Some("Bearer") => { - // TODO: find a less hardcoded way? - let index = req.match_info().get("index_uid"); - let token = type_token.next().unwrap_or("unknown"); - match P::authenticate(auth, token, index) { - Some(filters) => match req.app_data::().cloned() { - Some(data) => ok(Self { - data, - filters, - _marker: PhantomData, - }), - None => err(AuthenticationError::IrretrievableState.into()), - }, - None => { - let token = token.to_string(); - err(AuthenticationError::InvalidToken(token).into()) - } - } - } - _otherwise => { - err(AuthenticationError::MissingAuthorizationHeader.into()) - } - }, - None => err(AuthenticationError::MissingAuthorizationHeader.into()), + None => match P::authenticate(auth, "", None) { + Some(filters) => match req.app_data::().cloned() { + Some(data) => ok(Self { + data, + filters, + _marker: PhantomData, + }), + None => err(AuthenticationError::IrretrievableState.into()), }, - None => err(AuthenticationError::IrretrievableState.into()), + None => err(AuthenticationError::MissingAuthorizationHeader.into()), }, }, None => err(AuthenticationError::IrretrievableState.into()), @@ -129,10 +124,8 @@ pub mod policies { index: Option<&str>, ) -> Option { // authenticate if token is the master key. - if let Some(master_key) = auth.get_master_key() { - if master_key == token { - return Some(AuthFilter::default()); - } + if auth.get_master_key().map_or(true, |mk| mk == token) { + return Some(AuthFilter::default()); } // authenticate if token is allowed. 
@@ -147,13 +140,3 @@ pub mod policies { } } } -pub enum AuthConfig { - NoAuth, - Auth, -} - -impl Default for AuthConfig { - fn default() -> Self { - Self::NoAuth - } -} diff --git a/meilisearch-http/src/lib.rs b/meilisearch-http/src/lib.rs index c90fc5185..f8e2357fd 100644 --- a/meilisearch-http/src/lib.rs +++ b/meilisearch-http/src/lib.rs @@ -13,7 +13,6 @@ use std::sync::Arc; use std::time::Duration; use crate::error::MeilisearchHttpError; -use crate::extractors::authentication::AuthConfig; use actix_web::error::JsonPayloadError; use analytics::Analytics; use error::PayloadError; @@ -25,31 +24,6 @@ use actix_web::{web, HttpRequest}; use extractors::payload::PayloadConfig; use meilisearch_auth::AuthController; use meilisearch_lib::MeiliSearch; -use sha2::Digest; - -#[derive(Clone)] -pub struct ApiKeys { - pub public: Option, - pub private: Option, - pub master: Option, -} - -impl ApiKeys { - pub fn generate_missing_api_keys(&mut self) { - if let Some(master_key) = &self.master { - if self.private.is_none() { - let key = format!("{}-private", master_key); - let sha = sha2::Sha256::digest(key.as_bytes()); - self.private = Some(format!("{:x}", sha)); - } - if self.public.is_none() { - let key = format!("{}-public", master_key); - let sha = sha2::Sha256::digest(key.as_bytes()); - self.public = Some(format!("{:x}", sha)); - } - } - } -} pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result { let mut meilisearch = MeiliSearch::builder(); @@ -113,16 +87,6 @@ pub fn configure_data( ); } -pub fn configure_auth(config: &mut web::ServiceConfig, opts: &Opt) { - let auth_config = if opts.master_key.is_some() { - AuthConfig::Auth - } else { - AuthConfig::NoAuth - }; - - config.app_data(auth_config); -} - #[cfg(feature = "mini-dashboard")] pub fn dashboard(config: &mut web::ServiceConfig, enable_frontend: bool) { use actix_web::HttpResponse; @@ -170,17 +134,16 @@ macro_rules! create_app { use meilisearch_error::ResponseError; use meilisearch_http::error::MeilisearchHttpError; use meilisearch_http::routes; - use meilisearch_http::{configure_auth, configure_data, dashboard}; + use meilisearch_http::{configure_data, dashboard}; App::new() .configure(|s| configure_data(s, $data.clone(), $auth.clone(), &$opt, $analytics)) - .configure(|s| configure_auth(s, &$opt)) .configure(routes::configure) .configure(|s| dashboard(s, $enable_frontend)) .wrap( Cors::default() .send_wildcard() - .allowed_headers(vec!["content-type", "x-meili-api-key"]) + .allow_any_header() .allow_any_origin() .allow_any_method() .max_age(86_400), // 24h diff --git a/meilisearch-http/src/main.rs b/meilisearch-http/src/main.rs index 27c310f3a..955321a6f 100644 --- a/meilisearch-http/src/main.rs +++ b/meilisearch-http/src/main.rs @@ -50,7 +50,7 @@ async fn main() -> anyhow::Result<()> { let auth_controller = AuthController::new(&opt.db_path, &opt.master_key)?; #[cfg(all(not(debug_assertions), feature = "analytics"))] - let (analytics, user) = if !opt.no_analytics { + let (analytics, user) = if opt.analytics() { analytics::SegmentAnalytics::new(&opt, &meilisearch).await } else { analytics::MockAnalytics::new(&opt) @@ -125,9 +125,7 @@ pub fn print_launch_resume(opt: &Opt, user: &str) { #[cfg(all(not(debug_assertions), feature = "analytics"))] { - if opt.no_analytics { - eprintln!("Anonymous telemetry:\t\"Disabled\""); - } else { + if opt.analytics() { eprintln!( " Thank you for using MeiliSearch! @@ -136,6 +134,8 @@ We collect anonymized analytics to improve our product and your experience. 
To l Anonymous telemetry:\t\"Enabled\"" ); + } else { + eprintln!("Anonymous telemetry:\t\"Disabled\""); } } diff --git a/meilisearch-http/src/option.rs b/meilisearch-http/src/option.rs index d6b2a39dd..c6f783074 100644 --- a/meilisearch-http/src/option.rs +++ b/meilisearch-http/src/option.rs @@ -38,7 +38,7 @@ pub struct Opt { /// Do not send analytics to Meili. #[cfg(all(not(debug_assertions), feature = "analytics"))] #[structopt(long, env = "MEILI_NO_ANALYTICS")] - pub no_analytics: bool, + pub no_analytics: Option>, /// The maximum size, in bytes, of the main lmdb database directory #[structopt(long, env = "MEILI_MAX_INDEX_SIZE", default_value = "100 GiB")] @@ -129,6 +129,16 @@ pub struct Opt { } impl Opt { + /// Wether analytics should be enabled or not. + #[cfg(all(not(debug_assertions), feature = "analytics"))] + pub fn analytics(&self) -> bool { + match self.no_analytics { + None => true, + Some(None) => false, + Some(Some(disabled)) => !disabled, + } + } + pub fn get_ssl_config(&self) -> anyhow::Result> { if let (Some(cert_path), Some(key_path)) = (&self.ssl_cert_path, &self.ssl_key_path) { let client_auth = match &self.ssl_auth_path { diff --git a/meilisearch-http/src/routes/api_key.rs b/meilisearch-http/src/routes/api_key.rs index e77685493..6d08dee71 100644 --- a/meilisearch-http/src/routes/api_key.rs +++ b/meilisearch-http/src/routes/api_key.rs @@ -1,8 +1,8 @@ use std::str; use actix_web::{web, HttpRequest, HttpResponse}; -use chrono::{DateTime, Utc}; -use log::debug; +use chrono::SecondsFormat; + use meilisearch_auth::{generate_key, Action, AuthController, Key}; use serde::{Deserialize, Serialize}; use serde_json::Value; @@ -32,7 +32,6 @@ pub async fn create_api_key( let key = auth_controller.create_key(body.into_inner()).await?; let res = KeyView::from_key(key, auth_controller.get_master_key()); - debug!("returns: {:?}", res); Ok(HttpResponse::Created().json(res)) } @@ -46,8 +45,7 @@ pub async fn list_api_keys( .map(|k| KeyView::from_key(k, auth_controller.get_master_key())) .collect(); - debug!("returns: {:?}", res); - Ok(HttpResponse::Ok().json(res)) + Ok(HttpResponse::Ok().json(KeyListView::from(res))) } pub async fn get_api_key( @@ -58,7 +56,6 @@ pub async fn get_api_key( let key = auth_controller.get_key(&path.api_key).await?; let res = KeyView::from_key(key, auth_controller.get_master_key()); - debug!("returns: {:?}", res); Ok(HttpResponse::Ok().json(res)) } @@ -73,7 +70,6 @@ pub async fn patch_api_key( .await?; let res = KeyView::from_key(key, auth_controller.get_master_key()); - debug!("returns: {:?}", res); Ok(HttpResponse::Ok().json(res)) } @@ -84,7 +80,7 @@ pub async fn delete_api_key( // keep 8 first characters that are the ID of the API key. 
auth_controller.delete_key(&path.api_key).await?; - Ok(HttpResponse::NoContent().json(())) + Ok(HttpResponse::NoContent().finish()) } #[derive(Deserialize)] @@ -95,14 +91,13 @@ pub struct AuthParam { #[derive(Debug, Serialize)] #[serde(rename_all = "camelCase")] struct KeyView { - #[serde(skip_serializing_if = "Option::is_none")] description: Option, key: String, actions: Vec, indexes: Vec, - expires_at: Option>, - created_at: DateTime, - updated_at: DateTime, + expires_at: Option, + created_at: String, + updated_at: String, } impl KeyView { @@ -118,9 +113,22 @@ impl KeyView { key: generated_key, actions: key.actions, indexes: key.indexes, - expires_at: key.expires_at, - created_at: key.created_at, - updated_at: key.updated_at, + expires_at: key + .expires_at + .map(|dt| dt.to_rfc3339_opts(SecondsFormat::Secs, true)), + created_at: key.created_at.to_rfc3339_opts(SecondsFormat::Secs, true), + updated_at: key.updated_at.to_rfc3339_opts(SecondsFormat::Secs, true), } } } + +#[derive(Debug, Serialize)] +struct KeyListView { + results: Vec, +} + +impl From> for KeyListView { + fn from(results: Vec) -> Self { + Self { results } + } +} diff --git a/meilisearch-http/src/routes/indexes/documents.rs b/meilisearch-http/src/routes/indexes/documents.rs index 293b996a1..d18c600af 100644 --- a/meilisearch-http/src/routes/indexes/documents.rs +++ b/meilisearch-http/src/routes/indexes/documents.rs @@ -173,6 +173,7 @@ pub async fn add_documents( &req, ); + let allow_index_creation = meilisearch.filters().allow_index_creation; let task = document_addition( extract_mime_type(&req)?, meilisearch, @@ -180,6 +181,7 @@ pub async fn add_documents( params.primary_key, body, IndexDocumentsMethod::ReplaceDocuments, + allow_index_creation, ) .await?; @@ -203,6 +205,7 @@ pub async fn update_documents( &req, ); + let allow_index_creation = meilisearch.filters().allow_index_creation; let task = document_addition( extract_mime_type(&req)?, meilisearch, @@ -210,6 +213,7 @@ pub async fn update_documents( params.into_inner().primary_key, body, IndexDocumentsMethod::UpdateDocuments, + allow_index_creation, ) .await?; @@ -223,6 +227,7 @@ async fn document_addition( primary_key: Option, body: Payload, method: IndexDocumentsMethod, + allow_index_creation: bool, ) -> Result { let format = match mime_type .as_ref() @@ -250,6 +255,7 @@ async fn document_addition( primary_key, method, format, + allow_index_creation, }; let task = meilisearch.register_update(index_uid, update).await?.into(); diff --git a/meilisearch-http/src/routes/indexes/mod.rs b/meilisearch-http/src/routes/indexes/mod.rs index 485e03bc4..c24a5e662 100644 --- a/meilisearch-http/src/routes/indexes/mod.rs +++ b/meilisearch-http/src/routes/indexes/mod.rs @@ -62,7 +62,7 @@ pub struct IndexCreateRequest { } pub async fn create_index( - meilisearch: GuardedData, MeiliSearch>, + meilisearch: GuardedData, MeiliSearch>, body: web::Json, req: HttpRequest, analytics: web::Data, diff --git a/meilisearch-http/src/routes/indexes/settings.rs b/meilisearch-http/src/routes/indexes/settings.rs index 45412a928..8b38072c4 100644 --- a/meilisearch-http/src/routes/indexes/settings.rs +++ b/meilisearch-http/src/routes/indexes/settings.rs @@ -34,9 +34,12 @@ macro_rules! 
make_setting_route { $attr: Setting::Reset, ..Default::default() }; + + let allow_index_creation = meilisearch.filters().allow_index_creation; let update = Update::Settings { settings, is_deletion: true, + allow_index_creation, }; let task: SummarizedTaskView = meilisearch .register_update(index_uid.into_inner(), update) @@ -66,9 +69,11 @@ macro_rules! make_setting_route { ..Default::default() }; + let allow_index_creation = meilisearch.filters().allow_index_creation; let update = Update::Settings { settings, is_deletion: false, + allow_index_creation, }; let task: SummarizedTaskView = meilisearch .register_update(index_uid.into_inner(), update) @@ -272,9 +277,11 @@ pub async fn update_all( Some(&req), ); + let allow_index_creation = meilisearch.filters().allow_index_creation; let update = Update::Settings { settings, is_deletion: false, + allow_index_creation, }; let task: SummarizedTaskView = meilisearch .register_update(index_uid.into_inner(), update) @@ -300,9 +307,11 @@ pub async fn delete_all( ) -> Result { let settings = Settings::cleared().into_unchecked(); + let allow_index_creation = data.filters().allow_index_creation; let update = Update::Settings { settings, is_deletion: true, + allow_index_creation, }; let task: SummarizedTaskView = data .register_update(index_uid.into_inner(), update) diff --git a/meilisearch-http/src/routes/mod.rs b/meilisearch-http/src/routes/mod.rs index f2fde7754..c859dc68f 100644 --- a/meilisearch-http/src/routes/mod.rs +++ b/meilisearch-http/src/routes/mod.rs @@ -117,7 +117,7 @@ impl IndexUpdateResponse { /// Always return a 200 with: /// ```json /// { -/// "status": "Meilisearch is running" +/// "status": "MeiliSearch is running" /// } /// ``` pub async fn running() -> HttpResponse { diff --git a/meilisearch-http/src/routes/tasks.rs b/meilisearch-http/src/routes/tasks.rs index 158d8570c..09e0a21b6 100644 --- a/meilisearch-http/src/routes/tasks.rs +++ b/meilisearch-http/src/routes/tasks.rs @@ -1,6 +1,7 @@ use actix_web::{web, HttpRequest, HttpResponse}; use meilisearch_error::ResponseError; use meilisearch_lib::tasks::task::TaskId; +use meilisearch_lib::tasks::TaskFilter; use meilisearch_lib::MeiliSearch; use serde_json::json; @@ -24,8 +25,16 @@ async fn get_tasks( Some(&req), ); + let filters = meilisearch.filters().indexes.as_ref().map(|indexes| { + let mut filters = TaskFilter::default(); + for index in indexes { + filters.filter_index(index.to_string()); + } + filters + }); + let tasks: TaskListView = meilisearch - .list_tasks(None, None, None) + .list_tasks(filters, None, None) .await? .into_iter() .map(TaskView::from) @@ -47,8 +56,16 @@ async fn get_task( Some(&req), ); + let filters = meilisearch.filters().indexes.as_ref().map(|indexes| { + let mut filters = TaskFilter::default(); + for index in indexes { + filters.filter_index(index.to_string()); + } + filters + }); + let task: TaskView = meilisearch - .get_task(task_id.into_inner(), None) + .get_task(task_id.into_inner(), filters) .await? .into(); diff --git a/meilisearch-http/src/task.rs b/meilisearch-http/src/task.rs index 1eba76ca8..990e9d0b7 100644 --- a/meilisearch-http/src/task.rs +++ b/meilisearch-http/src/task.rs @@ -13,9 +13,9 @@ enum TaskType { IndexCreation, IndexUpdate, IndexDeletion, - DocumentsAddition, - DocumentsPartial, - DocumentsDeletion, + DocumentAddition, + DocumentPartial, + DocumentDeletion, SettingsUpdate, ClearAll, } @@ -26,13 +26,13 @@ impl From for TaskType { TaskContent::DocumentAddition { merge_strategy: IndexDocumentsMethod::ReplaceDocuments, .. 
- } => TaskType::DocumentsAddition, + } => TaskType::DocumentAddition, TaskContent::DocumentAddition { merge_strategy: IndexDocumentsMethod::UpdateDocuments, .. - } => TaskType::DocumentsPartial, + } => TaskType::DocumentPartial, TaskContent::DocumentDeletion(DocumentDeletion::Clear) => TaskType::ClearAll, - TaskContent::DocumentDeletion(DocumentDeletion::Ids(_)) => TaskType::DocumentsDeletion, + TaskContent::DocumentDeletion(DocumentDeletion::Ids(_)) => TaskType::DocumentDeletion, TaskContent::SettingsUpdate { .. } => TaskType::SettingsUpdate, TaskContent::IndexDeletion => TaskType::IndexDeletion, TaskContent::IndexCreation { .. } => TaskType::IndexCreation, @@ -56,7 +56,7 @@ enum TaskStatus { #[allow(clippy::large_enum_variant)] enum TaskDetails { #[serde(rename_all = "camelCase")] - DocumentsAddition { + DocumentAddition { received_documents: usize, indexed_documents: Option, }, @@ -123,21 +123,21 @@ impl From for TaskView { documents_count, .. } => { - let details = TaskDetails::DocumentsAddition { + let details = TaskDetails::DocumentAddition { received_documents: documents_count, indexed_documents: None, }; let task_type = match merge_strategy { - IndexDocumentsMethod::UpdateDocuments => TaskType::DocumentsPartial, - IndexDocumentsMethod::ReplaceDocuments => TaskType::DocumentsAddition, + IndexDocumentsMethod::UpdateDocuments => TaskType::DocumentPartial, + IndexDocumentsMethod::ReplaceDocuments => TaskType::DocumentAddition, _ => unreachable!("Unexpected document merge strategy."), }; (task_type, Some(details)) } TaskContent::DocumentDeletion(DocumentDeletion::Ids(ids)) => ( - TaskType::DocumentsDeletion, + TaskType::DocumentDeletion, Some(TaskDetails::DocumentDeletion { received_document_ids: ids.len(), deleted_documents: None, @@ -181,7 +181,7 @@ impl From for TaskView { indexed_documents: num, .. }, - Some(TaskDetails::DocumentsAddition { + Some(TaskDetails::DocumentAddition { ref mut indexed_documents, .. }), @@ -215,6 +215,27 @@ impl From for TaskView { (TaskStatus::Succeeded, None, Some(*timestamp)) } TaskEvent::Failed { timestamp, error } => { + match details { + Some(TaskDetails::DocumentDeletion { + ref mut deleted_documents, + .. + }) => { + deleted_documents.replace(0); + } + Some(TaskDetails::ClearAll { + ref mut deleted_documents, + .. + }) => { + deleted_documents.replace(0); + } + Some(TaskDetails::DocumentAddition { + ref mut indexed_documents, + .. 
+ }) => { + indexed_documents.replace(0); + } + _ => (), + } (TaskStatus::Failed, Some(error.clone()), Some(*timestamp)) } }; diff --git a/meilisearch-http/tests/auth/api_keys.rs b/meilisearch-http/tests/auth/api_keys.rs index 4549b3900..fbe107837 100644 --- a/meilisearch-http/tests/auth/api_keys.rs +++ b/meilisearch-http/tests/auth/api_keys.rs @@ -1,6 +1,7 @@ use crate::common::Server; use assert_json_diff::assert_json_include; use serde_json::json; +use std::{thread, time}; #[actix_rt::test] async fn add_valid_api_key() { @@ -15,7 +16,7 @@ async fn add_valid_api_key() { "documents.add", "documents.get", "documents.delete", - "indexes.add", + "indexes.create", "indexes.get", "indexes.update", "indexes.delete", @@ -43,7 +44,66 @@ async fn add_valid_api_key() { "documents.add", "documents.get", "documents.delete", - "indexes.add", + "indexes.create", + "indexes.get", + "indexes.update", + "indexes.delete", + "tasks.get", + "settings.get", + "settings.update", + "stats.get", + "dumps.create", + "dumps.get" + ], + "expiresAt": "2050-11-13T00:00:00Z" + }); + + assert_json_include!(actual: response, expected: expected_response); + assert_eq!(code, 201); +} + +#[actix_rt::test] +async fn add_valid_api_key_expired_at() { + let mut server = Server::new_auth().await; + server.use_api_key("MASTER_KEY"); + + let content = json!({ + "description": "Indexing API key", + "indexes": ["products"], + "actions": [ + "search", + "documents.add", + "documents.get", + "documents.delete", + "indexes.create", + "indexes.get", + "indexes.update", + "indexes.delete", + "tasks.get", + "settings.get", + "settings.update", + "stats.get", + "dumps.create", + "dumps.get" + ], + "expiresAt": "2050-11-13" + }); + + let (response, code) = server.add_api_key(content).await; + assert!(response["key"].is_string(), "{:?}", response); + assert!(response["expiresAt"].is_string()); + assert!(response["createdAt"].is_string()); + assert!(response["updatedAt"].is_string()); + + let expected_response = json!({ + "description": "Indexing API key", + "indexes": ["products"], + "actions": [ + "search", + "documents.add", + "documents.get", + "documents.delete", + "indexes.create", "indexes.get", "indexes.update", "indexes.delete", @@ -71,7 +131,7 @@ async fn add_valid_api_key_no_description() { "actions": [ "documents.add" ], - "expiresAt": "2050-11-13T00:00:00Z" + "expiresAt": "2050-11-13T00:00:00" }); let (response, code) = server.add_api_key(content).await; @@ -153,9 +213,7 @@ async fn error_add_api_key_missing_parameter() { // missing indexes let content = json!({ "description": "Indexing API key", - "actions": [ - "documents.add" - ], + "actions": ["documents.add"], "expiresAt": "2050-11-13T00:00:00Z" }); let (response, code) = server.add_api_key(content).await; @@ -187,6 +245,24 @@ async fn error_add_api_key_missing_parameter() { assert_eq!(response, expected_response); assert_eq!(code, 400); + + // missing expiration date + let content = json!({ + "description": "Indexing API key", + "indexes": ["products"], + "actions": ["documents.add"], + }); + let (response, code) = server.add_api_key(content).await; + + let expected_response = json!({ + "message": "`expiresAt` field is mandatory.", + "code": "missing_parameter", + "type": "invalid_request", + "link":"https://docs.meilisearch.com/errors#missing_parameter" + }); + + assert_eq!(response, expected_response); + assert_eq!(code, 400); } #[actix_rt::test] @@ -311,6 +387,32 @@ async fn error_add_api_key_invalid_parameters_expires_at() { assert_eq!(code, 400); } +#[actix_rt::test] 
+async fn error_add_api_key_invalid_parameters_expires_at_in_the_past() { + let mut server = Server::new_auth().await; + server.use_api_key("MASTER_KEY"); + + let content = json!({ + "description": "Indexing API key", + "indexes": ["products"], + "actions": [ + "documents.add" + ], + "expiresAt": "2010-11-13T00:00:00Z" + }); + let (response, code) = server.add_api_key(content).await; + + let expected_response = json!({ + "message": r#"expiresAt field value `"2010-11-13T00:00:00Z"` is invalid. It should be in ISO-8601 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DDTHH:MM:SS'."#, + "code": "invalid_api_key_expires_at", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_api_key_expires_at" + }); + + assert_eq!(response, expected_response); + assert_eq!(code, 400); +} + #[actix_rt::test] async fn get_api_key() { let mut server = Server::new_auth().await; @@ -324,7 +426,7 @@ async fn get_api_key() { "documents.add", "documents.get", "documents.delete", - "indexes.add", + "indexes.create", "indexes.get", "indexes.update", "indexes.delete", @@ -359,7 +461,7 @@ async fn get_api_key() { "documents.add", "documents.get", "documents.delete", - "indexes.add", + "indexes.create", "indexes.get", "indexes.update", "indexes.delete", @@ -449,7 +551,7 @@ async fn list_api_keys() { "documents.add", "documents.get", "documents.delete", - "indexes.add", + "indexes.create", "indexes.get", "indexes.update", "indexes.delete", @@ -468,81 +570,45 @@ async fn list_api_keys() { assert_eq!(code, 201); let (response, code) = server.list_api_keys().await; - assert!(response.is_array()); - let response = &response.as_array().unwrap(); - let created_key = response - .iter() - .find(|x| x["description"] == "Indexing API key") - .unwrap(); - assert!(created_key["key"].is_string()); - assert!(created_key["expiresAt"].is_string()); - assert!(created_key["createdAt"].is_string()); - assert!(created_key["updatedAt"].is_string()); + let expected_response = json!({ "results": + [ + { + "description": "Indexing API key", + "indexes": ["products"], + "actions": [ + "search", + "documents.add", + "documents.get", + "documents.delete", + "indexes.create", + "indexes.get", + "indexes.update", + "indexes.delete", + "tasks.get", + "settings.get", + "settings.update", + "stats.get", + "dumps.create", + "dumps.get" + ], + "expiresAt": "2050-11-13T00:00:00Z" + }, + { + "description": "Default Search API Key (Use it to search from the frontend)", + "indexes": ["*"], + "actions": ["search"], + "expiresAt": serde_json::Value::Null, + }, + { + "description": "Default Admin API Key (Use it for all other operations. Caution! Do not use it on a public frontend)", + "indexes": ["*"], + "actions": ["*"], + "expiresAt": serde_json::Value::Null, + } + ]}); - let expected_response = json!({ - "description": "Indexing API key", - "indexes": ["products"], - "actions": [ - "search", - "documents.add", - "documents.get", - "documents.delete", - "indexes.add", - "indexes.get", - "indexes.update", - "indexes.delete", - "tasks.get", - "settings.get", - "settings.update", - "stats.get", - "dumps.create", - "dumps.get" - ], - "expiresAt": "2050-11-13T00:00:00Z" - }); - - assert_json_include!(actual: created_key, expected: expected_response); - assert_eq!(code, 200); - - // check if default admin key is present. - let admin_key = response - .iter() - .find(|x| x["description"] == "Default Admin API Key (Use it for all other operations. Caution! 
Do not use it on a public frontend)") - .unwrap(); - assert!(created_key["key"].is_string()); - assert!(created_key["expiresAt"].is_string()); - assert!(created_key["createdAt"].is_string()); - assert!(created_key["updatedAt"].is_string()); - - let expected_response = json!({ - "description": "Default Admin API Key (Use it for all other operations. Caution! Do not use it on a public frontend)", - "indexes": ["*"], - "actions": ["*"], - "expiresAt": serde_json::Value::Null, - }); - - assert_json_include!(actual: admin_key, expected: expected_response); - assert_eq!(code, 200); - - // check if default search key is present. - let admin_key = response - .iter() - .find(|x| x["description"] == "Default Search API Key (Use it to search from the frontend)") - .unwrap(); - assert!(created_key["key"].is_string()); - assert!(created_key["expiresAt"].is_string()); - assert!(created_key["createdAt"].is_string()); - assert!(created_key["updatedAt"].is_string()); - - let expected_response = json!({ - "description": "Default Search API Key (Use it to search from the frontend)", - "indexes": ["*"], - "actions": ["search"], - "expiresAt": serde_json::Value::Null, - }); - - assert_json_include!(actual: admin_key, expected: expected_response); + assert_json_include!(actual: response, expected: expected_response); assert_eq!(code, 200); } @@ -594,7 +660,7 @@ async fn delete_api_key() { "documents.add", "documents.get", "documents.delete", - "indexes.add", + "indexes.create", "indexes.get", "indexes.update", "indexes.delete", @@ -694,7 +760,7 @@ async fn patch_api_key_description() { "documents.add", "documents.get", "documents.delete", - "indexes.add", + "indexes.create", "indexes.get", "indexes.update", "indexes.delete", @@ -719,6 +785,7 @@ async fn patch_api_key_description() { // Add a description let content = json!({ "description": "Indexing API key" }); + thread::sleep(time::Duration::new(1, 0)); let (response, code) = server.patch_api_key(&key, content).await; assert!(response["key"].is_string()); assert!(response["expiresAt"].is_string()); @@ -734,7 +801,7 @@ async fn patch_api_key_description() { "documents.add", "documents.get", "documents.delete", - "indexes.add", + "indexes.create", "indexes.get", "indexes.update", "indexes.delete", @@ -764,7 +831,7 @@ async fn patch_api_key_description() { "documents.add", "documents.get", "documents.delete", - "indexes.add", + "indexes.create", "indexes.get", "indexes.update", "indexes.delete", @@ -793,7 +860,7 @@ async fn patch_api_key_description() { "documents.add", "documents.get", "documents.delete", - "indexes.add", + "indexes.create", "indexes.get", "indexes.update", "indexes.delete", @@ -821,7 +888,7 @@ async fn patch_api_key_indexes() { "documents.add", "documents.get", "documents.delete", - "indexes.add", + "indexes.create", "indexes.get", "indexes.update", "indexes.delete", @@ -845,6 +912,7 @@ async fn patch_api_key_indexes() { let content = json!({ "indexes": ["products", "prices"] }); + thread::sleep(time::Duration::new(1, 0)); let (response, code) = server.patch_api_key(&key, content).await; assert!(response["key"].is_string()); assert!(response["expiresAt"].is_string()); @@ -860,7 +928,7 @@ async fn patch_api_key_indexes() { "documents.add", "documents.get", "documents.delete", - "indexes.add", + "indexes.create", "indexes.get", "indexes.update", "indexes.delete", @@ -888,7 +956,7 @@ async fn patch_api_key_actions() { "documents.add", "documents.get", "documents.delete", - "indexes.add", + "indexes.create", "indexes.get", "indexes.update", 
"indexes.delete", @@ -920,6 +988,7 @@ async fn patch_api_key_actions() { ], }); + thread::sleep(time::Duration::new(1, 0)); let (response, code) = server.patch_api_key(&key, content).await; assert!(response["key"].is_string()); assert!(response["expiresAt"].is_string()); @@ -957,7 +1026,7 @@ async fn patch_api_key_expiration_date() { "documents.add", "documents.get", "documents.delete", - "indexes.add", + "indexes.create", "indexes.get", "indexes.update", "indexes.delete", @@ -965,7 +1034,7 @@ async fn patch_api_key_expiration_date() { "dumps.create", "dumps.get" ], - "expiresAt": "205-11-13T00:00:00Z" + "expiresAt": "2050-11-13T00:00:00Z" }); let (response, code) = server.add_api_key(content).await; @@ -981,6 +1050,7 @@ async fn patch_api_key_expiration_date() { let content = json!({ "expiresAt": "2055-11-13T00:00:00Z" }); + thread::sleep(time::Duration::new(1, 0)); let (response, code) = server.patch_api_key(&key, content).await; assert!(response["key"].is_string()); assert!(response["expiresAt"].is_string()); @@ -996,7 +1066,7 @@ async fn patch_api_key_expiration_date() { "documents.add", "documents.get", "documents.delete", - "indexes.add", + "indexes.create", "indexes.get", "indexes.update", "indexes.delete", @@ -1166,3 +1236,65 @@ async fn error_patch_api_key_indexes_invalid_parameters() { assert_eq!(response, expected_response); assert_eq!(code, 400); } + +#[actix_rt::test] +async fn error_access_api_key_routes_no_master_key_set() { + let mut server = Server::new().await; + + let expected_response = json!({ + "message": "The Authorization header is missing. It must use the bearer authorization method.", + "code": "missing_authorization_header", + "type": "auth", + "link": "https://docs.meilisearch.com/errors#missing_authorization_header" + }); + let expected_code = 401; + + let (response, code) = server.add_api_key(json!({})).await; + + assert_eq!(response, expected_response); + assert_eq!(code, expected_code); + + let (response, code) = server.patch_api_key("content", json!({})).await; + + assert_eq!(response, expected_response); + assert_eq!(code, expected_code); + + let (response, code) = server.get_api_key("content").await; + + assert_eq!(response, expected_response); + assert_eq!(code, expected_code); + + let (response, code) = server.list_api_keys().await; + + assert_eq!(response, expected_response); + assert_eq!(code, expected_code); + + server.use_api_key("MASTER_KEY"); + + let expected_response = json!({"message": "The provided API key is invalid.", + "code": "invalid_api_key", + "type": "auth", + "link": "https://docs.meilisearch.com/errors#invalid_api_key" + }); + let expected_code = 403; + + let (response, code) = server.add_api_key(json!({})).await; + + assert_eq!(response, expected_response); + assert_eq!(code, expected_code); + + let (response, code) = server.patch_api_key("content", json!({})).await; + + assert_eq!(response, expected_response); + assert_eq!(code, expected_code); + + let (response, code) = server.get_api_key("content").await; + + assert_eq!(response, expected_response); + assert_eq!(code, expected_code); + + let (response, code) = server.list_api_keys().await; + + assert_eq!(response, expected_response); + assert_eq!(code, expected_code); +} diff --git a/meilisearch-http/tests/auth/authorization.rs b/meilisearch-http/tests/auth/authorization.rs index 1691b5d8f..98e0a1a1d 100644 --- a/meilisearch-http/tests/auth/authorization.rs +++ b/meilisearch-http/tests/auth/authorization.rs @@ -1,4 +1,5 @@ use crate::common::Server; +use chrono::{Duration, Utc}; 
use maplit::hashmap; use once_cell::sync::Lazy; use serde_json::{json, Value}; @@ -19,7 +20,7 @@ static AUTHORIZATIONS: Lazy> ("PUT", "/indexes/products/") => "indexes.update", ("GET", "/indexes/products/") => "indexes.get", ("DELETE", "/indexes/products/") => "indexes.delete", - ("POST", "/indexes") => "indexes.add", + ("POST", "/indexes") => "indexes.create", ("GET", "/indexes") => "indexes.get", ("GET", "/indexes/products/settings") => "settings.get", ("GET", "/indexes/products/settings/displayed-attributes") => "settings.get", @@ -62,13 +63,15 @@ static INVALID_RESPONSE: Lazy = Lazy::new(|| { #[actix_rt::test] #[cfg_attr(target_os = "windows", ignore)] async fn error_access_expired_key() { + use std::{thread, time}; + let mut server = Server::new_auth().await; server.use_api_key("MASTER_KEY"); let content = json!({ "indexes": ["products"], "actions": ALL_ACTIONS.clone(), - "expiresAt": "2020-11-13T00:00:00Z" + "expiresAt": (Utc::now() + Duration::seconds(1)), }); let (response, code) = server.add_api_key(content).await; @@ -78,6 +81,9 @@ async fn error_access_expired_key() { let key = response["key"].as_str().unwrap(); server.use_api_key(&key); + // wait until the key is expired. + thread::sleep(time::Duration::new(1, 0)); + for (method, route) in AUTHORIZATIONS.keys() { let (response, code) = server.dummy_request(method, route).await; @@ -95,7 +101,7 @@ async fn error_access_unauthorized_index() { let content = json!({ "indexes": ["sales"], "actions": ALL_ACTIONS.clone(), - "expiresAt": "2050-11-13T00:00:00Z" + "expiresAt": Utc::now() + Duration::hours(1), }); let (response, code) = server.add_api_key(content).await; @@ -126,7 +132,7 @@ async fn error_access_unauthorized_action() { let content = json!({ "indexes": ["products"], "actions": [], - "expiresAt": "2050-11-13T00:00:00Z" + "expiresAt": Utc::now() + Duration::hours(1), }); let (response, code) = server.add_api_key(content).await; @@ -163,7 +169,7 @@ async fn access_authorized_restricted_index() { let content = json!({ "indexes": ["products"], "actions": [], - "expiresAt": "2050-11-13T00:00:00Z" + "expiresAt": Utc::now() + Duration::hours(1), }); let (response, code) = server.add_api_key(content).await; @@ -215,7 +221,7 @@ async fn access_authorized_no_index_restriction() { let content = json!({ "indexes": ["*"], "actions": [], - "expiresAt": "2050-11-13T00:00:00Z" + "expiresAt": Utc::now() + Duration::hours(1), }); let (response, code) = server.add_api_key(content).await; @@ -278,7 +284,7 @@ async fn access_authorized_stats_restricted_index() { let content = json!({ "indexes": ["products"], "actions": ["stats.get"], - "expiresAt": "2050-11-13T00:00:00Z" + "expiresAt": Utc::now() + Duration::hours(1), }); let (response, code) = server.add_api_key(content).await; assert_eq!(code, 201); @@ -318,7 +324,7 @@ async fn access_authorized_stats_no_index_restriction() { let content = json!({ "indexes": ["*"], "actions": ["stats.get"], - "expiresAt": "2050-11-13T00:00:00Z" + "expiresAt": Utc::now() + Duration::hours(1), }); let (response, code) = server.add_api_key(content).await; assert_eq!(code, 201); @@ -358,7 +364,7 @@ async fn list_authorized_indexes_restricted_index() { let content = json!({ "indexes": ["products"], "actions": ["indexes.get"], - "expiresAt": "2050-11-13T00:00:00Z" + "expiresAt": Utc::now() + Duration::hours(1), }); let (response, code) = server.add_api_key(content).await; assert_eq!(code, 201); @@ -399,7 +405,7 @@ async fn list_authorized_indexes_no_index_restriction() { let content = json!({ "indexes": ["*"], 
"actions": ["indexes.get"], - "expiresAt": "2050-11-13T00:00:00Z" + "expiresAt": Utc::now() + Duration::hours(1), }); let (response, code) = server.add_api_key(content).await; assert_eq!(code, 201); @@ -419,3 +425,215 @@ async fn list_authorized_indexes_no_index_restriction() { // key should have access on `test` index. assert!(response.iter().any(|index| index["uid"] == "test")); } + +#[actix_rt::test] +async fn list_authorized_tasks_restricted_index() { + let mut server = Server::new_auth().await; + server.use_api_key("MASTER_KEY"); + + // create index `test` + let index = server.index("test"); + let (_, code) = index.create(Some("id")).await; + assert_eq!(code, 202); + // create index `products` + let index = server.index("products"); + let (_, code) = index.create(Some("product_id")).await; + assert_eq!(code, 202); + index.wait_task(0).await; + + // create key with access on `products` index only. + let content = json!({ + "indexes": ["products"], + "actions": ["tasks.get"], + "expiresAt": Utc::now() + Duration::hours(1), + }); + let (response, code) = server.add_api_key(content).await; + assert_eq!(code, 201); + assert!(response["key"].is_string()); + + // use created key. + let key = response["key"].as_str().unwrap(); + server.use_api_key(&key); + + let (response, code) = server.service.get("/tasks").await; + assert_eq!(code, 200); + println!("{}", response); + let response = response["results"].as_array().unwrap(); + // key should have access on `products` index. + assert!(response.iter().any(|task| task["indexUid"] == "products")); + + // key should not have access on `test` index. + assert!(!response.iter().any(|task| task["indexUid"] == "test")); +} + +#[actix_rt::test] +async fn list_authorized_tasks_no_index_restriction() { + let mut server = Server::new_auth().await; + server.use_api_key("MASTER_KEY"); + + // create index `test` + let index = server.index("test"); + let (_, code) = index.create(Some("id")).await; + assert_eq!(code, 202); + // create index `products` + let index = server.index("products"); + let (_, code) = index.create(Some("product_id")).await; + assert_eq!(code, 202); + index.wait_task(0).await; + + // create key with access on all indexes. + let content = json!({ + "indexes": ["*"], + "actions": ["tasks.get"], + "expiresAt": Utc::now() + Duration::hours(1), + }); + let (response, code) = server.add_api_key(content).await; + assert_eq!(code, 201); + assert!(response["key"].is_string()); + + // use created key. + let key = response["key"].as_str().unwrap(); + server.use_api_key(&key); + + let (response, code) = server.service.get("/tasks").await; + assert_eq!(code, 200); + + let response = response["results"].as_array().unwrap(); + // key should have access on `products` index. + assert!(response.iter().any(|task| task["indexUid"] == "products")); + + // key should have access on `test` index. + assert!(response.iter().any(|task| task["indexUid"] == "test")); +} + +#[actix_rt::test] +async fn error_creating_index_without_action() { + let mut server = Server::new_auth().await; + server.use_api_key("MASTER_KEY"); + + // create key with access on all indexes. + let content = json!({ + "indexes": ["*"], + "actions": ALL_ACTIONS.iter().cloned().filter(|a| *a != "indexes.create").collect::>(), + "expiresAt": "2050-11-13T00:00:00Z" + }); + let (response, code) = server.add_api_key(content).await; + assert_eq!(code, 201); + assert!(response["key"].is_string()); + + // use created key. 
+ let key = response["key"].as_str().unwrap(); + server.use_api_key(&key); + + let expected_error = json!({ + "message": "Index `test` not found.", + "code": "index_not_found", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#index_not_found" + }); + + // try to create a index via add documents route + let index = server.index("test"); + let documents = json!([ + { + "id": 1, + "content": "foo", + } + ]); + + let (response, code) = index.add_documents(documents, None).await; + assert_eq!(code, 202, "{:?}", response); + let task_id = response["uid"].as_u64().unwrap(); + + let response = index.wait_task(task_id).await; + assert_eq!(response["status"], "failed"); + assert_eq!(response["error"], expected_error.clone()); + + // try to create a index via add settings route + let settings = json!({ "distinctAttribute": "test"}); + + let (response, code) = index.update_settings(settings).await; + assert_eq!(code, 202); + let task_id = response["uid"].as_u64().unwrap(); + + let response = index.wait_task(task_id).await; + + assert_eq!(response["status"], "failed"); + assert_eq!(response["error"], expected_error.clone()); + + // try to create a index via add specialized settings route + let (response, code) = index.update_distinct_attribute(json!("test")).await; + assert_eq!(code, 202); + let task_id = response["uid"].as_u64().unwrap(); + + let response = index.wait_task(task_id).await; + + assert_eq!(response["status"], "failed"); + assert_eq!(response["error"], expected_error.clone()); +} + +#[actix_rt::test] +async fn lazy_create_index() { + let mut server = Server::new_auth().await; + server.use_api_key("MASTER_KEY"); + + // create key with access on all indexes. + let content = json!({ + "indexes": ["*"], + "actions": ["*"], + "expiresAt": "2050-11-13T00:00:00Z" + }); + + let (response, code) = server.add_api_key(content).await; + assert_eq!(code, 201); + assert!(response["key"].is_string()); + + // use created key. 
+ let key = response["key"].as_str().unwrap(); + server.use_api_key(&key); + + // try to create a index via add documents route + let index = server.index("test"); + let documents = json!([ + { + "id": 1, + "content": "foo", + } + ]); + + let (response, code) = index.add_documents(documents, None).await; + assert_eq!(code, 202, "{:?}", response); + let task_id = response["uid"].as_u64().unwrap(); + + index.wait_task(task_id).await; + + let (response, code) = index.get_task(task_id).await; + assert_eq!(code, 200); + assert_eq!(response["status"], "succeeded"); + + // try to create a index via add settings route + let index = server.index("test1"); + let settings = json!({ "distinctAttribute": "test"}); + + let (response, code) = index.update_settings(settings).await; + assert_eq!(code, 202); + let task_id = response["uid"].as_u64().unwrap(); + + index.wait_task(task_id).await; + + let (response, code) = index.get_task(task_id).await; + assert_eq!(code, 200); + assert_eq!(response["status"], "succeeded"); + + // try to create a index via add specialized settings route + let index = server.index("test2"); + let (response, code) = index.update_distinct_attribute(json!("test")).await; + assert_eq!(code, 202); + let task_id = response["uid"].as_u64().unwrap(); + + index.wait_task(task_id).await; + + let (response, code) = index.get_task(task_id).await; + assert_eq!(code, 200); + assert_eq!(response["status"], "succeeded"); +} diff --git a/meilisearch-http/tests/auth/mod.rs b/meilisearch-http/tests/auth/mod.rs index 4d5d043bd..a7ae7c592 100644 --- a/meilisearch-http/tests/auth/mod.rs +++ b/meilisearch-http/tests/auth/mod.rs @@ -2,29 +2,12 @@ mod api_keys; mod authorization; mod payload; -use crate::common::server::default_settings; -use crate::common::server::TEST_TEMP_DIR; use crate::common::Server; use actix_web::http::StatusCode; + use serde_json::{json, Value}; -use tempfile::TempDir; impl Server { - pub async fn new_auth() -> Self { - let dir = TempDir::new().unwrap(); - - if cfg!(windows) { - std::env::set_var("TMP", TEST_TEMP_DIR.path()); - } else { - std::env::set_var("TMPDIR", TEST_TEMP_DIR.path()); - } - - let mut options = default_settings(dir.path()); - options.master_key = Some("MASTER_KEY".to_string()); - - Self::new_with_options(options).await - } - pub fn use_api_key(&mut self, api_key: impl AsRef) { self.service.api_key = Some(api_key.as_ref().to_string()); } diff --git a/meilisearch-http/tests/common/server.rs b/meilisearch-http/tests/common/server.rs index 51c7b5611..0fb5eacfb 100644 --- a/meilisearch-http/tests/common/server.rs +++ b/meilisearch-http/tests/common/server.rs @@ -1,3 +1,4 @@ +#![allow(dead_code)] use std::path::Path; use actix_web::http::StatusCode; @@ -49,6 +50,33 @@ impl Server { } } + pub async fn new_auth() -> Self { + let dir = TempDir::new().unwrap(); + + if cfg!(windows) { + std::env::set_var("TMP", TEST_TEMP_DIR.path()); + } else { + std::env::set_var("TMPDIR", TEST_TEMP_DIR.path()); + } + + let mut options = default_settings(dir.path()); + options.master_key = Some("MASTER_KEY".to_string()); + + let meilisearch = setup_meilisearch(&options).unwrap(); + let auth = AuthController::new(&options.db_path, &options.master_key).unwrap(); + let service = Service { + meilisearch, + auth, + options, + api_key: None, + }; + + Server { + service, + _dir: Some(dir), + } + } + pub async fn new_with_options(options: Opt) -> Self { let meilisearch = setup_meilisearch(&options).unwrap(); let auth = AuthController::new(&options.db_path, &options.master_key).unwrap(); @@ 
-88,6 +116,10 @@ impl Server { pub async fn tasks(&self) -> (Value, StatusCode) { self.service.get("/tasks").await } + + pub async fn get_dump_status(&self, uid: &str) -> (Value, StatusCode) { + self.service.get(format!("/dumps/{}/status", uid)).await + } } pub fn default_settings(dir: impl AsRef) -> Opt { @@ -98,7 +130,7 @@ pub fn default_settings(dir: impl AsRef) -> Opt { master_key: None, env: "development".to_owned(), #[cfg(all(not(debug_assertions), feature = "analytics"))] - no_analytics: true, + no_analytics: Some(Some(true)), max_index_size: Byte::from_unit(4.0, ByteUnit::GiB).unwrap(), max_task_db_size: Byte::from_unit(4.0, ByteUnit::GiB).unwrap(), http_payload_size_limit: Byte::from_unit(10.0, ByteUnit::MiB).unwrap(), diff --git a/meilisearch-http/tests/documents/add_documents.rs b/meilisearch-http/tests/documents/add_documents.rs index 6556b675b..f6d55409d 100644 --- a/meilisearch-http/tests/documents/add_documents.rs +++ b/meilisearch-http/tests/documents/add_documents.rs @@ -563,7 +563,7 @@ async fn add_documents_no_index_creation() { assert_eq!(code, 200); assert_eq!(response["status"], "succeeded"); assert_eq!(response["uid"], 0); - assert_eq!(response["type"], "documentsAddition"); + assert_eq!(response["type"], "documentAddition"); assert_eq!(response["details"]["receivedDocuments"], 1); assert_eq!(response["details"]["indexedDocuments"], 1); @@ -633,7 +633,7 @@ async fn document_addition_with_primary_key() { assert_eq!(code, 200); assert_eq!(response["status"], "succeeded"); assert_eq!(response["uid"], 0); - assert_eq!(response["type"], "documentsAddition"); + assert_eq!(response["type"], "documentAddition"); assert_eq!(response["details"]["receivedDocuments"], 1); assert_eq!(response["details"]["indexedDocuments"], 1); @@ -662,7 +662,7 @@ async fn document_update_with_primary_key() { assert_eq!(code, 200); assert_eq!(response["status"], "succeeded"); assert_eq!(response["uid"], 0); - assert_eq!(response["type"], "documentsPartial"); + assert_eq!(response["type"], "documentPartial"); assert_eq!(response["details"]["indexedDocuments"], 1); assert_eq!(response["details"]["receivedDocuments"], 1); @@ -775,7 +775,7 @@ async fn add_larger_dataset() { let (response, code) = index.get_task(update_id).await; assert_eq!(code, 200); assert_eq!(response["status"], "succeeded"); - assert_eq!(response["type"], "documentsAddition"); + assert_eq!(response["type"], "documentAddition"); assert_eq!(response["details"]["indexedDocuments"], 77); assert_eq!(response["details"]["receivedDocuments"], 77); let (response, code) = index @@ -797,7 +797,7 @@ async fn update_larger_dataset() { index.wait_task(0).await; let (response, code) = index.get_task(0).await; assert_eq!(code, 200); - assert_eq!(response["type"], "documentsPartial"); + assert_eq!(response["type"], "documentPartial"); assert_eq!(response["details"]["indexedDocuments"], 77); let (response, code) = index .get_all_documents(GetAllDocumentsOptions { @@ -1032,3 +1032,26 @@ async fn error_primary_key_inference() { assert_eq!(response["error"], expected_error); } + +#[actix_rt::test] +async fn add_documents_with_primary_key_twice() { + let server = Server::new().await; + let index = server.index("test"); + + let documents = json!([ + { + "title": "11", + "desc": "foobar" + } + ]); + + index.add_documents(documents.clone(), Some("title")).await; + index.wait_task(0).await; + let (response, _code) = index.get_task(0).await; + assert_eq!(response["status"], "succeeded"); + + index.add_documents(documents, Some("title")).await; + 
index.wait_task(1).await; + let (response, _code) = index.get_task(1).await; + assert_eq!(response["status"], "succeeded"); +} diff --git a/meilisearch-http/tests/dumps.rs b/meilisearch-http/tests/dumps.rs new file mode 100644 index 000000000..843347bde --- /dev/null +++ b/meilisearch-http/tests/dumps.rs @@ -0,0 +1,22 @@ +#![allow(dead_code)] +mod common; + +use crate::common::Server; +use serde_json::json; + +#[actix_rt::test] +async fn get_unexisting_dump_status() { + let server = Server::new().await; + + let (response, code) = server.get_dump_status("foobar").await; + assert_eq!(code, 404); + + let expected_response = json!({ + "message": "Dump `foobar` not found.", + "code": "dump_not_found", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#dump_not_found" + }); + + assert_eq!(response, expected_response); +} diff --git a/meilisearch-http/tests/tasks/mod.rs b/meilisearch-http/tests/tasks/mod.rs index daba2e6ec..3edb89376 100644 --- a/meilisearch-http/tests/tasks/mod.rs +++ b/meilisearch-http/tests/tasks/mod.rs @@ -117,13 +117,13 @@ async fn test_summarized_task_view() { assert_valid_summarized_task!(response, "settingsUpdate", "test"); let (response, _) = index.update_documents(json!([{"id": 1}]), None).await; - assert_valid_summarized_task!(response, "documentsPartial", "test"); + assert_valid_summarized_task!(response, "documentPartial", "test"); let (response, _) = index.add_documents(json!([{"id": 1}]), None).await; - assert_valid_summarized_task!(response, "documentsAddition", "test"); + assert_valid_summarized_task!(response, "documentAddition", "test"); let (response, _) = index.delete_document(1).await; - assert_valid_summarized_task!(response, "documentsDeletion", "test"); + assert_valid_summarized_task!(response, "documentDeletion", "test"); let (response, _) = index.clear_all_documents().await; assert_valid_summarized_task!(response, "clearAll", "test"); diff --git a/meilisearch-lib/Cargo.toml b/meilisearch-lib/Cargo.toml index 5650055be..ade1a46af 100644 --- a/meilisearch-lib/Cargo.toml +++ b/meilisearch-lib/Cargo.toml @@ -28,6 +28,7 @@ itertools = "0.10.1" lazy_static = "1.4.0" log = "0.4.14" meilisearch-error = { path = "../meilisearch-error" } +meilisearch-auth = { path = "../meilisearch-auth" } milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.21.0" } mime = "0.3.16" num_cpus = "1.13.0" diff --git a/meilisearch-lib/proptest-regressions/index_resolver/mod.txt b/meilisearch-lib/proptest-regressions/index_resolver/mod.txt index 553b8f1d5..583db4918 100644 --- a/meilisearch-lib/proptest-regressions/index_resolver/mod.txt +++ b/meilisearch-lib/proptest-regressions/index_resolver/mod.txt @@ -17,3 +17,4 @@ cc 3a01c78db082434b8a4f8914abf0d1059d39f4426d16df20d72e1bd7ebb94a6a # shrinks to cc c450806df3921d1e6fe9b6af93d999e8196d0175b69b64f1810802582421e94a # shrinks to task = Task { id: 0, index_uid: IndexUid("a"), content: CreateIndex { primary_key: Some("") }, events: [] }, index_exists = false, index_op_fails = false, any_int = 0 cc fb6b98947cbdbdee05ed3c0bf2923aad2c311edc276253642eb43a0c0ec4888a # shrinks to task = Task { id: 0, index_uid: IndexUid("A"), content: CreateIndex { primary_key: Some("") }, events: [] }, index_exists = false, index_op_fails = true, any_int = 0 cc 1aa59d8e22484e9915efbb5818e1e1ab684aa61b166dc82130d6221663ba00bf # shrinks to task = Task { id: 0, index_uid: IndexUid("a"), content: DocumentDeletion(Clear), events: [] }, index_exists = true, index_op_fails = false, any_int = 0 +cc 
2e8644e6397b5f76e0b79f961fa125e2f45f42f26e03c453c9a174dfb427500d # shrinks to task = Task { id: 0, index_uid: IndexUid("0"), content: SettingsUpdate { settings: Settings { displayed_attributes: NotSet, searchable_attributes: NotSet, filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, synonyms: NotSet, distinct_attribute: NotSet, _kind: PhantomData }, is_deletion: false, allow_index_creation: false }, events: [] }, index_exists = false, index_op_fails = false, any_int = 0 diff --git a/meilisearch-lib/src/index/dump.rs b/meilisearch-lib/src/index/dump.rs index 8e703cab7..ace589529 100644 --- a/meilisearch-lib/src/index/dump.rs +++ b/meilisearch-lib/src/index/dump.rs @@ -8,7 +8,7 @@ use indexmap::IndexMap; use milli::documents::DocumentBatchReader; use serde::{Deserialize, Serialize}; -use crate::document_formats::read_ndjson; +use crate::document_formats::{read_ndjson, DocumentFormatError}; use crate::index::update_handler::UpdateHandler; use crate::index::updates::apply_settings_to_builder; @@ -128,23 +128,29 @@ impl Index { let mut tmp_doc_file = tempfile::tempfile()?; - read_ndjson(reader, &mut tmp_doc_file)?; + let empty = match read_ndjson(reader, &mut tmp_doc_file) { + // if there was no document in the file it's because the index was empty + Ok(_) => false, + Err(DocumentFormatError::EmptyPayload(_)) => true, + Err(e) => return Err(e.into()), + }; - tmp_doc_file.seek(SeekFrom::Start(0))?; + if !empty { + tmp_doc_file.seek(SeekFrom::Start(0))?; - let documents_reader = DocumentBatchReader::from_reader(tmp_doc_file)?; + let documents_reader = DocumentBatchReader::from_reader(tmp_doc_file)?; - //If the document file is empty, we don't perform the document addition, to prevent - //a primary key error to be thrown. - if !documents_reader.is_empty() { - let builder = update_handler - .update_builder() - .index_documents(&mut txn, &index); - builder.execute(documents_reader, |_| ())?; + //If the document file is empty, we don't perform the document addition, to prevent + //a primary key error to be thrown. 
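// Why the `empty` flag above: an index that was dumped while it held no documents
// produces an empty NDJSON documents file, and `read_ndjson` reports that as
// `DocumentFormatError::EmptyPayload`. Mapping that one error to `empty = true`
// turns the case into a no-op, so the rest of the dump (settings, other indexes)
// still loads, while any other error keeps being propagated via `Err(e.into())`.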
+ if !documents_reader.is_empty() { + let builder = update_handler + .update_builder() + .index_documents(&mut txn, &index); + builder.execute(documents_reader, |_| ())?; + } } txn.commit()?; - index.prepare_for_closing().wait(); Ok(()) diff --git a/meilisearch-lib/src/index/updates.rs b/meilisearch-lib/src/index/updates.rs index e9d9b0cd0..c1fc9a5c0 100644 --- a/meilisearch-lib/src/index/updates.rs +++ b/meilisearch-lib/src/index/updates.rs @@ -237,7 +237,9 @@ impl Index { let mut txn = self.write_txn()?; if let Some(primary_key) = primary_key { - self.update_primary_key_txn(&mut txn, primary_key)?; + if self.primary_key(&txn)?.is_none() { + self.update_primary_key_txn(&mut txn, primary_key)?; + } } let indexing_callback = |indexing_step| debug!("update: {:?}", indexing_step); diff --git a/meilisearch-lib/src/index_controller/dump_actor/compat/v3.rs b/meilisearch-lib/src/index_controller/dump_actor/compat/v3.rs index a7faf4c1b..597c11fe0 100644 --- a/meilisearch-lib/src/index_controller/dump_actor/compat/v3.rs +++ b/meilisearch-lib/src/index_controller/dump_actor/compat/v3.rs @@ -74,11 +74,13 @@ impl From for TaskContent { primary_key, // document count is unknown for legacy updates documents_count: 0, + allow_index_creation: true, }, Update::Settings(settings) => TaskContent::SettingsUpdate { settings, // There is no way to know now, so we assume it isn't is_deletion: false, + allow_index_creation: true, }, Update::ClearDocuments => TaskContent::DocumentDeletion(DocumentDeletion::Clear), } diff --git a/meilisearch-lib/src/index_controller/dump_actor/error.rs b/meilisearch-lib/src/index_controller/dump_actor/error.rs index 625049fe0..73faf1bbb 100644 --- a/meilisearch-lib/src/index_controller/dump_actor/error.rs +++ b/meilisearch-lib/src/index_controller/dump_actor/error.rs @@ -1,3 +1,4 @@ +use meilisearch_auth::error::AuthControllerError; use meilisearch_error::{internal_error, Code, ErrorCode}; use crate::{index_resolver::error::IndexResolverError, tasks::error::TaskError}; @@ -24,6 +25,7 @@ internal_error!( serde_json::error::Error, tempfile::PersistError, fs_extra::error::Error, + AuthControllerError, TaskError ); diff --git a/meilisearch-lib/src/index_controller/dump_actor/loaders/v4.rs b/meilisearch-lib/src/index_controller/dump_actor/loaders/v4.rs index ea71298e3..d342f010f 100644 --- a/meilisearch-lib/src/index_controller/dump_actor/loaders/v4.rs +++ b/meilisearch-lib/src/index_controller/dump_actor/loaders/v4.rs @@ -3,6 +3,7 @@ use std::sync::Arc; use heed::EnvOpenOptions; use log::info; +use meilisearch_auth::AuthController; use crate::analytics; use crate::index_controller::dump_actor::Metadata; @@ -38,6 +39,7 @@ pub fn load_dump( )?; UpdateFileStore::load_dump(src.as_ref(), &dst)?; TaskStore::load_dump(&src, env)?; + AuthController::load_dump(&src, &dst)?; analytics::copy_user_id(src.as_ref(), dst.as_ref()); info!("Loading indexes."); diff --git a/meilisearch-lib/src/index_controller/dump_actor/mod.rs b/meilisearch-lib/src/index_controller/dump_actor/mod.rs index b6b389b38..b7d2015a4 100644 --- a/meilisearch-lib/src/index_controller/dump_actor/mod.rs +++ b/meilisearch-lib/src/index_controller/dump_actor/mod.rs @@ -7,6 +7,7 @@ use serde::{Deserialize, Serialize}; pub use actor::DumpActor; pub use handle_impl::*; +use meilisearch_auth::AuthController; pub use message::DumpMsg; use tokio::fs::create_dir_all; use tokio::sync::oneshot; @@ -297,6 +298,8 @@ impl DumpJob { .dump(&temp_dump_path, self.update_file_store.clone()) .await?; + AuthController::dump(&self.db_path, 
&temp_dump_path)?; + let dump_path = tokio::task::spawn_blocking(move || -> Result { // for now we simply copy the updates/updates_files // FIXME: We may copy more files than necessary, if new files are added while we are diff --git a/meilisearch-lib/src/index_controller/error.rs b/meilisearch-lib/src/index_controller/error.rs index 6ec1cea35..85af76623 100644 --- a/meilisearch-lib/src/index_controller/error.rs +++ b/meilisearch-lib/src/index_controller/error.rs @@ -59,10 +59,7 @@ impl ErrorCode for IndexControllerError { IndexControllerError::DocumentFormatError(e) => e.error_code(), IndexControllerError::MissingPayload(_) => Code::MissingPayload, IndexControllerError::PayloadTooLarge => Code::PayloadTooLarge, - IndexControllerError::DumpError(DumpActorError::DumpAlreadyRunning) => { - Code::DumpAlreadyInProgress - } - IndexControllerError::DumpError(_) => Code::DumpProcessFailed, + IndexControllerError::DumpError(e) => e.error_code(), } } } diff --git a/meilisearch-lib/src/index_controller/mod.rs b/meilisearch-lib/src/index_controller/mod.rs index 4e2343850..3e3952058 100644 --- a/meilisearch-lib/src/index_controller/mod.rs +++ b/meilisearch-lib/src/index_controller/mod.rs @@ -119,6 +119,7 @@ pub enum Update { settings: Settings, /// Indicates whether the update was a deletion is_deletion: bool, + allow_index_creation: bool, }, DocumentAddition { #[derivative(Debug = "ignore")] @@ -126,6 +127,7 @@ pub enum Update { primary_key: Option, method: IndexDocumentsMethod, format: DocumentAdditionFormat, + allow_index_creation: bool, }, DeleteIndex, CreateIndex { @@ -165,7 +167,11 @@ impl IndexControllerBuilder { let db_exists = db_path.as_ref().exists(); if db_exists { - versioning::check_version_file(db_path.as_ref())?; + // Directory could be pre-created without any database in. + let db_is_empty = db_path.as_ref().read_dir()?.next().is_none(); + if !db_is_empty { + versioning::check_version_file(db_path.as_ref())?; + } } if let Some(ref path) = self.import_snapshot { @@ -340,15 +346,18 @@ where Update::Settings { settings, is_deletion, + allow_index_creation, } => TaskContent::SettingsUpdate { settings, is_deletion, + allow_index_creation, }, Update::DocumentAddition { mut payload, primary_key, format, method, + allow_index_creation, } => { let mut buffer = Vec::new(); while let Some(bytes) = payload.next().await { @@ -380,6 +389,7 @@ where merge_strategy: method, primary_key, documents_count, + allow_index_creation, } } Update::DeleteIndex => TaskContent::IndexDeletion, diff --git a/meilisearch-lib/src/index_controller/versioning/error.rs b/meilisearch-lib/src/index_controller/versioning/error.rs index 4dc29e862..8c29e267b 100644 --- a/meilisearch-lib/src/index_controller/versioning/error.rs +++ b/meilisearch-lib/src/index_controller/versioning/error.rs @@ -1,11 +1,11 @@ #[derive(thiserror::Error, Debug)] pub enum VersionFileError { - #[error("Version file is missing or the previous MeiliSearch engine version was below 0.24.0. Use a dump to update Meilisearch.")] + #[error("Version file is missing or the previous MeiliSearch engine version was below 0.24.0. Use a dump to update MeiliSearch.")] MissingVersionFile, #[error("Version file is corrupted and thus MeiliSearch is unable to determine the version of the database.")] MalformedVersionFile, #[error( - "Expected MeiliSearch engine version: {major}.{minor}.{patch}, current engine version: {}. To update Meilisearch use a dump.", + "Expected MeiliSearch engine version: {major}.{minor}.{patch}, current engine version: {}. 
To update MeiliSearch use a dump.", env!("CARGO_PKG_VERSION").to_string() )] VersionMismatch { diff --git a/meilisearch-lib/src/index_controller/versioning/mod.rs b/meilisearch-lib/src/index_controller/versioning/mod.rs index eba5d477e..4de894ebb 100644 --- a/meilisearch-lib/src/index_controller/versioning/mod.rs +++ b/meilisearch-lib/src/index_controller/versioning/mod.rs @@ -23,7 +23,7 @@ pub fn create_version_file(db_path: &Path) -> anyhow::Result<()> { Ok(()) } -// Ensures Meilisearch version is compatible with the database, returns an error versions mismatch. +// Ensures MeiliSearch version is compatible with the database, returns an error versions mismatch. pub fn check_version_file(db_path: &Path) -> anyhow::Result<()> { let version_path = db_path.join(VERSION_FILE_NAME); diff --git a/meilisearch-lib/src/index_resolver/mod.rs b/meilisearch-lib/src/index_resolver/mod.rs index 0b6beb2f3..c8b498d70 100644 --- a/meilisearch-lib/src/index_resolver/mod.rs +++ b/meilisearch-lib/src/index_resolver/mod.rs @@ -187,13 +187,18 @@ where content_uuid, merge_strategy, primary_key, + allow_index_creation, .. } => { let primary_key = primary_key.clone(); let content_uuid = *content_uuid; let method = *merge_strategy; - let index = self.get_or_create_index(index_uid, task.id).await?; + let index = if *allow_index_creation { + self.get_or_create_index(index_uid, task.id).await? + } else { + self.get_index(index_uid.into_inner()).await? + }; let file_store = self.file_store.clone(); let result = spawn_blocking(move || { index.update_documents(method, content_uuid, primary_key, file_store) @@ -226,8 +231,9 @@ where TaskContent::SettingsUpdate { settings, is_deletion, + allow_index_creation, } => { - let index = if *is_deletion { + let index = if *is_deletion || !*allow_index_creation { self.get_index(index_uid.into_inner()).await? } else { self.get_or_create_index(index_uid, task.id).await? @@ -502,8 +508,8 @@ mod test { match &task.content { // an unexisting index should trigger an index creation in the folllowing cases: - TaskContent::DocumentAddition { .. } - | TaskContent::SettingsUpdate { is_deletion: false, .. } + TaskContent::DocumentAddition { allow_index_creation: true, .. } + | TaskContent::SettingsUpdate { allow_index_creation: true, is_deletion: false, .. } | TaskContent::IndexCreation { .. } if !index_exists => { index_store .expect_create() @@ -565,6 +571,8 @@ mod test { || (!index_exists && matches!(task.content, TaskContent::IndexDeletion | TaskContent::DocumentDeletion(_) | TaskContent::SettingsUpdate { is_deletion: true, ..} + | TaskContent::SettingsUpdate { allow_index_creation: false, ..} + | TaskContent::DocumentAddition { allow_index_creation: false, ..} | TaskContent::IndexUpdate { .. 
} )) { assert!(result.is_err(), "{:?}", result); diff --git a/meilisearch-lib/src/snapshot.rs b/meilisearch-lib/src/snapshot.rs index 556e7fabd..d35922a68 100644 --- a/meilisearch-lib/src/snapshot.rs +++ b/meilisearch-lib/src/snapshot.rs @@ -107,6 +107,7 @@ impl SnapshotJob { self.snapshot_meta_env(temp_snapshot_path)?; self.snapshot_file_store(temp_snapshot_path)?; self.snapshot_indexes(temp_snapshot_path)?; + self.snapshot_auth(temp_snapshot_path)?; let db_name = self .src_path @@ -190,4 +191,18 @@ impl SnapshotJob { Ok(()) } + + fn snapshot_auth(&self, path: &Path) -> anyhow::Result<()> { + let auth_path = self.src_path.join("auth"); + let dst = path.join("auth"); + std::fs::create_dir_all(&dst)?; + let dst = dst.join("data.mdb"); + + let mut options = heed::EnvOpenOptions::new(); + options.map_size(1_073_741_824); + let env = options.open(auth_path)?; + env.copy_to_path(dst, heed::CompactionOption::Enabled)?; + + Ok(()) + } } diff --git a/meilisearch-lib/src/tasks/task.rs b/meilisearch-lib/src/tasks/task.rs index 028814136..add07f2ad 100644 --- a/meilisearch-lib/src/tasks/task.rs +++ b/meilisearch-lib/src/tasks/task.rs @@ -55,7 +55,7 @@ pub enum TaskEvent { }, } -/// A task represents an operation that Meilisearch must do. +/// A task represents an operation that MeiliSearch must do. /// It's stored on disk and executed from the lowest to highest Task id. /// Everytime a new task is created it has a higher Task id than the previous one. /// See also `Job`. @@ -91,7 +91,7 @@ impl Task { /// A job is like a volatile priority `Task`. /// It should be processed as fast as possible and is not stored on disk. -/// This means, when Meilisearch is closed all your unprocessed jobs will disappear. +/// This means, when MeiliSearch is closed all your unprocessed jobs will disappear. #[derive(Debug, derivative::Derivative)] #[derivative(PartialEq)] pub enum Job { @@ -134,12 +134,14 @@ pub enum TaskContent { merge_strategy: IndexDocumentsMethod, primary_key: Option, documents_count: usize, + allow_index_creation: bool, }, DocumentDeletion(DocumentDeletion), SettingsUpdate { settings: Settings, /// Indicates whether the task was a deletion is_deletion: bool, + allow_index_creation: bool, }, IndexDeletion, IndexCreation { diff --git a/meilisearch-lib/src/tasks/task_store/store.rs b/meilisearch-lib/src/tasks/task_store/store.rs index 49413f167..52b001dc7 100644 --- a/meilisearch-lib/src/tasks/task_store/store.rs +++ b/meilisearch-lib/src/tasks/task_store/store.rs @@ -87,7 +87,7 @@ impl Store { /// This function should be called *right after* creating the store. /// It put back all unfinished update in the `Created` state. This /// allow us to re-enqueue an update that didn't had the time to finish - /// when Meilisearch closed. + /// when MeiliSearch closed. pub fn reset_and_return_unfinished_tasks(&mut self) -> Result>> { let mut unfinished_tasks: BinaryHeap> = BinaryHeap::new();
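Taken together, the resolver and task changes above mean a document addition or settings update now carries an allow_index_creation flag, and the index resolver only goes through get_or_create_index when that flag is set; with the flag cleared (or for a settings deletion) it uses get_index, so the task fails when its target index does not exist yet. A self-contained toy sketch of that rule, using illustrative stand-in names rather than the real TaskContent variants:

    // Toy mirror of the gating rule: a task may implicitly create an index only
    // when it is not a deletion and its `allow_index_creation` flag is set.
    enum TaskKind {
        DocumentAddition { allow_index_creation: bool },
        SettingsUpdate { is_deletion: bool, allow_index_creation: bool },
        IndexDeletion,
    }

    fn may_create_index(task: &TaskKind) -> bool {
        match task {
            TaskKind::DocumentAddition { allow_index_creation } => *allow_index_creation,
            TaskKind::SettingsUpdate { is_deletion, allow_index_creation } => {
                !*is_deletion && *allow_index_creation
            }
            TaskKind::IndexDeletion => false,
        }
    }

    fn main() {
        // a task that is allowed to create its index behaves as before
        assert!(may_create_index(&TaskKind::DocumentAddition { allow_index_creation: true }));
        // a task with the flag cleared must target an index that already exists
        assert!(!may_create_index(&TaskKind::SettingsUpdate { is_deletion: false, allow_index_creation: false }));
        assert!(!may_create_index(&TaskKind::IndexDeletion));
    }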