1910: After v0.24.0: import `stable` in `main` r=MarinPostma a=curquiza



Co-authored-by: Tamo <tamo@meilisearch.com>
Co-authored-by: many <maxime@meilisearch.com>
Co-authored-by: bors[bot] <26634292+bors[bot]@users.noreply.github.com>
Co-authored-by: Guillaume Mourier <guillaume@meilisearch.com>
Co-authored-by: Irevoire <tamo@meilisearch.com>
Co-authored-by: Clémentine Urquizar <clementine@meilisearch.com>
commit 8363200fd7, authored by bors[bot] on 2021-11-17 12:48:56 +00:00, committed by GitHub
22 changed files with 320 additions and 157 deletions

Cargo.lock (generated)

@@ -1804,8 +1804,8 @@ dependencies = [
 [[package]]
 name = "milli"
-version = "0.20.0"
-source = "git+https://github.com/meilisearch/milli.git?tag=v0.20.0#5a6d22d4ec51dda0aba94b314e1b5a38af9400a2"
+version = "0.20.2"
+source = "git+https://github.com/meilisearch/milli.git?tag=v0.20.2#a2fc74f010116874c9be01d98a798d30ed718435"
 dependencies = [
  "bimap",
  "bincode",


@@ -53,7 +53,6 @@ pub enum Code {
 IndexAlreadyExists,
 IndexNotFound,
 InvalidIndexUid,
-OpenIndex,
 // invalid state error
 InvalidState,
@@ -64,19 +63,21 @@ pub enum Code {
 MissingDocumentId,
 InvalidDocumentId,
-Facet,
 Filter,
 Sort,
 BadParameter,
 BadRequest,
+DatabaseSizeLimitReached,
 DocumentNotFound,
 Internal,
 InvalidGeoField,
 InvalidRankingRule,
+InvalidStore,
 InvalidToken,
 MissingAuthorizationHeader,
-NotFound,
+NoSpaceLeftOnDevice,
+DumpNotFound,
 TaskNotFound,
 PayloadTooLarge,
 RetrieveDocument,
@@ -100,25 +101,31 @@ impl Code {
 match self {
 // index related errors
 // create index is thrown on internal error while creating an index.
-CreateIndex => ErrCode::internal("index_creation_failed", StatusCode::BAD_REQUEST),
+CreateIndex => {
+ErrCode::internal("index_creation_failed", StatusCode::INTERNAL_SERVER_ERROR)
+}
 IndexAlreadyExists => ErrCode::invalid("index_already_exists", StatusCode::CONFLICT),
 // thrown when requesting an unexisting index
 IndexNotFound => ErrCode::invalid("index_not_found", StatusCode::NOT_FOUND),
 InvalidIndexUid => ErrCode::invalid("invalid_index_uid", StatusCode::BAD_REQUEST),
-OpenIndex => {
-ErrCode::internal("index_not_accessible", StatusCode::INTERNAL_SERVER_ERROR)
-}
 // invalid state error
 InvalidState => ErrCode::internal("invalid_state", StatusCode::INTERNAL_SERVER_ERROR),
 // thrown when no primary key has been set
-MissingPrimaryKey => ErrCode::invalid("missing_primary_key", StatusCode::BAD_REQUEST),
+MissingPrimaryKey => {
+ErrCode::invalid("primary_key_inference_failed", StatusCode::BAD_REQUEST)
+}
 // error thrown when trying to set an already existing primary key
 PrimaryKeyAlreadyPresent => {
 ErrCode::invalid("index_primary_key_already_exists", StatusCode::BAD_REQUEST)
 }
 // invalid ranking rule
-InvalidRankingRule => ErrCode::invalid("invalid_request", StatusCode::BAD_REQUEST),
+InvalidRankingRule => ErrCode::invalid("invalid_ranking_rule", StatusCode::BAD_REQUEST),
+// invalid database
+InvalidStore => {
+ErrCode::internal("invalid_store_file", StatusCode::INTERNAL_SERVER_ERROR)
+}
 // invalid document
 MaxFieldsLimitExceeded => {
@@ -127,8 +134,6 @@ impl Code {
 MissingDocumentId => ErrCode::invalid("missing_document_id", StatusCode::BAD_REQUEST),
 InvalidDocumentId => ErrCode::invalid("invalid_document_id", StatusCode::BAD_REQUEST),
-// error related to facets
-Facet => ErrCode::invalid("invalid_facet", StatusCode::BAD_REQUEST),
 // error related to filters
 Filter => ErrCode::invalid("invalid_filter", StatusCode::BAD_REQUEST),
 // error related to sorts
@@ -136,17 +141,22 @@ impl Code {
 BadParameter => ErrCode::invalid("bad_parameter", StatusCode::BAD_REQUEST),
 BadRequest => ErrCode::invalid("bad_request", StatusCode::BAD_REQUEST),
+DatabaseSizeLimitReached => ErrCode::internal(
+"database_size_limit_reached",
+StatusCode::INTERNAL_SERVER_ERROR,
+),
 DocumentNotFound => ErrCode::invalid("document_not_found", StatusCode::NOT_FOUND),
 Internal => ErrCode::internal("internal", StatusCode::INTERNAL_SERVER_ERROR),
-InvalidGeoField => {
-ErrCode::authentication("invalid_geo_field", StatusCode::BAD_REQUEST)
-}
+InvalidGeoField => ErrCode::invalid("invalid_geo_field", StatusCode::BAD_REQUEST),
 InvalidToken => ErrCode::authentication("invalid_api_key", StatusCode::FORBIDDEN),
 MissingAuthorizationHeader => {
 ErrCode::authentication("missing_authorization_header", StatusCode::UNAUTHORIZED)
 }
 TaskNotFound => ErrCode::invalid("task_not_found", StatusCode::NOT_FOUND),
-NotFound => ErrCode::invalid("not_found", StatusCode::NOT_FOUND),
+DumpNotFound => ErrCode::invalid("dump_not_found", StatusCode::NOT_FOUND),
+NoSpaceLeftOnDevice => {
+ErrCode::internal("no_space_left_on_device", StatusCode::INTERNAL_SERVER_ERROR)
+}
 PayloadTooLarge => ErrCode::invalid("payload_too_large", StatusCode::PAYLOAD_TOO_LARGE),
 RetrieveDocument => {
 ErrCode::internal("unretrievable_document", StatusCode::BAD_REQUEST)
@@ -158,7 +168,7 @@ impl Code {
 // error related to dump
 DumpAlreadyInProgress => {
-ErrCode::invalid("dump_already_in_progress", StatusCode::CONFLICT)
+ErrCode::invalid("dump_already_processing", StatusCode::CONFLICT)
 }
 DumpProcessFailed => {
 ErrCode::internal("dump_process_failed", StatusCode::INTERNAL_SERVER_ERROR)


@@ -98,5 +98,5 @@ default = ["analytics", "mini-dashboard"]
 tikv-jemallocator = "0.4.1"
 [package.metadata.mini-dashboard]
-assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.1.4/build.zip"
-sha1 = "750e8a8e56cfa61fbf9ead14b08a5f17ad3f3d37"
+assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.1.5/build.zip"
+sha1 = "1d955ea91b7691bd6fc207cb39866b82210783f0"


@@ -18,7 +18,7 @@ impl SearchAggregator {
 Self::default()
 }
-pub fn finish(&mut self, _: &dyn Any) {}
+pub fn succeed(&mut self, _: &dyn Any) {}
 }
 impl MockAnalytics {


@@ -1,4 +1,4 @@
-use std::collections::{HashMap, HashSet};
+use std::collections::{BinaryHeap, HashMap, HashSet};
 use std::fs;
 use std::path::Path;
 use std::sync::Arc;
@@ -38,7 +38,7 @@ fn write_user_id(db_path: &Path, user_id: &str) {
 }
 }
-const SEGMENT_API_KEY: &str = "vHi89WrNDckHSQssyUJqLvIyp2QFITSC";
+const SEGMENT_API_KEY: &str = "P3FWhhEsJiEDCuEHpmcN9DHcK4hVfBvb";
 pub fn extract_user_agents(request: &HttpRequest) -> Vec<String> {
 request
@@ -187,7 +187,7 @@ impl Segment {
 "kernel_version": kernel_version,
 "cores": sys.processors().len(),
 "ram_size": sys.total_memory(),
-"disk_size": sys.disks().iter().map(|disk| disk.available_space()).max(),
+"disk_size": sys.disks().iter().map(|disk| disk.total_space()).max(),
 "server_provider": std::env::var("MEILI_SERVER_PROVIDER").ok(),
 })
 });
@@ -254,9 +254,9 @@ impl Segment {
 .await;
 }
 let get_search = std::mem::take(&mut self.get_search_aggregator)
-.into_event(&self.user, "Document Searched GET");
+.into_event(&self.user, "Documents Searched GET");
 let post_search = std::mem::take(&mut self.post_search_aggregator)
-.into_event(&self.user, "Document Searched POST");
+.into_event(&self.user, "Documents Searched POST");
 let add_documents = std::mem::take(&mut self.add_documents_aggregator)
 .into_event(&self.user, "Documents Added");
 let update_documents = std::mem::take(&mut self.update_documents_aggregator)
@@ -286,7 +286,7 @@ pub struct SearchAggregator {
 // requests
 total_received: usize,
 total_succeeded: usize,
-time_spent: Vec<usize>,
+time_spent: BinaryHeap<usize>,
 // sort
 sort_with_geo_point: bool,
@@ -364,7 +364,7 @@ impl SearchAggregator {
 ret
 }
-pub fn finish(&mut self, result: &SearchResult) {
+pub fn succeed(&mut self, result: &SearchResult) {
 self.total_succeeded += 1;
 self.time_spent.push(result.processing_time_ms as usize);
 }
@@ -398,17 +398,21 @@ impl SearchAggregator {
 self.max_offset = self.max_offset.max(other.max_offset);
 }
-pub fn into_event(mut self, user: &User, event_name: &str) -> Option<Track> {
+pub fn into_event(self, user: &User, event_name: &str) -> Option<Track> {
 if self.total_received == 0 {
 None
 } else {
+// the index of the 99th percentage of value
 let percentile_99th = 0.99 * (self.total_succeeded as f64 - 1.) + 1.;
-self.time_spent.drain(percentile_99th as usize..);
+// we get all the values in a sorted manner
+let time_spent = self.time_spent.into_sorted_vec();
+// We are only intersted by the slowest value of the 99th fastest results
+let time_spent = time_spent[percentile_99th as usize];
 let properties = json!({
 "user-agent": self.user_agents,
 "requests": {
-"99th_response_time": format!("{:.2}", self.time_spent.iter().sum::<usize>() as f64 / self.time_spent.len() as f64),
+"99th_response_time": format!("{:.2}", time_spent),
 "total_succeeded": self.total_succeeded,
 "total_failed": self.total_received.saturating_sub(self.total_succeeded), // just to be sure we never panics
 "total_received": self.total_received,


@@ -2,14 +2,14 @@ use meilisearch_error::{Code, ErrorCode};
 #[derive(Debug, thiserror::Error)]
 pub enum AuthenticationError {
-#[error("You must have an authorization token")]
+#[error("The X-MEILI-API-KEY header is missing.")]
 MissingAuthorizationHeader,
-#[error("Invalid API key")]
+#[error("The provided API key is invalid.")]
 InvalidToken(String),
 // Triggered on configuration error.
-#[error("Irretrievable state")]
+#[error("An internal error has occurred. `Irretrievable state`.")]
 IrretrievableState,
-#[error("Unknown authentication policy")]
+#[error("An internal error has occurred. `Unknown authentication policy`.")]
 UnknownPolicy,
 }


@@ -118,17 +118,18 @@ pub async fn search_with_url_query(
 let mut aggregate = SearchAggregator::from_query(&query, &req);
-let search_result = meilisearch
-.search(path.into_inner().index_uid, query)
-.await?;
+let search_result = meilisearch.search(path.into_inner().index_uid, query).await;
+if let Ok(ref search_result) = search_result {
+aggregate.succeed(search_result);
+}
+analytics.get_search(aggregate);
+let search_result = search_result?;
 // Tests that the nb_hits is always set to false
 #[cfg(test)]
 assert!(!search_result.exhaustive_nb_hits);
-aggregate.finish(&search_result);
-analytics.get_search(aggregate);
 debug!("returns: {:?}", search_result);
 Ok(HttpResponse::Ok().json(search_result))
 }
@@ -145,17 +146,18 @@ pub async fn search_with_post(
 let mut aggregate = SearchAggregator::from_query(&query, &req);
-let search_result = meilisearch
-.search(path.into_inner().index_uid, query)
-.await?;
+let search_result = meilisearch.search(path.into_inner().index_uid, query).await;
+if let Ok(ref search_result) = search_result {
+aggregate.succeed(search_result);
+}
+analytics.post_search(aggregate);
+let search_result = search_result?;
 // Tests that the nb_hits is always set to false
 #[cfg(test)]
 assert!(!search_result.exhaustive_nb_hits);
-aggregate.finish(&search_result);
-analytics.post_search(aggregate);
 debug!("returns: {:?}", search_result);
 Ok(HttpResponse::Ok().json(search_result))
 }
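
Note: both handlers above follow the same shape, which is to keep the `Result`, record the analytics event while the outcome is still known, and only then propagate a possible error with `?`. A hedged, self-contained sketch of that ordering; the `Aggregate`, `search`, and `handle` names below are stand-ins, not the actual handler API:

```rust
// Illustrative only: record telemetry on success before the error is propagated.
#[derive(Default)]
struct Aggregate {
    total_received: usize,
    total_succeeded: usize,
}

impl Aggregate {
    fn succeed(&mut self, _result: &str) {
        self.total_succeeded += 1;
    }
}

fn search(query: &str) -> Result<String, String> {
    if query.is_empty() {
        Err("empty query".to_string())
    } else {
        Ok(format!("hits for `{query}`"))
    }
}

fn handle(query: &str, sink: &mut Vec<Aggregate>) -> Result<String, String> {
    let mut aggregate = Aggregate { total_received: 1, ..Default::default() };

    // Keep the Result instead of using `?` immediately...
    let search_result = search(query);
    if let Ok(ref result) = search_result {
        aggregate.succeed(result);
    }
    // ...so the aggregate is recorded whether the search failed or not.
    sink.push(aggregate);

    // Only now propagate the error to the caller.
    let search_result = search_result?;
    Ok(search_result)
}

fn main() {
    let mut sink = Vec::new();
    let _ = handle("", &mut sink);
    let _ = handle("doggo", &mut sink);
    assert_eq!(sink.len(), 2); // both requests were counted, including the failed one
}
```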


@@ -97,8 +97,7 @@ pub struct FailedUpdateResult {
 pub update_id: u64,
 #[serde(rename = "type")]
 pub update_type: UpdateType,
-#[serde(flatten)]
-pub response: ResponseError,
+pub error: ResponseError,
 pub duration: f64, // in seconds
 pub enqueued_at: DateTime<Utc>,
 pub processed_at: DateTime<Utc>,
@@ -190,12 +189,12 @@ impl From<UpdateStatus> for UpdateStatusResponse {
 let update_id = failed.id();
 let processed_at = failed.failed_at;
 let enqueued_at = failed.from.from.enqueued_at;
-let response = failed.into();
+let error = failed.into();
 let content = FailedUpdateResult {
 update_id,
 update_type,
-response,
+error,
 duration,
 enqueued_at,
 processed_at,
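
Note: dropping `#[serde(flatten)]` and renaming the field changes the JSON shape of a failed update: the error details move from the top level of the update object into a nested `error` object, which is what the reworked tests further down assert against. A minimal sketch of the difference, using serde and serde_json as in the diff; the struct names are hypothetical:

```rust
use serde::Serialize;
use serde_json::json;

#[derive(Serialize)]
struct ResponseError {
    message: String,
    code: String,
}

// Old shape: the error fields were flattened into the update object itself.
#[derive(Serialize)]
struct FailedUpdateOld {
    update_id: u64,
    #[serde(flatten)]
    response: ResponseError,
}

// New shape: the error lives under a dedicated `error` key.
#[derive(Serialize)]
struct FailedUpdateNew {
    update_id: u64,
    error: ResponseError,
}

fn main() {
    let err = || ResponseError { message: "boom".into(), code: "internal".into() };

    let old = serde_json::to_value(FailedUpdateOld { update_id: 0, response: err() }).unwrap();
    assert_eq!(old, json!({ "update_id": 0, "message": "boom", "code": "internal" }));

    let new = serde_json::to_value(FailedUpdateNew { update_id: 0, error: err() }).unwrap();
    assert_eq!(new, json!({ "update_id": 0, "error": { "message": "boom", "code": "internal" } }));
}
```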


@@ -7,6 +7,7 @@ use actix_web::http::StatusCode;
 use paste::paste;
 use serde_json::{json, Value};
 use tokio::time::sleep;
+use urlencoding::encode;
 use super::service::Service;
@@ -14,12 +15,12 @@ macro_rules! make_settings_test_routes {
 ($($name:ident),+) => {
 $(paste! {
 pub async fn [<update_$name>](&self, value: Value) -> (Value, StatusCode) {
-let url = format!("/indexes/{}/settings/{}", self.uid, stringify!($name).replace("_", "-"));
+let url = format!("/indexes/{}/settings/{}", encode(self.uid.as_ref()).to_string(), stringify!($name).replace("_", "-"));
 self.service.post(url, value).await
 }
 pub async fn [<get_$name>](&self) -> (Value, StatusCode) {
-let url = format!("/indexes/{}/settings/{}", self.uid, stringify!($name).replace("_", "-"));
+let url = format!("/indexes/{}/settings/{}", encode(self.uid.as_ref()).to_string(), stringify!($name).replace("_", "-"));
 self.service.get(url).await
 }
 })*
@@ -34,12 +35,15 @@ pub struct Index<'a> {
 #[allow(dead_code)]
 impl Index<'_> {
 pub async fn get(&self) -> (Value, StatusCode) {
-let url = format!("/indexes/{}", self.uid);
+let url = format!("/indexes/{}", encode(self.uid.as_ref()).to_string());
 self.service.get(url).await
 }
 pub async fn load_test_set(&self) -> u64 {
-let url = format!("/indexes/{}/documents", self.uid);
+let url = format!(
+"/indexes/{}/documents",
+encode(self.uid.as_ref()).to_string()
+);
 let (response, code) = self
 .service
 .post_str(url, include_str!("../assets/test_set.json"))
@@ -62,13 +66,13 @@ impl Index<'_> {
 let body = json!({
 "primaryKey": primary_key,
 });
-let url = format!("/indexes/{}", self.uid);
+let url = format!("/indexes/{}", encode(self.uid.as_ref()).to_string());
 self.service.put(url, body).await
 }
 pub async fn delete(&self) -> (Value, StatusCode) {
-let url = format!("/indexes/{}", self.uid);
+let url = format!("/indexes/{}", encode(self.uid.as_ref()).to_string());
 self.service.delete(url).await
 }
@@ -78,8 +82,15 @@ impl Index<'_> {
 primary_key: Option<&str>,
 ) -> (Value, StatusCode) {
 let url = match primary_key {
-Some(key) => format!("/indexes/{}/documents?primaryKey={}", self.uid, key),
-None => format!("/indexes/{}/documents", self.uid),
+Some(key) => format!(
+"/indexes/{}/documents?primaryKey={}",
+encode(self.uid.as_ref()).to_string(),
+key
+),
+None => format!(
+"/indexes/{}/documents",
+encode(self.uid.as_ref()).to_string()
+),
 };
 self.service.post(url, documents).await
 }
@@ -90,15 +101,26 @@ impl Index<'_> {
 primary_key: Option<&str>,
 ) -> (Value, StatusCode) {
 let url = match primary_key {
-Some(key) => format!("/indexes/{}/documents?primaryKey={}", self.uid, key),
-None => format!("/indexes/{}/documents", self.uid),
+Some(key) => format!(
+"/indexes/{}/documents?primaryKey={}",
+encode(self.uid.as_ref()).to_string(),
+key
+),
+None => format!(
+"/indexes/{}/documents",
+encode(self.uid.as_ref()).to_string()
+),
 };
 self.service.put(url, documents).await
 }
 pub async fn wait_update_id(&self, update_id: u64) -> Value {
 // try 10 times to get status, or panic to not wait forever
-let url = format!("/indexes/{}/updates/{}", self.uid, update_id);
+let url = format!(
+"/indexes/{}/updates/{}",
+encode(self.uid.as_ref()).to_string(),
+update_id
+);
 for _ in 0..10 {
 let (response, status_code) = self.service.get(&url).await;
 assert_eq!(status_code, 200, "response: {}", response);
@@ -113,12 +135,16 @@ impl Index<'_> {
 }
 pub async fn get_update(&self, update_id: u64) -> (Value, StatusCode) {
-let url = format!("/indexes/{}/updates/{}", self.uid, update_id);
+let url = format!(
+"/indexes/{}/updates/{}",
+encode(self.uid.as_ref()).to_string(),
+update_id
+);
 self.service.get(url).await
 }
 pub async fn list_updates(&self) -> (Value, StatusCode) {
-let url = format!("/indexes/{}/updates", self.uid);
+let url = format!("/indexes/{}/updates", encode(self.uid.as_ref()).to_string());
 self.service.get(url).await
 }
@@ -127,12 +153,19 @@ impl Index<'_> {
 id: u64,
 _options: Option<GetDocumentOptions>,
 ) -> (Value, StatusCode) {
-let url = format!("/indexes/{}/documents/{}", self.uid, id);
+let url = format!(
+"/indexes/{}/documents/{}",
+encode(self.uid.as_ref()).to_string(),
+id
+);
 self.service.get(url).await
 }
 pub async fn get_all_documents(&self, options: GetAllDocumentsOptions) -> (Value, StatusCode) {
-let mut url = format!("/indexes/{}/documents?", self.uid);
+let mut url = format!(
+"/indexes/{}/documents?",
+encode(self.uid.as_ref()).to_string()
+);
 if let Some(limit) = options.limit {
 url.push_str(&format!("limit={}&", limit));
 }
@@ -152,39 +185,58 @@ impl Index<'_> {
 }
 pub async fn delete_document(&self, id: u64) -> (Value, StatusCode) {
-let url = format!("/indexes/{}/documents/{}", self.uid, id);
+let url = format!(
+"/indexes/{}/documents/{}",
+encode(self.uid.as_ref()).to_string(),
+id
+);
 self.service.delete(url).await
 }
 pub async fn clear_all_documents(&self) -> (Value, StatusCode) {
-let url = format!("/indexes/{}/documents", self.uid);
+let url = format!(
+"/indexes/{}/documents",
+encode(self.uid.as_ref()).to_string()
+);
 self.service.delete(url).await
 }
 pub async fn delete_batch(&self, ids: Vec<u64>) -> (Value, StatusCode) {
-let url = format!("/indexes/{}/documents/delete-batch", self.uid);
+let url = format!(
+"/indexes/{}/documents/delete-batch",
+encode(self.uid.as_ref()).to_string()
+);
 self.service
 .post(url, serde_json::to_value(&ids).unwrap())
 .await
 }
 pub async fn settings(&self) -> (Value, StatusCode) {
-let url = format!("/indexes/{}/settings", self.uid);
+let url = format!(
+"/indexes/{}/settings",
+encode(self.uid.as_ref()).to_string()
+);
 self.service.get(url).await
 }
 pub async fn update_settings(&self, settings: Value) -> (Value, StatusCode) {
-let url = format!("/indexes/{}/settings", self.uid);
+let url = format!(
+"/indexes/{}/settings",
+encode(self.uid.as_ref()).to_string()
+);
 self.service.post(url, settings).await
 }
 pub async fn delete_settings(&self) -> (Value, StatusCode) {
-let url = format!("/indexes/{}/settings", self.uid);
+let url = format!(
+"/indexes/{}/settings",
+encode(self.uid.as_ref()).to_string()
+);
 self.service.delete(url).await
 }
 pub async fn stats(&self) -> (Value, StatusCode) {
-let url = format!("/indexes/{}/stats", self.uid);
+let url = format!("/indexes/{}/stats", encode(self.uid.as_ref()).to_string());
 self.service.get(url).await
 }
@@ -209,13 +261,17 @@ impl Index<'_> {
 }
 pub async fn search_post(&self, query: Value) -> (Value, StatusCode) {
-let url = format!("/indexes/{}/search", self.uid);
+let url = format!("/indexes/{}/search", encode(self.uid.as_ref()).to_string());
 self.service.post(url, query).await
 }
 pub async fn search_get(&self, query: Value) -> (Value, StatusCode) {
 let params = serde_url_params::to_string(&query).unwrap();
-let url = format!("/indexes/{}/search?{}", self.uid, params);
+let url = format!(
+"/indexes/{}/search?{}",
+encode(self.uid.as_ref()).to_string(),
+params
+);
 self.service.get(url).await
 }
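
Note: the test helper now percent-encodes the index uid at the point each URL is built, instead of in `Server::index` (see the next file), so assertions can still compare against the raw uid such as `test##! `. A small sketch of what `urlencoding::encode` does here, assuming the `urlencoding` crate imported in the diff above:

```rust
use urlencoding::encode;

fn main() {
    // The raw uid stays on the Index helper...
    let uid = "test test#!";

    // ...and is only encoded when a route is formatted, as in the diff above.
    let url = format!("/indexes/{}/settings", encode(uid));

    // Spaces and punctuation become percent escapes in the request path.
    assert_eq!(url, "/indexes/test%20test%23%21/settings");
}
```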


@@ -7,7 +7,6 @@ use meilisearch_lib::options::{IndexerOpts, MaxMemory};
 use once_cell::sync::Lazy;
 use serde_json::Value;
 use tempfile::TempDir;
-use urlencoding::encode;
 use meilisearch_http::option::Opt;
@@ -62,7 +61,7 @@ impl Server {
 /// Returns a view to an index. There is no guarantee that the index exists.
 pub fn index(&self, uid: impl AsRef<str>) -> Index<'_> {
 Index {
-uid: encode(uid.as_ref()).to_string(),
+uid: uid.as_ref().to_string(),
 service: &self.service,
 }
 }


@@ -812,13 +812,15 @@ async fn error_add_documents_bad_document_id() {
 let (response, code) = index.get_update(0).await;
 assert_eq!(code, 200);
 assert_eq!(response["status"], json!("failed"));
-assert_eq!(response["message"], json!("Document identifier `foo & bar` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_)."));
-assert_eq!(response["code"], json!("invalid_document_id"));
-assert_eq!(response["type"], json!("invalid_request"));
-assert_eq!(
-response["link"],
-json!("https://docs.meilisearch.com/errors#invalid_document_id")
-);
+let expected_error = json!({
+"message": "Document identifier `foo & bar` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_).",
+"code": "invalid_document_id",
+"type": "invalid_request",
+"link": "https://docs.meilisearch.com/errors#invalid_document_id"
+});
+assert_eq!(response["error"], expected_error);
 }
 #[actix_rt::test]
@@ -837,13 +839,15 @@ async fn error_update_documents_bad_document_id() {
 let (response, code) = index.get_update(0).await;
 assert_eq!(code, 200);
 assert_eq!(response["status"], json!("failed"));
-assert_eq!(response["message"], json!("Document identifier `foo & bar` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_)."));
-assert_eq!(response["code"], json!("invalid_document_id"));
-assert_eq!(response["type"], json!("invalid_request"));
-assert_eq!(
-response["link"],
-json!("https://docs.meilisearch.com/errors#invalid_document_id")
-);
+let expected_error = json!({
+"message": "Document identifier `foo & bar` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_).",
+"code": "invalid_document_id",
+"type": "invalid_request",
+"link": "https://docs.meilisearch.com/errors#invalid_document_id"
+});
+assert_eq!(response["error"], expected_error);
 }
 #[actix_rt::test]
@@ -862,16 +866,15 @@ async fn error_add_documents_missing_document_id() {
 let (response, code) = index.get_update(0).await;
 assert_eq!(code, 200);
 assert_eq!(response["status"], "failed");
-assert_eq!(
-response["message"],
-json!(r#"Document doesn't have a `docid` attribute: `{"id":"11","content":"foobar"}`."#)
-);
-assert_eq!(response["code"], json!("missing_document_id"));
-assert_eq!(response["type"], json!("invalid_request"));
-assert_eq!(
-response["link"],
-json!("https://docs.meilisearch.com/errors#missing_document_id")
-);
+let expected_error = json!({
+"message": r#"Document doesn't have a `docid` attribute: `{"id":"11","content":"foobar"}`."#,
+"code": "missing_document_id",
+"type": "invalid_request",
+"link": "https://docs.meilisearch.com/errors#missing_document_id"
+});
+assert_eq!(response["error"], expected_error);
 }
 #[actix_rt::test]
@@ -890,16 +893,15 @@ async fn error_update_documents_missing_document_id() {
 let (response, code) = index.get_update(0).await;
 assert_eq!(code, 200);
 assert_eq!(response["status"], "failed");
-assert_eq!(
-response["message"],
-r#"Document doesn't have a `docid` attribute: `{"id":"11","content":"foobar"}`."#
-);
-assert_eq!(response["code"], "missing_document_id");
-assert_eq!(response["type"], "invalid_request");
-assert_eq!(
-response["link"],
-"https://docs.meilisearch.com/errors#missing_document_id"
-);
+let expected_error = json!({
+"message": r#"Document doesn't have a `docid` attribute: `{"id":"11","content":"foobar"}`."#,
+"code": "missing_document_id",
+"type": "invalid_request",
+"link": "https://docs.meilisearch.com/errors#missing_document_id"
+});
+assert_eq!(response["error"], expected_error);
 }
 #[actix_rt::test]
@@ -927,45 +929,47 @@ async fn error_document_field_limit_reached() {
 assert_eq!(code, 200);
 // Documents without a primary key are not accepted.
 assert_eq!(response["status"], "failed");
-assert_eq!(
-response["message"],
-"A document cannot contain more than 65,535 fields."
-);
-assert_eq!(response["code"], "document_fields_limit_reached");
-assert_eq!(response["type"], "invalid_request");
-assert_eq!(
-response["link"],
-"https://docs.meilisearch.com/errors#document_fields_limit_reached"
-);
+let expected_error = json!({
+"message": "A document cannot contain more than 65,535 fields.",
+"code": "document_fields_limit_reached",
+"type": "invalid_request",
+"link": "https://docs.meilisearch.com/errors#document_fields_limit_reached"
+});
+assert_eq!(response["error"], expected_error);
 }
 #[actix_rt::test]
-#[ignore]
+// TODO: Fix in an other PR: this does not provoke any error.
 async fn error_add_documents_invalid_geo_field() {
 let server = Server::new().await;
 let index = server.index("test");
 index.create(Some("id")).await;
+index
+.update_settings(json!({"sortableAttributes": ["_geo"]}))
+.await;
 let documents = json!([
 {
 "id": "11",
 "_geo": "foobar"
 }
 ]);
 index.add_documents(documents, None).await;
-index.wait_update_id(0).await;
-let (response, code) = index.get_update(0).await;
+index.wait_update_id(1).await;
+let (response, code) = index.get_update(1).await;
 assert_eq!(code, 200);
 assert_eq!(response["status"], "failed");
-assert_eq!(
-response["message"],
-r#"The document with the id: `11` contains an invalid _geo field: :syntaxErrorHelper:REPLACE_ME."#
-);
-assert_eq!(response["code"], "invalid_geo_field");
-assert_eq!(response["type"], "invalid_request");
-assert_eq!(
-response["link"],
-"https://docs.meilisearch.com/errors#invalid_geo_field"
-);
+let expected_error = json!({
+"message": r#"The document with the id: `11` contains an invalid _geo field: `foobar`."#,
+"code": "invalid_geo_field",
+"type": "invalid_request",
+"link": "https://docs.meilisearch.com/errors#invalid_geo_field"
+});
+assert_eq!(response["error"], expected_error);
 }
 #[actix_rt::test]
@@ -993,3 +997,31 @@ async fn error_add_documents_payload_size() {
 assert_eq!(response, expected_response);
 assert_eq!(code, 413);
 }
+#[actix_rt::test]
+async fn error_primary_key_inference() {
+let server = Server::new().await;
+let index = server.index("test");
+let documents = json!([
+{
+"title": "11",
+"desc": "foobar"
+}
+]);
+index.add_documents(documents, None).await;
+index.wait_update_id(0).await;
+let (response, code) = index.get_update(0).await;
+assert_eq!(code, 200);
+assert_eq!(response["status"], "failed");
+let expected_error = json!({
+"message": r#"The primary key inference process failed because the engine did not find any fields containing `id` substring in their name. If your document identifier does not contain any `id` substring, you can set the primary key of the index."#,
+"code": "primary_key_inference_failed",
+"type": "invalid_request",
+"link": "https://docs.meilisearch.com/errors#primary_key_inference_failed"
+});
+assert_eq!(response["error"], expected_error);
+}
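
Note: the reworked assertions above compare a single `expected_error` value against `response["error"]` instead of four separate field asserts, so a mismatch reports the whole object at once. A tiny self-contained sketch of the pattern, with a hypothetical response value standing in for the body returned by `index.get_update(..)`:

```rust
use serde_json::json;

fn main() {
    // Stand-in for the update body asserted in the tests above.
    let response = json!({
        "status": "failed",
        "error": {
            "message": "Document identifier `foo & bar` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_).",
            "code": "invalid_document_id",
            "type": "invalid_request",
            "link": "https://docs.meilisearch.com/errors#invalid_document_id"
        }
    });

    let expected_error = json!({
        "message": "Document identifier `foo & bar` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_).",
        "code": "invalid_document_id",
        "type": "invalid_request",
        "link": "https://docs.meilisearch.com/errors#invalid_document_id"
    });

    // One assert covers message, code, type and link in a single comparison.
    assert_eq!(response["error"], expected_error);
}
```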


@@ -89,7 +89,6 @@ async fn error_create_existing_index() {
 }
 #[actix_rt::test]
-#[ignore] // TODO: Fix in an other PR: uid returned `test%20test%23%21` instead of `test test#!`
 async fn error_create_with_invalid_index_uid() {
 let server = Server::new().await;
 let index = server.index("test test#!");


@@ -63,7 +63,7 @@ async fn get_settings() {
 }
 #[actix_rt::test]
-async fn update_settings_unknown_field() {
+async fn error_update_settings_unknown_field() {
 let server = Server::new().await;
 let index = server.index("test");
 let (_response, code) = index.update_settings(json!({"foo": 12})).await;
@@ -95,10 +95,19 @@ async fn test_partial_update() {
 }
 #[actix_rt::test]
-async fn delete_settings_unexisting_index() {
+async fn error_delete_settings_unexisting_index() {
 let server = Server::new().await;
 let index = server.index("test");
-let (_response, code) = index.delete_settings().await;
+let (response, code) = index.delete_settings().await;
+let expected_response = json!({
+"message": "Index `test` not found.",
+"code": "index_not_found",
+"type": "invalid_request",
+"link": "https://docs.meilisearch.com/errors#index_not_found"
+});
+assert_eq!(response, expected_response);
 assert_eq!(code, 404);
 }
@@ -164,11 +173,20 @@ async fn update_setting_unexisting_index() {
 }
 #[actix_rt::test]
-async fn update_setting_unexisting_index_invalid_uid() {
+async fn error_update_setting_unexisting_index_invalid_uid() {
 let server = Server::new().await;
 let index = server.index("test##! ");
 let (response, code) = index.update_settings(json!({})).await;
-assert_eq!(code, 400, "{}", response);
+let expected_response = json!({
+"message": "`test##! ` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
+"code": "invalid_index_uid",
+"type": "invalid_request",
+"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
+});
+assert_eq!(response, expected_response);
+assert_eq!(code, 400);
 }
 macro_rules! test_setting_routes {
@@ -246,3 +264,49 @@ test_setting_routes!(
 ranking_rules,
 synonyms
 );
+#[actix_rt::test]
+async fn error_set_invalid_ranking_rules() {
+let server = Server::new().await;
+let index = server.index("test");
+index.create(None).await;
+let (_response, _code) = index
+.update_settings(json!({ "rankingRules": [ "manyTheFish"]}))
+.await;
+index.wait_update_id(0).await;
+let (response, code) = index.get_update(0).await;
+assert_eq!(code, 200);
+assert_eq!(response["status"], "failed");
+let expected_error = json!({
+"message": r#"`manyTheFish` ranking rule is invalid. Valid ranking rules are Words, Typo, Sort, Proximity, Attribute, Exactness and custom ranking rules."#,
+"code": "invalid_ranking_rule",
+"type": "invalid_request",
+"link": "https://docs.meilisearch.com/errors#invalid_ranking_rule"
+});
+assert_eq!(response["error"], expected_error);
+}
+#[actix_rt::test]
+async fn set_and_reset_distinct_attribute_with_dedicated_route() {
+let server = Server::new().await;
+let index = server.index("test");
+let (_response, _code) = index.update_distinct_attribute(json!("test")).await;
+index.wait_update_id(0).await;
+let (response, _) = index.get_distinct_attribute().await;
+assert_eq!(response, "test");
+index.update_distinct_attribute(json!(null)).await;
+index.wait_update_id(1).await;
+let (response, _) = index.get_distinct_attribute().await;
+assert_eq!(response, json!(null));
+}


@@ -30,7 +30,7 @@ lazy_static = "1.4.0"
 log = "0.4.14"
 meilisearch-error = { path = "../meilisearch-error" }
 meilisearch-tokenizer = { git = "https://github.com/meilisearch/tokenizer.git", tag = "v0.2.5" }
-milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.20.0" }
+milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.20.2" }
 mime = "0.3.16"
 num_cpus = "1.13.0"
 once_cell = "1.8.0"


@@ -25,7 +25,7 @@ impl fmt::Display for PayloadType {
 #[derive(thiserror::Error, Debug)]
 pub enum DocumentFormatError {
-#[error("Internal error!: {0}")]
+#[error("An internal error has occurred. `{0}`.")]
 Internal(Box<dyn std::error::Error + Send + Sync + 'static>),
 #[error("The `{1}` payload provided is malformed. `{0}`.")]
 MalformedPayload(


@@ -36,11 +36,11 @@ impl ErrorCode for MilliError<'_> {
 match error {
 // TODO: wait for spec for new error codes.
 UserError::SerdeJson(_)
-| UserError::MaxDatabaseSizeReached
-| UserError::InvalidStoreFile
-| UserError::NoSpaceLeftOnDevice
 | UserError::DocumentLimitReached
 | UserError::UnknownInternalDocumentId { .. } => Code::Internal,
+UserError::InvalidStoreFile => Code::InvalidStore,
+UserError::NoSpaceLeftOnDevice => Code::NoSpaceLeftOnDevice,
+UserError::MaxDatabaseSizeReached => Code::DatabaseSizeLimitReached,
 UserError::AttributeLimitReached => Code::MaxFieldsLimitExceeded,
 UserError::InvalidFilter(_) => Code::Filter,
 UserError::MissingDocumentId { .. } => Code::MissingDocumentId,


@@ -9,7 +9,7 @@ pub type Result<T> = std::result::Result<T, IndexError>;
 #[derive(Debug, thiserror::Error)]
 pub enum IndexError {
-#[error("Internal error: {0}")]
+#[error("An internal error has occurred. `{0}`.")]
 Internal(Box<dyn Error + Send + Sync + 'static>),
 #[error("Document `{0}` not found.")]
 DocumentNotFound(String),


@@ -7,11 +7,11 @@ pub type Result<T> = std::result::Result<T, DumpActorError>;
 #[derive(thiserror::Error, Debug)]
 pub enum DumpActorError {
-#[error("Another dump is already in progress")]
+#[error("A dump is already processing. You must wait until the current process is finished before requesting another dump.")]
 DumpAlreadyRunning,
 #[error("Dump `{0}` not found.")]
 DumpDoesNotExist(String),
-#[error("Internal error: {0}")]
+#[error("An internal error has occurred. `{0}`.")]
 Internal(Box<dyn std::error::Error + Send + Sync + 'static>),
 #[error("{0}")]
 IndexResolver(#[from] IndexResolverError),
@@ -43,7 +43,7 @@ impl ErrorCode for DumpActorError {
 fn error_code(&self) -> Code {
 match self {
 DumpActorError::DumpAlreadyRunning => Code::DumpAlreadyInProgress,
-DumpActorError::DumpDoesNotExist(_) => Code::NotFound,
+DumpActorError::DumpDoesNotExist(_) => Code::DumpNotFound,
 DumpActorError::Internal(_) => Code::Internal,
 DumpActorError::IndexResolver(e) => e.error_code(),
 DumpActorError::UpdateLoop(e) => e.error_code(),


@@ -361,14 +361,13 @@ mod compat {
 "index_already_exists" => Code::IndexAlreadyExists,
 "index_not_found" => Code::IndexNotFound,
 "invalid_index_uid" => Code::InvalidIndexUid,
-"index_not_accessible" => Code::OpenIndex,
 "invalid_state" => Code::InvalidState,
 "missing_primary_key" => Code::MissingPrimaryKey,
 "primary_key_already_present" => Code::PrimaryKeyAlreadyPresent,
 "invalid_request" => Code::InvalidRankingRule,
 "max_fields_limit_exceeded" => Code::MaxFieldsLimitExceeded,
 "missing_document_id" => Code::MissingDocumentId,
-"invalid_facet" => Code::Facet,
+"invalid_facet" => Code::Filter,
 "invalid_filter" => Code::Filter,
 "invalid_sort" => Code::Sort,
 "bad_parameter" => Code::BadParameter,
@@ -378,7 +377,6 @@ mod compat {
 "invalid_geo_field" => Code::InvalidGeoField,
 "invalid_token" => Code::InvalidToken,
 "missing_authorization_header" => Code::MissingAuthorizationHeader,
-"not_found" => Code::NotFound,
 "payload_too_large" => Code::PayloadTooLarge,
 "unretrievable_document" => Code::RetrieveDocument,
 "search_error" => Code::SearchDocuments,


@@ -24,7 +24,7 @@ pub enum IndexControllerError {
 DumpActor(#[from] DumpActorError),
 #[error("{0}")]
 IndexError(#[from] IndexError),
-#[error("Internal error: {0}")]
+#[error("An internal error has occurred. `{0}`.")]
 Internal(Box<dyn Error + Send + Sync + 'static>),
 }


@@ -19,9 +19,9 @@ pub enum IndexResolverError {
 UnexistingIndex(String),
 #[error("A primary key is already present. It's impossible to update it")]
 ExistingPrimaryKey,
-#[error("Internal Error: `{0}`")]
+#[error("An internal error has occurred. `{0}`.")]
 Internal(Box<dyn std::error::Error + Send + Sync + 'static>),
-#[error("Internal Error: Index uuid `{0}` is already assigned.")]
+#[error("The creation of the `{0}` index has failed due to `Index uuid is already assigned`.")]
 UuidAlreadyExists(Uuid),
 #[error("{0}")]
 Milli(#[from] milli::Error),
@@ -60,7 +60,7 @@ impl ErrorCode for IndexResolverError {
 IndexResolverError::UnexistingIndex(_) => Code::IndexNotFound,
 IndexResolverError::ExistingPrimaryKey => Code::PrimaryKeyAlreadyPresent,
 IndexResolverError::Internal(_) => Code::Internal,
-IndexResolverError::UuidAlreadyExists(_) => Code::Internal,
+IndexResolverError::UuidAlreadyExists(_) => Code::CreateIndex,
 IndexResolverError::Milli(e) => MilliError(e).error_code(),
 IndexResolverError::BadlyFormatted(_) => Code::InvalidIndexUid,
 }


@@ -16,7 +16,7 @@ pub type Result<T> = std::result::Result<T, UpdateLoopError>;
 pub enum UpdateLoopError {
 #[error("Task `{0}` not found.")]
 UnexistingUpdate(u64),
-#[error("Internal error: {0}")]
+#[error("An internal error has occurred. `{0}`.")]
 Internal(Box<dyn Error + Send + Sync + 'static>),
 #[error(
 "update store was shut down due to a fatal error, please check your logs for more info."