From a476f1aeb60c96e22a3f7be41aa6881b143dbfe1 Mon Sep 17 00:00:00 2001 From: airycanon Date: Thu, 14 Nov 2024 15:59:52 +0800 Subject: [PATCH 1/2] attach index name in errors --- crates/index-scheduler/src/batch.rs | 194 +++++++++++------- crates/index-scheduler/src/error.rs | 15 +- .../src/index_mapper/index_map.rs | 4 +- .../index-scheduler/src/index_mapper/mod.rs | 13 +- crates/index-scheduler/src/lib.rs | 21 +- crates/meilisearch/src/error.rs | 16 +- crates/meilisearch/src/lib.rs | 3 +- .../src/routes/indexes/facet_search.rs | 2 +- crates/meilisearch/src/routes/indexes/mod.rs | 4 +- .../meilisearch/src/routes/indexes/search.rs | 14 +- .../meilisearch/src/routes/indexes/similar.rs | 2 +- crates/meilisearch/src/routes/multi_search.rs | 6 +- crates/meilisearch/src/search/federated.rs | 7 +- crates/meilisearch/src/search/mod.rs | 43 ++-- 14 files changed, 210 insertions(+), 134 deletions(-) diff --git a/crates/index-scheduler/src/batch.rs b/crates/index-scheduler/src/batch.rs index 903ec1217..176bdd410 100644 --- a/crates/index-scheduler/src/batch.rs +++ b/crates/index-scheduler/src/batch.rs @@ -448,20 +448,20 @@ impl IndexScheduler { match (document_import, settings) { ( Some(Batch::IndexOperation { - op: - IndexOperation::DocumentOperation { - primary_key, - documents_counts, - operations, - tasks: document_import_tasks, - .. - }, - .. - }), + op: + IndexOperation::DocumentOperation { + primary_key, + documents_counts, + operations, + tasks: document_import_tasks, + .. + }, + .. + }), Some(Batch::IndexOperation { - op: IndexOperation::Settings { settings, tasks: settings_tasks, .. }, - .. - }), + op: IndexOperation::Settings { settings, tasks: settings_tasks, .. }, + .. + }), ) => Ok(Some(Batch::IndexOperation { op: IndexOperation::SettingsAndDocumentOperation { index_uid, @@ -618,7 +618,12 @@ impl IndexScheduler { /// The list of tasks that were processed. The metadata of each task in the returned /// list is updated accordingly, with the exception of the its date fields /// [`finished_at`](meilisearch_types::tasks::Task::finished_at) and [`started_at`](meilisearch_types::tasks::Task::started_at). - #[tracing::instrument(level = "trace", skip(self, batch), target = "indexing::scheduler", fields(batch=batch.to_string()))] + #[tracing::instrument( + level = "trace", + skip(self, batch), + target = "indexing::scheduler", + fields(batch=batch.to_string()) + )] pub(crate) fn process_batch(&self, batch: Batch) -> Result> { #[cfg(test)] { @@ -627,6 +632,8 @@ impl IndexScheduler { self.breakpoint(crate::Breakpoint::InsideProcessBatch); } + let index_uid = batch.index_uid().map(String::from); + match batch { Batch::TaskCancelation { mut task, previous_started_at, previous_processing_tasks } => { // 1. Retrieve the tasks that matched the query at enqueue-time. 
@@ -649,10 +656,10 @@ impl IndexScheduler { task.status = Status::Succeeded; match &mut task.details { Some(Details::TaskCancelation { - matched_tasks: _, - canceled_tasks, - original_filter: _, - }) => { + matched_tasks: _, + canceled_tasks, + original_filter: _, + }) => { *canceled_tasks = Some(canceled_tasks_content_uuids.len() as u64); } _ => unreachable!(), @@ -706,10 +713,10 @@ impl IndexScheduler { match &mut task.details { Some(Details::TaskDeletion { - matched_tasks: _, - deleted_tasks, - original_filter: _, - }) => { + matched_tasks: _, + deleted_tasks, + original_filter: _, + }) => { *deleted_tasks = Some(deleted_tasks_count); } _ => unreachable!(), @@ -765,7 +772,8 @@ impl IndexScheduler { let index = self.index_mapper.index(&rtxn, name)?; let dst = temp_snapshot_dir.path().join("indexes").join(uuid.to_string()); fs::create_dir_all(&dst)?; - index.copy_to_file(dst.join("data.mdb"), CompactionOption::Enabled)?; + index.copy_to_file(dst.join("data.mdb"), CompactionOption::Enabled) + .map_err(|e| Error::from_milli(e, Some(name.to_string())))?; } drop(rtxn); @@ -843,7 +851,6 @@ impl IndexScheduler { let (_, mut t) = ret?; let status = t.status; let content_file = t.content_uuid(); - // In the case we're dumping ourselves we want to be marked as finished // to not loop over ourselves indefinitely. if t.uid == task.uid { @@ -867,18 +874,24 @@ impl IndexScheduler { let content_file = self.file_store.get_update(content_file)?; let reader = DocumentsBatchReader::from_reader(content_file) - .map_err(milli::Error::from)?; + .map_err(|e| Error::Milli { + error: e.into(), + index_name: index_uid.clone(), + })?; let (mut cursor, documents_batch_index) = reader.into_cursor_and_fields_index(); - while let Some(doc) = - cursor.next_document().map_err(milli::Error::from)? + while let Some(doc) = cursor.next_document() + .map_err(|e| Error::Milli { + error: e.into(), + index_name: index_uid.clone(), + })? { dump_content_file.push_document(&obkv_to_object( &doc, &documents_batch_index, - )?)?; + ).map_err(|e| Error::from_milli(e, index_uid.clone()))?)?; } dump_content_file.flush()?; } @@ -892,27 +905,35 @@ impl IndexScheduler { let metadata = IndexMetadata { uid: uid.to_owned(), primary_key: index.primary_key(&rtxn)?.map(String::from), - created_at: index.created_at(&rtxn)?, - updated_at: index.updated_at(&rtxn)?, + created_at: index.created_at(&rtxn) + .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?, + updated_at: index.updated_at(&rtxn) + .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?, }; let mut index_dumper = dump.create_index(uid, &metadata)?; let fields_ids_map = index.fields_ids_map(&rtxn)?; let all_fields: Vec<_> = fields_ids_map.iter().map(|(id, _)| id).collect(); - let embedding_configs = index.embedding_configs(&rtxn)?; + let embedding_configs = index.embedding_configs(&rtxn) + .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?; + let documents = index.all_documents(&rtxn) + .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?; // 3.1. Dump the documents - for ret in index.all_documents(&rtxn)? 
{ + for ret in documents { if self.must_stop_processing.get() { return Err(Error::AbortedTask); } - let (id, doc) = ret?; + let (id, doc) = ret + .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?; - let mut document = milli::obkv_to_json(&all_fields, &fields_ids_map, doc)?; + let mut document = milli::obkv_to_json(&all_fields, &fields_ids_map, doc) + .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?; 'inject_vectors: { - let embeddings = index.embeddings(&rtxn, id)?; + let embeddings = index.embeddings(&rtxn, id) + .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?; if embeddings.is_empty() { break 'inject_vectors; @@ -923,7 +944,7 @@ impl IndexScheduler { .or_insert(serde_json::Value::Object(Default::default())); let serde_json::Value::Object(vectors) = vectors else { - return Err(milli::Error::UserError( + return Err(Error::from_milli(milli::Error::UserError( milli::UserError::InvalidVectorsMapType { document_id: { if let Ok(Some(Ok(index))) = index @@ -937,8 +958,7 @@ impl IndexScheduler { }, value: vectors.clone(), }, - ) - .into()); + ), Some(uid.to_string()))); }; for (embedder_name, embeddings) in embeddings { @@ -968,7 +988,7 @@ impl IndexScheduler { index, &rtxn, meilisearch_types::settings::SecretPolicy::RevealSecrets, - )?; + ).map_err(|e| Error::from_milli(e, Some(uid.to_string())))?; index_dumper.settings(&settings)?; Ok(()) })?; @@ -1018,7 +1038,8 @@ impl IndexScheduler { // the entire batch. let res = || -> Result<()> { let index_rtxn = index.read_txn()?; - let stats = crate::index_mapper::IndexStats::new(&index, &index_rtxn)?; + let stats = crate::index_mapper::IndexStats::new(&index, &index_rtxn) + .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; let mut wtxn = self.env.write_txn()?; self.index_mapper.store_stats_of(&mut wtxn, &index_uid, &stats)?; wtxn.commit()?; @@ -1060,7 +1081,7 @@ impl IndexScheduler { builder.execute( |indexing_step| tracing::debug!(update = ?indexing_step), || must_stop_processing.get(), - )?; + ).map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; index_wtxn.commit()?; } @@ -1077,7 +1098,8 @@ impl IndexScheduler { let res = || -> Result<()> { let mut wtxn = self.env.write_txn()?; let index_rtxn = index.read_txn()?; - let stats = crate::index_mapper::IndexStats::new(&index, &index_rtxn)?; + let stats = crate::index_mapper::IndexStats::new(&index, &index_rtxn) + .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; self.index_mapper.store_stats_of(&mut wtxn, &index_uid, &stats)?; wtxn.commit()?; Ok(()) @@ -1100,9 +1122,11 @@ impl IndexScheduler { let number_of_documents = || -> Result { let index = self.index_mapper.index(&wtxn, &index_uid)?; let index_rtxn = index.read_txn()?; - Ok(index.number_of_documents(&index_rtxn)?) + Ok(index.number_of_documents(&index_rtxn) + .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))? + ) }() - .unwrap_or_default(); + .unwrap_or_default(); // The write transaction is directly owned and committed inside. match self.index_mapper.delete_index(wtxn, &index_uid) { @@ -1219,8 +1243,9 @@ impl IndexScheduler { operation: IndexOperation, ) -> Result> { match operation { - IndexOperation::DocumentClear { mut tasks, .. } => { - let count = milli::update::ClearDocuments::new(index_wtxn, index).execute()?; + IndexOperation::DocumentClear { index_uid, mut tasks, .. 
} => { + let count = milli::update::ClearDocuments::new(index_wtxn, index).execute() + .map_err(|e| Error::from_milli(e, Some(index_uid)))?; let mut first_clear_found = false; for task in &mut tasks { @@ -1240,7 +1265,7 @@ impl IndexScheduler { Ok(tasks) } IndexOperation::DocumentOperation { - index_uid: _, + index_uid, primary_key, method, documents_counts: _, @@ -1258,10 +1283,11 @@ impl IndexScheduler { // but to a different value, we can make the whole batch fail. Some(pk) => { if primary_key != pk { - return Err(milli::Error::from( - milli::UserError::PrimaryKeyCannotBeChanged(pk.to_string()), + return Err(Error::from_milli( + milli::UserError::PrimaryKeyCannotBeChanged(pk.to_string()).into(), + Some(index_uid.clone()), ) - .into()); + .into()); } } // if the primary key was set and there was no primary key set for this index @@ -1273,7 +1299,7 @@ impl IndexScheduler { builder.execute( |indexing_step| tracing::debug!(update = ?indexing_step), || must_stop_processing.clone().get(), - )?; + ).map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; primary_key_has_been_set = true; } } @@ -1281,9 +1307,10 @@ impl IndexScheduler { let config = IndexDocumentsConfig { update_method: method, ..Default::default() }; - let embedder_configs = index.embedding_configs(index_wtxn)?; + let embedder_configs = index.embedding_configs(index_wtxn) + .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; // TODO: consider Arc'ing the map too (we only need read access + we'll be cloning it multiple times, so really makes sense) - let embedders = self.embedders(embedder_configs)?; + let embedders = self.embedders(index_uid.clone(), embedder_configs)?; let mut builder = milli::update::IndexDocuments::new( index_wtxn, @@ -1292,24 +1319,25 @@ impl IndexScheduler { config, |indexing_step| tracing::trace!(?indexing_step, "Update"), || must_stop_processing.get(), - )?; + ).map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; for (operation, task) in operations.into_iter().zip(tasks.iter_mut()) { match operation { DocumentOperation::Add(content_uuid) => { let content_file = self.file_store.get_update(content_uuid)?; let reader = DocumentsBatchReader::from_reader(content_file) - .map_err(milli::Error::from)?; - let (new_builder, user_result) = builder.add_documents(reader)?; + .map_err(|e| Error::from_milli(e.into(), Some(index_uid.clone())))?; + let (new_builder, user_result) = builder.add_documents(reader) + .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; builder = new_builder; builder = builder.with_embedders(embedders.clone()); let received_documents = if let Some(Details::DocumentAdditionOrUpdate { - received_documents, - .. - }) = task.details + received_documents, + .. 
+ }) = task.details { received_documents } else { @@ -1337,7 +1365,8 @@ impl IndexScheduler { } DocumentOperation::Delete(document_ids) => { let (new_builder, user_result) = - builder.remove_documents(document_ids)?; + builder.remove_documents(document_ids) + .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; builder = new_builder; // Uses Invariant: remove documents actually always returns Ok for the inner result let count = user_result.unwrap(); @@ -1361,7 +1390,7 @@ impl IndexScheduler { } if !tasks.iter().all(|res| res.error.is_some()) { - let addition = builder.execute()?; + let addition = builder.execute().map_err(|e| Error::from_milli(e, Some(index_uid)))?; tracing::info!(indexing_result = ?addition, processed_in = ?started_processing_at.elapsed(), "document indexing done"); } else if primary_key_has_been_set { // Everything failed but we've set a primary key. @@ -1372,12 +1401,12 @@ impl IndexScheduler { builder.execute( |indexing_step| tracing::trace!(update = ?indexing_step), || must_stop_processing.clone().get(), - )?; + ).map_err(|e| Error::from_milli(e, Some(index_uid)))?; } Ok(tasks) } - IndexOperation::DocumentEdition { mut task, .. } => { + IndexOperation::DocumentEdition { index_uid, mut task } => { let (filter, context, function) = if let KindWithContent::DocumentEdition { filter_expr, context, function, .. @@ -1395,6 +1424,7 @@ impl IndexScheduler { self.index_mapper.indexer_config(), self.must_stop_processing.clone(), index, + index_uid ); let (original_filter, context, function) = if let Some(Details::DocumentEdition { original_filter, @@ -1435,7 +1465,7 @@ impl IndexScheduler { Ok(vec![task]) } - IndexOperation::DocumentDeletion { mut tasks, index_uid: _ } => { + IndexOperation::DocumentDeletion { mut tasks, index_uid } => { let mut to_delete = RoaringBitmap::new(); let external_documents_ids = index.external_documents_ids(); @@ -1456,7 +1486,7 @@ impl IndexScheduler { deleted_documents: Some(will_be_removed), }); } - KindWithContent::DocumentDeletionByFilter { index_uid: _, filter_expr } => { + KindWithContent::DocumentDeletionByFilter { index_uid, filter_expr } => { let before = to_delete.len(); let filter = match Filter::from_json(filter_expr) { Ok(filter) => filter, @@ -1467,7 +1497,7 @@ impl IndexScheduler { milli::Error::UserError( milli::UserError::InvalidFilterExpression { .. 
}, ) => Some( - Error::from(err) + Error::from_milli(err, Some(index_uid.clone())) .with_custom_error_code(Code::InvalidDocumentFilter) .into(), ), @@ -1481,9 +1511,9 @@ impl IndexScheduler { filter.evaluate(index_wtxn, index).map_err(|err| match err { milli::Error::UserError( milli::UserError::InvalidFilter(_), - ) => Error::from(err) + ) => Error::from_milli(err, Some(index_uid.clone())) .with_custom_error_code(Code::InvalidDocumentFilter), - e => e.into(), + e => Error::from_milli(e, Some(index_uid.clone())), }); match candidates { Ok(candidates) => to_delete |= candidates, @@ -1522,17 +1552,18 @@ impl IndexScheduler { config, |indexing_step| tracing::debug!(update = ?indexing_step), || must_stop_processing.get(), - )?; + ).map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; let (new_builder, _count) = - builder.remove_documents_from_db_no_batch(&to_delete)?; + builder.remove_documents_from_db_no_batch(&to_delete) + .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; builder = new_builder; - let _ = builder.execute()?; + let _ = builder.execute().map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; Ok(tasks) } - IndexOperation::Settings { index_uid: _, settings, mut tasks } => { + IndexOperation::Settings { index_uid, settings, mut tasks } => { let indexer_config = self.index_mapper.indexer_config(); let mut builder = milli::update::Settings::new(index_wtxn, index, indexer_config); @@ -1550,7 +1581,7 @@ impl IndexScheduler { builder.execute( |indexing_step| tracing::debug!(update = ?indexing_step), || must_stop_processing.get(), - )?; + ).map_err(|e| Error::from_milli(e, Some(index_uid)))?; Ok(tasks) } @@ -1742,16 +1773,17 @@ fn edit_documents_by_function<'a>( indexer_config: &IndexerConfig, must_stop_processing: MustStopProcessing, index: &'a Index, + index_uid: String, ) -> Result<(u64, u64)> { let candidates = match filter.as_ref().map(Filter::from_json) { Some(Ok(Some(filter))) => filter.evaluate(wtxn, index).map_err(|err| match err { milli::Error::UserError(milli::UserError::InvalidFilter(_)) => { - Error::from(err).with_custom_error_code(Code::InvalidDocumentFilter) + Error::from_milli(err, Some(index_uid.clone())).with_custom_error_code(Code::InvalidDocumentFilter) } - e => e.into(), + e => Error::from_milli(e.into(), Some(index_uid.clone())), })?, None | Some(Ok(None)) => index.documents_ids(wtxn)?, - Some(Err(e)) => return Err(e.into()), + Some(Err(e)) => return Err(Error::from_milli(e.into(), Some(index_uid.clone()))), }; let config = IndexDocumentsConfig { @@ -1766,11 +1798,15 @@ fn edit_documents_by_function<'a>( config, |indexing_step| tracing::debug!(update = ?indexing_step), || must_stop_processing.get(), - )?; + ).map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; - let (new_builder, count) = builder.edit_documents(&candidates, context, code)?; + let (new_builder, count) = builder.edit_documents( + &candidates, + context, + code + ).map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; builder = new_builder; - let _ = builder.execute()?; + let _ = builder.execute().map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; Ok(count.unwrap()) } diff --git a/crates/index-scheduler/src/error.rs b/crates/index-scheduler/src/error.rs index 3bd378fd6..91579fd68 100644 --- a/crates/index-scheduler/src/error.rs +++ b/crates/index-scheduler/src/error.rs @@ -117,8 +117,11 @@ pub enum Error { Dump(#[from] dump::Error), #[error(transparent)] Heed(#[from] heed::Error), - #[error(transparent)] - Milli(#[from] milli::Error), + #[error("{}", 
match .index_name { + Some(name) if !name.is_empty() => format!("Index `{}`: {error}", name), + _ => format!("{error}") + })] + Milli { error: milli::Error, index_name: Option }, #[error("An unexpected crash occurred when processing the task.")] ProcessBatchPanicked, #[error(transparent)] @@ -183,7 +186,7 @@ impl Error { | Error::AbortedTask | Error::Dump(_) | Error::Heed(_) - | Error::Milli(_) + | Error::Milli { .. } | Error::ProcessBatchPanicked | Error::FileStore(_) | Error::IoError(_) @@ -202,6 +205,10 @@ impl Error { pub fn with_custom_error_code(self, code: Code) -> Self { Self::WithCustomErrorCode(code, Box::new(self)) } + + pub fn from_milli(error: milli::Error, index_name: Option) -> Self { + Self::Milli { error, index_name } + } } impl ErrorCode for Error { @@ -227,7 +234,7 @@ impl ErrorCode for Error { // TODO: not sure of the Code to use Error::NoSpaceLeftInTaskQueue => Code::NoSpaceLeftOnDevice, Error::Dump(e) => e.error_code(), - Error::Milli(e) => e.error_code(), + Error::Milli { error, .. } => error.error_code(), Error::ProcessBatchPanicked => Code::Internal, Error::Heed(e) => e.error_code(), Error::HeedTransaction(e) => e.error_code(), diff --git a/crates/index-scheduler/src/index_mapper/index_map.rs b/crates/index-scheduler/src/index_mapper/index_map.rs index f8080d23b..c20782068 100644 --- a/crates/index-scheduler/src/index_mapper/index_map.rs +++ b/crates/index-scheduler/src/index_mapper/index_map.rs @@ -3,13 +3,13 @@ use std::path::Path; use std::time::Duration; use meilisearch_types::heed::{EnvClosingEvent, EnvFlags, EnvOpenOptions}; -use meilisearch_types::milli::Index; +use meilisearch_types::milli::{Index, Result}; use time::OffsetDateTime; use uuid::Uuid; use super::IndexStatus::{self, Available, BeingDeleted, Closing, Missing}; use crate::lru::{InsertionOutcome, LruMap}; -use crate::{clamp_to_page_size, Result}; +use crate::{clamp_to_page_size}; /// Keep an internally consistent view of the open indexes in memory. /// diff --git a/crates/index-scheduler/src/index_mapper/mod.rs b/crates/index-scheduler/src/index_mapper/mod.rs index 3cccb5a69..500e4cf83 100644 --- a/crates/index-scheduler/src/index_mapper/mod.rs +++ b/crates/index-scheduler/src/index_mapper/mod.rs @@ -11,7 +11,7 @@ use serde::{Deserialize, Serialize}; use time::OffsetDateTime; use tracing::error; use uuid::Uuid; - +use meilisearch_types::milli; use self::index_map::IndexMap; use self::IndexStatus::{Available, BeingDeleted, Closing, Missing}; use crate::uuid_codec::UuidCodec; @@ -121,7 +121,7 @@ impl IndexStats { /// # Parameters /// /// - rtxn: a RO transaction for the index, obtained from `Index::read_txn()`. - pub fn new(index: &Index, rtxn: &RoTxn) -> Result { + pub fn new(index: &Index, rtxn: &RoTxn) -> milli::Result { Ok(IndexStats { number_of_documents: index.number_of_documents(rtxn)?, database_size: index.on_disk_size()?, @@ -189,7 +189,7 @@ impl IndexMapper { date, self.enable_mdb_writemap, self.index_base_map_size, - )?; + ).map_err(|e| Error::from_milli(e, Some(uuid.to_string())))?; wtxn.commit()?; @@ -357,7 +357,8 @@ impl IndexMapper { }; let index_path = self.base_path.join(uuid.to_string()); // take the lock to reopen the environment. 
- reopen.reopen(&mut self.index_map.write().unwrap(), &index_path)?; + reopen.reopen(&mut self.index_map.write().unwrap(), &index_path) + .map_err(|e| Error::from_milli(e, Some(uuid.to_string())))?; continue; } BeingDeleted => return Err(Error::IndexNotFound(name.to_string())), @@ -378,7 +379,7 @@ impl IndexMapper { None, self.enable_mdb_writemap, self.index_base_map_size, - )?; + ).map_err(|e| Error::from_milli(e, Some(uuid.to_string())))?; } Available(index) => break index, Closing(_) => { @@ -459,7 +460,7 @@ impl IndexMapper { None => { let index = self.index(rtxn, index_uid)?; let index_rtxn = index.read_txn()?; - IndexStats::new(&index, &index_rtxn) + IndexStats::new(&index, &index_rtxn).map_err(|e| Error::from_milli(e, Some(uuid.to_string()))) } } } diff --git a/crates/index-scheduler/src/lib.rs b/crates/index-scheduler/src/lib.rs index e0e2bfb75..3a2c532d4 100644 --- a/crates/index-scheduler/src/lib.rs +++ b/crates/index-scheduler/src/lib.rs @@ -1210,9 +1210,9 @@ impl IndexScheduler { tracing::info!("A batch of tasks was successfully completed with {success} successful tasks and {failure} failed tasks."); } // If we have an abortion error we must stop the tick here and re-schedule tasks. - Err(Error::Milli(milli::Error::InternalError( - milli::InternalError::AbortedIndexation, - ))) + Err(Error::Milli{ + error: milli::Error::InternalError(milli::InternalError::AbortedIndexation), .. + }) | Err(Error::AbortedTask) => { #[cfg(test)] self.breakpoint(Breakpoint::AbortedIndexation); @@ -1231,9 +1231,9 @@ impl IndexScheduler { // 2. close the associated environment // 3. resize it // 4. re-schedule tasks - Err(Error::Milli(milli::Error::UserError( - milli::UserError::MaxDatabaseSizeReached, - ))) if index_uid.is_some() => { + Err(Error::Milli { + error: milli::Error::UserError(milli::UserError::MaxDatabaseSizeReached), .. + }) if index_uid.is_some() => { // fixme: add index_uid to match to avoid the unwrap let index_uid = index_uid.unwrap(); // fixme: handle error more gracefully? not sure when this could happen @@ -1470,6 +1470,7 @@ impl IndexScheduler { // TODO: consider using a type alias or a struct embedder/template pub fn embedders( &self, + index_uid: String, embedding_configs: Vec, ) -> Result { let res: Result<_> = embedding_configs @@ -1481,7 +1482,10 @@ impl IndexScheduler { .. }| { let prompt = - Arc::new(prompt.try_into().map_err(meilisearch_types::milli::Error::from)?); + Arc::new(prompt.try_into() + .map_err(meilisearch_types::milli::Error::from) + .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))? 
+ ); // optimistically return existing embedder { let embedders = self.embedders.read().unwrap(); @@ -1497,7 +1501,8 @@ impl IndexScheduler { let embedder = Arc::new( Embedder::new(embedder_options.clone()) .map_err(meilisearch_types::milli::vector::Error::from) - .map_err(meilisearch_types::milli::Error::from)?, + .map_err(meilisearch_types::milli::Error::from) + .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?, ); { let mut embedders = self.embedders.write().unwrap(); diff --git a/crates/meilisearch/src/error.rs b/crates/meilisearch/src/error.rs index 5c4ce171f..6e7283a18 100644 --- a/crates/meilisearch/src/error.rs +++ b/crates/meilisearch/src/error.rs @@ -7,6 +7,7 @@ use meilisearch_types::index_uid::{IndexUid, IndexUidFormatError}; use meilisearch_types::milli::OrderBy; use serde_json::Value; use tokio::task::JoinError; +use meilisearch_types::milli; #[derive(Debug, thiserror::Error)] pub enum MeilisearchHttpError { @@ -62,8 +63,11 @@ pub enum MeilisearchHttpError { HeedError(#[from] meilisearch_types::heed::Error), #[error(transparent)] IndexScheduler(#[from] index_scheduler::Error), - #[error(transparent)] - Milli(#[from] meilisearch_types::milli::Error), + #[error("{}", match .index_name { + Some(name) if !name.is_empty() => format!("Index `{}`: {error}", name), + _ => format!("{error}") + })] + Milli { error: meilisearch_types::milli::Error, index_name: Option }, #[error(transparent)] Payload(#[from] PayloadError), #[error(transparent)] @@ -76,6 +80,12 @@ pub enum MeilisearchHttpError { MissingSearchHybrid, } +impl MeilisearchHttpError { + pub(crate) fn from_milli(error: milli::Error, index_name: Option) -> Self { + Self::Milli { error, index_name } + } +} + impl ErrorCode for MeilisearchHttpError { fn error_code(&self) -> Code { match self { @@ -95,7 +105,7 @@ impl ErrorCode for MeilisearchHttpError { MeilisearchHttpError::SerdeJson(_) => Code::Internal, MeilisearchHttpError::HeedError(_) => Code::Internal, MeilisearchHttpError::IndexScheduler(e) => e.error_code(), - MeilisearchHttpError::Milli(e) => e.error_code(), + MeilisearchHttpError::Milli{error, ..} => error.error_code(), MeilisearchHttpError::Payload(e) => e.error_code(), MeilisearchHttpError::FileStore(_) => Code::Internal, MeilisearchHttpError::DocumentFormat(e) => e.error_code(), diff --git a/crates/meilisearch/src/lib.rs b/crates/meilisearch/src/lib.rs index 633ad2776..779af63f2 100644 --- a/crates/meilisearch/src/lib.rs +++ b/crates/meilisearch/src/lib.rs @@ -395,6 +395,7 @@ fn import_dump( for index_reader in dump_reader.indexes()? 
{ let mut index_reader = index_reader?; let metadata = index_reader.metadata(); + let uid = metadata.uid.clone(); tracing::info!("Importing index `{}`.", metadata.uid); let date = Some((metadata.created_at, metadata.updated_at)); @@ -432,7 +433,7 @@ fn import_dump( let reader = DocumentsBatchReader::from_reader(reader)?; let embedder_configs = index.embedding_configs(&wtxn)?; - let embedders = index_scheduler.embedders(embedder_configs)?; + let embedders = index_scheduler.embedders(uid, embedder_configs)?; let builder = milli::update::IndexDocuments::new( &mut wtxn, diff --git a/crates/meilisearch/src/routes/indexes/facet_search.rs b/crates/meilisearch/src/routes/indexes/facet_search.rs index 99a4a4f28..fc29d3406 100644 --- a/crates/meilisearch/src/routes/indexes/facet_search.rs +++ b/crates/meilisearch/src/routes/indexes/facet_search.rs @@ -185,7 +185,7 @@ pub async fn search( let index = index_scheduler.index(&index_uid)?; let features = index_scheduler.features(); - let search_kind = search_kind(&search_query, &index_scheduler, &index, features)?; + let search_kind = search_kind(&search_query, &index_scheduler, index_uid.to_string(), &index, features)?; let permit = search_queue.try_get_search_permit().await?; let search_result = tokio::task::spawn_blocking(move || { perform_facet_search( diff --git a/crates/meilisearch/src/routes/indexes/mod.rs b/crates/meilisearch/src/routes/indexes/mod.rs index 7d073ec5f..1dda27a98 100644 --- a/crates/meilisearch/src/routes/indexes/mod.rs +++ b/crates/meilisearch/src/routes/indexes/mod.rs @@ -5,7 +5,7 @@ use actix_web::web::Data; use actix_web::{web, HttpRequest, HttpResponse}; use deserr::actix_web::{AwebJson, AwebQueryParameter}; use deserr::{DeserializeError, Deserr, ValuePointerRef}; -use index_scheduler::IndexScheduler; +use index_scheduler::{Error, IndexScheduler}; use meilisearch_types::deserr::query_params::Param; use meilisearch_types::deserr::{immutable_field_error, DeserrJsonError, DeserrQueryParamError}; use meilisearch_types::error::deserr_codes::*; @@ -107,7 +107,7 @@ pub async fn list_indexes( if !filters.is_index_authorized(uid) { return Ok(None); } - Ok(Some(IndexView::new(uid.to_string(), index)?)) + Ok(Some(IndexView::new(uid.to_string(), index).map_err(|e| Error::from_milli(e, Some(uid.to_string())))?)) })?; // Won't cause to open all indexes because IndexView doesn't keep the `Index` opened. 
let indexes: Vec = indexes.into_iter().flatten().collect(); diff --git a/crates/meilisearch/src/routes/indexes/search.rs b/crates/meilisearch/src/routes/indexes/search.rs index 2f5cb4a36..609439b4a 100644 --- a/crates/meilisearch/src/routes/indexes/search.rs +++ b/crates/meilisearch/src/routes/indexes/search.rs @@ -243,11 +243,11 @@ pub async fn search_with_url_query( let index = index_scheduler.index(&index_uid)?; let features = index_scheduler.features(); - let search_kind = search_kind(&query, index_scheduler.get_ref(), &index, features)?; + let search_kind = search_kind(&query, index_scheduler.get_ref(), index_uid.to_string(), &index, features)?; let retrieve_vector = RetrieveVectors::new(query.retrieve_vectors, features)?; let permit = search_queue.try_get_search_permit().await?; let search_result = tokio::task::spawn_blocking(move || { - perform_search(&index, query, search_kind, retrieve_vector, index_scheduler.features()) + perform_search(index_uid.to_string(), &index, query, search_kind, retrieve_vector, index_scheduler.features()) }) .await; permit.drop().await; @@ -287,12 +287,12 @@ pub async fn search_with_post( let features = index_scheduler.features(); - let search_kind = search_kind(&query, index_scheduler.get_ref(), &index, features)?; + let search_kind = search_kind(&query, index_scheduler.get_ref(), index_uid.to_string(), &index, features)?; let retrieve_vectors = RetrieveVectors::new(query.retrieve_vectors, features)?; let permit = search_queue.try_get_search_permit().await?; let search_result = tokio::task::spawn_blocking(move || { - perform_search(&index, query, search_kind, retrieve_vectors, index_scheduler.features()) + perform_search(index_uid.to_string(), &index, query, search_kind, retrieve_vectors, index_scheduler.features()) }) .await; permit.drop().await; @@ -314,6 +314,7 @@ pub async fn search_with_post( pub fn search_kind( query: &SearchQuery, index_scheduler: &IndexScheduler, + index_uid: String, index: &milli::Index, features: RoFeatures, ) -> Result { @@ -332,7 +333,7 @@ pub fn search_kind( (None, _, None) => Ok(SearchKind::KeywordOnly), // hybrid.semantic_ratio == 1.0 => vector (_, Some(HybridQuery { semantic_ratio, embedder }), v) if **semantic_ratio == 1.0 => { - SearchKind::semantic(index_scheduler, index, embedder, v.map(|v| v.len())) + SearchKind::semantic(index_scheduler, index_uid, index, embedder, v.map(|v| v.len())) } // hybrid.semantic_ratio == 0.0 => keyword (_, Some(HybridQuery { semantic_ratio, embedder: _ }), _) if **semantic_ratio == 0.0 => { @@ -340,13 +341,14 @@ pub fn search_kind( } // no query, hybrid, vector => semantic (None, Some(HybridQuery { semantic_ratio: _, embedder }), Some(v)) => { - SearchKind::semantic(index_scheduler, index, embedder, Some(v.len())) + SearchKind::semantic(index_scheduler, index_uid, index, embedder, Some(v.len())) } // query, no hybrid, no vector => keyword (Some(_), None, None) => Ok(SearchKind::KeywordOnly), // query, hybrid, maybe vector => hybrid (Some(_), Some(HybridQuery { semantic_ratio, embedder }), v) => SearchKind::hybrid( index_scheduler, + index_uid, index, embedder, **semantic_ratio, diff --git a/crates/meilisearch/src/routes/indexes/similar.rs b/crates/meilisearch/src/routes/indexes/similar.rs index 79f42f0aa..a0fccff52 100644 --- a/crates/meilisearch/src/routes/indexes/similar.rs +++ b/crates/meilisearch/src/routes/indexes/similar.rs @@ -104,7 +104,7 @@ async fn similar( let index = index_scheduler.index(&index_uid)?; let (embedder_name, embedder, quantized) = - 
SearchKind::embedder(&index_scheduler, &index, &query.embedder, None)?; + SearchKind::embedder(&index_scheduler, index_uid.to_string(), &index, &query.embedder, None)?; tokio::task::spawn_blocking(move || { perform_similar( diff --git a/crates/meilisearch/src/routes/multi_search.rs b/crates/meilisearch/src/routes/multi_search.rs index f8b1bc6ee..c4496e41c 100644 --- a/crates/meilisearch/src/routes/multi_search.rs +++ b/crates/meilisearch/src/routes/multi_search.rs @@ -125,14 +125,16 @@ pub async fn multi_search_with_post( }) .with_index(query_index)?; + let index_uid_str = index_uid.to_string(); + let search_kind = - search_kind(&query, index_scheduler.get_ref(), &index, features) + search_kind(&query, index_scheduler.get_ref(), index_uid_str.clone(), &index, features) .with_index(query_index)?; let retrieve_vector = RetrieveVectors::new(query.retrieve_vectors, features) .with_index(query_index)?; let search_result = tokio::task::spawn_blocking(move || { - perform_search(&index, query, search_kind, retrieve_vector, features) + perform_search(index_uid_str.clone(), &index, query, search_kind, retrieve_vector, features) }) .await .with_index(query_index)?; diff --git a/crates/meilisearch/src/search/federated.rs b/crates/meilisearch/src/search/federated.rs index 5279c26bb..5aae82c66 100644 --- a/crates/meilisearch/src/search/federated.rs +++ b/crates/meilisearch/src/search/federated.rs @@ -560,7 +560,7 @@ pub fn perform_federated_search( // use an immediately invoked lambda to capture the result without returning from the function let res: Result<(), ResponseError> = (|| { - let search_kind = search_kind(&query, index_scheduler, &index, features)?; + let search_kind = search_kind(&query, index_scheduler, index_uid.to_string(), &index, features)?; let canonicalization_kind = match (&search_kind, &query.q) { (SearchKind::SemanticOnly { .. 
}, _) => { @@ -636,7 +636,7 @@ pub fn perform_federated_search( search.offset(0); search.limit(required_hit_count); - let (result, _semantic_hit_count) = super::search_from_kind(search_kind, search)?; + let (result, _semantic_hit_count) = super::search_from_kind(index_uid.to_string(), search_kind, search)?; let format = AttributesFormat { attributes_to_retrieve: query.attributes_to_retrieve, retrieve_vectors, @@ -670,7 +670,8 @@ pub fn perform_federated_search( let formatter_builder = HitMaker::formatter_builder(matching_words, tokenizer); - let hit_maker = HitMaker::new(&index, &rtxn, format, formatter_builder)?; + let hit_maker = HitMaker::new(&index, &rtxn, format, formatter_builder) + .map_err(|e| MeilisearchHttpError::from_milli(e, Some(index_uid.to_string())))?; results_by_query.push(SearchResultByQuery { federation_options, diff --git a/crates/meilisearch/src/search/mod.rs b/crates/meilisearch/src/search/mod.rs index 7832c1761..9c386eeb2 100644 --- a/crates/meilisearch/src/search/mod.rs +++ b/crates/meilisearch/src/search/mod.rs @@ -19,7 +19,7 @@ use meilisearch_types::locales::Locale; use meilisearch_types::milli::score_details::{ScoreDetails, ScoringStrategy}; use meilisearch_types::milli::vector::parsed_vectors::ExplicitVectors; use meilisearch_types::milli::vector::Embedder; -use meilisearch_types::milli::{FacetValueHit, OrderBy, SearchForFacetValues, TimeBudget}; +use meilisearch_types::milli::{FacetValueHit, InternalError, OrderBy, SearchForFacetValues, TimeBudget}; use meilisearch_types::settings::DEFAULT_PAGINATION_MAX_TOTAL_HITS; use meilisearch_types::{milli, Document}; use milli::tokenizer::{Language, TokenizerBuilder}; @@ -281,35 +281,38 @@ pub enum SearchKind { impl SearchKind { pub(crate) fn semantic( index_scheduler: &index_scheduler::IndexScheduler, + index_uid: String, index: &Index, embedder_name: &str, vector_len: Option, ) -> Result { let (embedder_name, embedder, quantized) = - Self::embedder(index_scheduler, index, embedder_name, vector_len)?; + Self::embedder(index_scheduler, index_uid, index, embedder_name, vector_len)?; Ok(Self::SemanticOnly { embedder_name, embedder, quantized }) } pub(crate) fn hybrid( index_scheduler: &index_scheduler::IndexScheduler, + index_uid: String, index: &Index, embedder_name: &str, semantic_ratio: f32, vector_len: Option, ) -> Result { let (embedder_name, embedder, quantized) = - Self::embedder(index_scheduler, index, embedder_name, vector_len)?; + Self::embedder(index_scheduler, index_uid, index, embedder_name, vector_len)?; Ok(Self::Hybrid { embedder_name, embedder, quantized, semantic_ratio }) } pub(crate) fn embedder( index_scheduler: &index_scheduler::IndexScheduler, + index_uid: String, index: &Index, embedder_name: &str, vector_len: Option, ) -> Result<(String, Arc, bool), ResponseError> { let embedder_configs = index.embedding_configs(&index.read_txn()?)?; - let embedders = index_scheduler.embedders(embedder_configs)?; + let embedders = index_scheduler.embedders(index_uid, embedder_configs)?; let (embedder, _, quantized) = embedders .get(embedder_name) @@ -888,6 +891,7 @@ fn prepare_search<'t>( } pub fn perform_search( + index_uid: String, index: &Index, query: SearchQuery, search_kind: SearchKind, @@ -914,7 +918,7 @@ pub fn perform_search( used_negative_operator, }, semantic_hit_count, - ) = search_from_kind(search_kind, search)?; + ) = search_from_kind(index_uid, search_kind, search)?; let SearchQuery { q, @@ -1067,17 +1071,24 @@ fn compute_facet_distribution_stats>( } pub fn search_from_kind( + index_uid: String, 
search_kind: SearchKind, search: milli::Search<'_>, ) -> Result<(milli::SearchResult, Option), MeilisearchHttpError> { let (milli_result, semantic_hit_count) = match &search_kind { - SearchKind::KeywordOnly => (search.execute()?, None), + SearchKind::KeywordOnly => { + let results = search.execute() + .map_err(|e| MeilisearchHttpError::from_milli(e, Some(index_uid.to_string())))?; + (results, None) + }, SearchKind::SemanticOnly { .. } => { - let results = search.execute()?; + let results = search.execute() + .map_err(|e| MeilisearchHttpError::from_milli(e, Some(index_uid.to_string())))?; let semantic_hit_count = results.document_scores.len() as u32; (results, Some(semantic_hit_count)) } - SearchKind::Hybrid { semantic_ratio, .. } => search.execute_hybrid(*semantic_ratio)?, + SearchKind::Hybrid { semantic_ratio, .. } => search.execute_hybrid(*semantic_ratio) + .map_err(|e| MeilisearchHttpError::from_milli(e, Some(index_uid)))?, }; Ok((milli_result, semantic_hit_count)) } @@ -1179,7 +1190,7 @@ impl<'a> HitMaker<'a> { rtxn: &'a RoTxn<'a>, format: AttributesFormat, mut formatter_builder: MatcherBuilder<'a>, - ) -> Result { + ) -> milli::Result { formatter_builder.crop_marker(format.crop_marker); formatter_builder.highlight_prefix(format.highlight_pre_tag); formatter_builder.highlight_suffix(format.highlight_post_tag); @@ -1278,7 +1289,7 @@ impl<'a> HitMaker<'a> { &self, id: u32, score: &[ScoreDetails], - ) -> Result { + ) -> milli::Result { let (_, obkv) = self.index.iter_documents(self.rtxn, std::iter::once(id))?.next().unwrap()?; @@ -1321,7 +1332,7 @@ impl<'a> HitMaker<'a> { .is_some_and(|conf| conf.user_provided.contains(id)); let embeddings = ExplicitVectors { embeddings: Some(vector.into()), regenerate: !user_provided }; - vectors.insert(name, serde_json::to_value(embeddings)?); + vectors.insert(name, serde_json::to_value(embeddings).map_err(InternalError::SerdeJson)?); } document.insert("_vectors".into(), vectors.into()); } @@ -1367,7 +1378,7 @@ fn make_hits<'a>( format: AttributesFormat, matching_words: milli::MatchingWords, documents_ids_scores: impl Iterator)> + 'a, -) -> Result, MeilisearchHttpError> { +) -> milli::Result> { let mut documents = Vec::new(); let dictionary = index.dictionary(rtxn)?; @@ -1688,12 +1699,12 @@ fn make_document( displayed_attributes: &BTreeSet, field_ids_map: &FieldsIdsMap, obkv: obkv::KvReaderU16, -) -> Result { +) -> milli::Result { let mut document = serde_json::Map::new(); // recreate the original json for (key, value) in obkv.iter() { - let value = serde_json::from_slice(value)?; + let value = serde_json::from_slice(value).map_err(InternalError::SerdeJson)?; let key = field_ids_map.name(key).expect("Missing field name").to_string(); document.insert(key, value); @@ -1718,7 +1729,7 @@ fn format_fields( displayable_ids: &BTreeSet, locales: Option<&[Language]>, localized_attributes: &[LocalizedAttributesRule], -) -> Result<(Option, Document), MeilisearchHttpError> { +) -> milli::Result<(Option, Document)> { let mut matches_position = compute_matches.then(BTreeMap::new); let mut document = document.clone(); @@ -1926,7 +1937,7 @@ fn parse_filter_array(arr: &[Value]) -> Result, MeilisearchHttpEr } } - Ok(Filter::from_array(ands)?) + Ok(Filter::from_array(ands).map_err(|e|MeilisearchHttpError::from_milli(e,None))?) 
} #[cfg(test)] From fdcd483740d54926e616cfd280d88780072d6efa Mon Sep 17 00:00:00 2001 From: airycanon Date: Thu, 14 Nov 2024 16:40:38 +0800 Subject: [PATCH 2/2] fix tests --- crates/index-scheduler/src/batch.rs | 262 ++++++++++-------- .../src/index_mapper/index_map.rs | 3 +- .../index-scheduler/src/index_mapper/mod.rs | 51 ++-- crates/index-scheduler/src/lib.rs | 19 +- .../after_removing_the_documents.snap | 4 +- .../fifth_task_succeeds.snap | 2 +- .../fourth_task_fails.snap | 2 +- .../second_task_fails.snap | 2 +- .../third_task_fails.snap | 4 +- .../second_and_third_tasks_fails.snap | 2 +- .../all_other_tasks_succeeds.snap | 2 +- .../second_task_fails.snap | 2 +- .../third_task_succeeds.snap | 2 +- crates/meilisearch/src/error.rs | 4 +- .../src/routes/indexes/facet_search.rs | 3 +- crates/meilisearch/src/routes/indexes/mod.rs | 5 +- .../meilisearch/src/routes/indexes/search.rs | 24 +- .../meilisearch/src/routes/indexes/similar.rs | 9 +- crates/meilisearch/src/routes/multi_search.rs | 20 +- crates/meilisearch/src/search/federated.rs | 12 +- crates/meilisearch/src/search/mod.rs | 28 +- .../tests/documents/add_documents.rs | 4 +- crates/meilisearch/tests/documents/errors.rs | 4 +- .../meilisearch/tests/index/update_index.rs | 2 +- crates/meilisearch/tests/search/errors.rs | 32 +-- crates/meilisearch/tests/search/multi.rs | 12 +- crates/meilisearch/tests/tasks/mod.rs | 2 +- .../tests/vector/binary_quantized.rs | 2 +- crates/meilisearch/tests/vector/mod.rs | 24 +- crates/meilisearch/tests/vector/openai.rs | 4 +- crates/meilisearch/tests/vector/rest.rs | 24 +- 31 files changed, 322 insertions(+), 250 deletions(-) diff --git a/crates/index-scheduler/src/batch.rs b/crates/index-scheduler/src/batch.rs index 176bdd410..44b4663e2 100644 --- a/crates/index-scheduler/src/batch.rs +++ b/crates/index-scheduler/src/batch.rs @@ -448,20 +448,20 @@ impl IndexScheduler { match (document_import, settings) { ( Some(Batch::IndexOperation { - op: - IndexOperation::DocumentOperation { - primary_key, - documents_counts, - operations, - tasks: document_import_tasks, - .. - }, - .. - }), + op: + IndexOperation::DocumentOperation { + primary_key, + documents_counts, + operations, + tasks: document_import_tasks, + .. + }, + .. + }), Some(Batch::IndexOperation { - op: IndexOperation::Settings { settings, tasks: settings_tasks, .. }, - .. - }), + op: IndexOperation::Settings { settings, tasks: settings_tasks, .. }, + .. + }), ) => Ok(Some(Batch::IndexOperation { op: IndexOperation::SettingsAndDocumentOperation { index_uid, @@ -618,12 +618,7 @@ impl IndexScheduler { /// The list of tasks that were processed. The metadata of each task in the returned /// list is updated accordingly, with the exception of the its date fields /// [`finished_at`](meilisearch_types::tasks::Task::finished_at) and [`started_at`](meilisearch_types::tasks::Task::started_at). 
- #[tracing::instrument( - level = "trace", - skip(self, batch), - target = "indexing::scheduler", - fields(batch=batch.to_string()) - )] + #[tracing::instrument(level = "trace", skip(self, batch), target = "indexing::scheduler", fields(batch=batch.to_string()))] pub(crate) fn process_batch(&self, batch: Batch) -> Result> { #[cfg(test)] { @@ -656,10 +651,10 @@ impl IndexScheduler { task.status = Status::Succeeded; match &mut task.details { Some(Details::TaskCancelation { - matched_tasks: _, - canceled_tasks, - original_filter: _, - }) => { + matched_tasks: _, + canceled_tasks, + original_filter: _, + }) => { *canceled_tasks = Some(canceled_tasks_content_uuids.len() as u64); } _ => unreachable!(), @@ -713,10 +708,10 @@ impl IndexScheduler { match &mut task.details { Some(Details::TaskDeletion { - matched_tasks: _, - deleted_tasks, - original_filter: _, - }) => { + matched_tasks: _, + deleted_tasks, + original_filter: _, + }) => { *deleted_tasks = Some(deleted_tasks_count); } _ => unreachable!(), @@ -772,7 +767,8 @@ impl IndexScheduler { let index = self.index_mapper.index(&rtxn, name)?; let dst = temp_snapshot_dir.path().join("indexes").join(uuid.to_string()); fs::create_dir_all(&dst)?; - index.copy_to_file(dst.join("data.mdb"), CompactionOption::Enabled) + index + .copy_to_file(dst.join("data.mdb"), CompactionOption::Enabled) .map_err(|e| Error::from_milli(e, Some(name.to_string())))?; } @@ -873,25 +869,21 @@ impl IndexScheduler { if status == Status::Enqueued { let content_file = self.file_store.get_update(content_file)?; - let reader = DocumentsBatchReader::from_reader(content_file) - .map_err(|e| Error::Milli { - error: e.into(), - index_name: index_uid.clone(), + let reader = + DocumentsBatchReader::from_reader(content_file).map_err(|e| { + Error::Milli { error: e.into(), index_name: index_uid.clone() } })?; let (mut cursor, documents_batch_index) = reader.into_cursor_and_fields_index(); - while let Some(doc) = cursor.next_document() - .map_err(|e| Error::Milli { - error: e.into(), - index_name: index_uid.clone(), - })? - { - dump_content_file.push_document(&obkv_to_object( - &doc, - &documents_batch_index, - ).map_err(|e| Error::from_milli(e, index_uid.clone()))?)?; + while let Some(doc) = cursor.next_document().map_err(|e| { + Error::Milli { error: e.into(), index_name: index_uid.clone() } + })? { + dump_content_file.push_document( + &obkv_to_object(&doc, &documents_batch_index) + .map_err(|e| Error::from_milli(e, index_uid.clone()))?, + )?; } dump_content_file.flush()?; } @@ -905,18 +897,22 @@ impl IndexScheduler { let metadata = IndexMetadata { uid: uid.to_owned(), primary_key: index.primary_key(&rtxn)?.map(String::from), - created_at: index.created_at(&rtxn) + created_at: index + .created_at(&rtxn) .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?, - updated_at: index.updated_at(&rtxn) + updated_at: index + .updated_at(&rtxn) .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?, }; let mut index_dumper = dump.create_index(uid, &metadata)?; let fields_ids_map = index.fields_ids_map(&rtxn)?; let all_fields: Vec<_> = fields_ids_map.iter().map(|(id, _)| id).collect(); - let embedding_configs = index.embedding_configs(&rtxn) + let embedding_configs = index + .embedding_configs(&rtxn) .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?; - let documents = index.all_documents(&rtxn) + let documents = index + .all_documents(&rtxn) .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?; // 3.1. 
Dump the documents @@ -925,14 +921,16 @@ impl IndexScheduler { return Err(Error::AbortedTask); } - let (id, doc) = ret - .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?; + let (id, doc) = + ret.map_err(|e| Error::from_milli(e, Some(uid.to_string())))?; - let mut document = milli::obkv_to_json(&all_fields, &fields_ids_map, doc) - .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?; + let mut document = + milli::obkv_to_json(&all_fields, &fields_ids_map, doc) + .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?; 'inject_vectors: { - let embeddings = index.embeddings(&rtxn, id) + let embeddings = index + .embeddings(&rtxn, id) .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?; if embeddings.is_empty() { @@ -944,21 +942,24 @@ impl IndexScheduler { .or_insert(serde_json::Value::Object(Default::default())); let serde_json::Value::Object(vectors) = vectors else { - return Err(Error::from_milli(milli::Error::UserError( - milli::UserError::InvalidVectorsMapType { - document_id: { - if let Ok(Some(Ok(index))) = index - .external_id_of(&rtxn, std::iter::once(id)) - .map(|it| it.into_iter().next()) - { - index - } else { - format!("internal docid={id}") - } + return Err(Error::from_milli( + milli::Error::UserError( + milli::UserError::InvalidVectorsMapType { + document_id: { + if let Ok(Some(Ok(index))) = index + .external_id_of(&rtxn, std::iter::once(id)) + .map(|it| it.into_iter().next()) + { + index + } else { + format!("internal docid={id}") + } + }, + value: vectors.clone(), }, - value: vectors.clone(), - }, - ), Some(uid.to_string()))); + ), + Some(uid.to_string()), + )); }; for (embedder_name, embeddings) in embeddings { @@ -988,7 +989,8 @@ impl IndexScheduler { index, &rtxn, meilisearch_types::settings::SecretPolicy::RevealSecrets, - ).map_err(|e| Error::from_milli(e, Some(uid.to_string())))?; + ) + .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?; index_dumper.settings(&settings)?; Ok(()) })?; @@ -1078,10 +1080,12 @@ impl IndexScheduler { ); builder.set_primary_key(primary_key); let must_stop_processing = self.must_stop_processing.clone(); - builder.execute( - |indexing_step| tracing::debug!(update = ?indexing_step), - || must_stop_processing.get(), - ).map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; + builder + .execute( + |indexing_step| tracing::debug!(update = ?indexing_step), + || must_stop_processing.get(), + ) + .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; index_wtxn.commit()?; } @@ -1122,11 +1126,11 @@ impl IndexScheduler { let number_of_documents = || -> Result { let index = self.index_mapper.index(&wtxn, &index_uid)?; let index_rtxn = index.read_txn()?; - Ok(index.number_of_documents(&index_rtxn) - .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))? - ) + index + .number_of_documents(&index_rtxn) + .map_err(|e| Error::from_milli(e, Some(index_uid.clone()))) }() - .unwrap_or_default(); + .unwrap_or_default(); // The write transaction is directly owned and committed inside. match self.index_mapper.delete_index(wtxn, &index_uid) { @@ -1244,7 +1248,8 @@ impl IndexScheduler { ) -> Result> { match operation { IndexOperation::DocumentClear { index_uid, mut tasks, .. 
} => { - let count = milli::update::ClearDocuments::new(index_wtxn, index).execute() + let count = milli::update::ClearDocuments::new(index_wtxn, index) + .execute() .map_err(|e| Error::from_milli(e, Some(index_uid)))?; let mut first_clear_found = false; @@ -1284,10 +1289,10 @@ impl IndexScheduler { Some(pk) => { if primary_key != pk { return Err(Error::from_milli( - milli::UserError::PrimaryKeyCannotBeChanged(pk.to_string()).into(), + milli::UserError::PrimaryKeyCannotBeChanged(pk.to_string()) + .into(), Some(index_uid.clone()), - ) - .into()); + )); } } // if the primary key was set and there was no primary key set for this index @@ -1296,10 +1301,12 @@ impl IndexScheduler { let mut builder = milli::update::Settings::new(index_wtxn, index, indexer_config); builder.set_primary_key(primary_key); - builder.execute( - |indexing_step| tracing::debug!(update = ?indexing_step), - || must_stop_processing.clone().get(), - ).map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; + builder + .execute( + |indexing_step| tracing::debug!(update = ?indexing_step), + || must_stop_processing.clone().get(), + ) + .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; primary_key_has_been_set = true; } } @@ -1307,7 +1314,8 @@ impl IndexScheduler { let config = IndexDocumentsConfig { update_method: method, ..Default::default() }; - let embedder_configs = index.embedding_configs(index_wtxn) + let embedder_configs = index + .embedding_configs(index_wtxn) .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; // TODO: consider Arc'ing the map too (we only need read access + we'll be cloning it multiple times, so really makes sense) let embedders = self.embedders(index_uid.clone(), embedder_configs)?; @@ -1319,15 +1327,19 @@ impl IndexScheduler { config, |indexing_step| tracing::trace!(?indexing_step, "Update"), || must_stop_processing.get(), - ).map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; + ) + .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; for (operation, task) in operations.into_iter().zip(tasks.iter_mut()) { match operation { DocumentOperation::Add(content_uuid) => { let content_file = self.file_store.get_update(content_uuid)?; - let reader = DocumentsBatchReader::from_reader(content_file) - .map_err(|e| Error::from_milli(e.into(), Some(index_uid.clone())))?; - let (new_builder, user_result) = builder.add_documents(reader) + let reader = + DocumentsBatchReader::from_reader(content_file).map_err(|e| { + Error::from_milli(e.into(), Some(index_uid.clone())) + })?; + let (new_builder, user_result) = builder + .add_documents(reader) .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; builder = new_builder; @@ -1335,9 +1347,9 @@ impl IndexScheduler { let received_documents = if let Some(Details::DocumentAdditionOrUpdate { - received_documents, - .. - }) = task.details + received_documents, + .. 
+ }) = task.details { received_documents } else { @@ -1364,9 +1376,9 @@ impl IndexScheduler { } } DocumentOperation::Delete(document_ids) => { - let (new_builder, user_result) = - builder.remove_documents(document_ids) - .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; + let (new_builder, user_result) = builder + .remove_documents(document_ids) + .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; builder = new_builder; // Uses Invariant: remove documents actually always returns Ok for the inner result let count = user_result.unwrap(); @@ -1390,7 +1402,8 @@ impl IndexScheduler { } if !tasks.iter().all(|res| res.error.is_some()) { - let addition = builder.execute().map_err(|e| Error::from_milli(e, Some(index_uid)))?; + let addition = + builder.execute().map_err(|e| Error::from_milli(e, Some(index_uid)))?; tracing::info!(indexing_result = ?addition, processed_in = ?started_processing_at.elapsed(), "document indexing done"); } else if primary_key_has_been_set { // Everything failed but we've set a primary key. @@ -1398,10 +1411,12 @@ impl IndexScheduler { let mut builder = milli::update::Settings::new(index_wtxn, index, indexer_config); builder.reset_primary_key(); - builder.execute( - |indexing_step| tracing::trace!(update = ?indexing_step), - || must_stop_processing.clone().get(), - ).map_err(|e| Error::from_milli(e, Some(index_uid)))?; + builder + .execute( + |indexing_step| tracing::trace!(update = ?indexing_step), + || must_stop_processing.clone().get(), + ) + .map_err(|e| Error::from_milli(e, Some(index_uid)))?; } Ok(tasks) @@ -1424,8 +1439,14 @@ impl IndexScheduler { self.index_mapper.indexer_config(), self.must_stop_processing.clone(), index, - index_uid - ); + ) + .map_err(|err| match err { + milli::Error::UserError(milli::UserError::InvalidFilter(_)) => { + Error::from_milli(err, Some(index_uid.clone())) + .with_custom_error_code(Code::InvalidDocumentFilter) + } + e => Error::from_milli(e, Some(index_uid.clone())), + }); let (original_filter, context, function) = if let Some(Details::DocumentEdition { original_filter, context, @@ -1552,14 +1573,17 @@ impl IndexScheduler { config, |indexing_step| tracing::debug!(update = ?indexing_step), || must_stop_processing.get(), - ).map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; + ) + .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; let (new_builder, _count) = - builder.remove_documents_from_db_no_batch(&to_delete) + builder + .remove_documents_from_db_no_batch(&to_delete) .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; builder = new_builder; - let _ = builder.execute().map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; + let _ = + builder.execute().map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; Ok(tasks) } @@ -1578,10 +1602,12 @@ impl IndexScheduler { } let must_stop_processing = self.must_stop_processing.clone(); - builder.execute( - |indexing_step| tracing::debug!(update = ?indexing_step), - || must_stop_processing.get(), - ).map_err(|e| Error::from_milli(e, Some(index_uid)))?; + builder + .execute( + |indexing_step| tracing::debug!(update = ?indexing_step), + || must_stop_processing.get(), + ) + .map_err(|e| Error::from_milli(e, Some(index_uid)))?; Ok(tasks) } @@ -1773,17 +1799,11 @@ fn edit_documents_by_function<'a>( indexer_config: &IndexerConfig, must_stop_processing: MustStopProcessing, index: &'a Index, - index_uid: String, -) -> Result<(u64, u64)> { +) -> milli::Result<(u64, u64)> { let candidates = match filter.as_ref().map(Filter::from_json) { - 
Some(Ok(Some(filter))) => filter.evaluate(wtxn, index).map_err(|err| match err { - milli::Error::UserError(milli::UserError::InvalidFilter(_)) => { - Error::from_milli(err, Some(index_uid.clone())).with_custom_error_code(Code::InvalidDocumentFilter) - } - e => Error::from_milli(e.into(), Some(index_uid.clone())), - })?, + Some(Ok(Some(filter))) => filter.evaluate(wtxn, index)?, None | Some(Ok(None)) => index.documents_ids(wtxn)?, - Some(Err(e)) => return Err(Error::from_milli(e.into(), Some(index_uid.clone()))), + Some(Err(e)) => return Err(e), }; let config = IndexDocumentsConfig { @@ -1798,15 +1818,11 @@ fn edit_documents_by_function<'a>( config, |indexing_step| tracing::debug!(update = ?indexing_step), || must_stop_processing.get(), - ).map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; + )?; - let (new_builder, count) = builder.edit_documents( - &candidates, - context, - code - ).map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; + let (new_builder, count) = builder.edit_documents(&candidates, context, code)?; builder = new_builder; - let _ = builder.execute().map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?; + let _ = builder.execute()?; Ok(count.unwrap()) } diff --git a/crates/index-scheduler/src/index_mapper/index_map.rs b/crates/index-scheduler/src/index_mapper/index_map.rs index c20782068..480dafa7c 100644 --- a/crates/index-scheduler/src/index_mapper/index_map.rs +++ b/crates/index-scheduler/src/index_mapper/index_map.rs @@ -8,9 +8,8 @@ use time::OffsetDateTime; use uuid::Uuid; use super::IndexStatus::{self, Available, BeingDeleted, Closing, Missing}; +use crate::clamp_to_page_size; use crate::lru::{InsertionOutcome, LruMap}; -use crate::{clamp_to_page_size}; - /// Keep an internally consistent view of the open indexes in memory. /// /// This view is made of an LRU cache that will evict the least frequently used indexes when new indexes are opened. diff --git a/crates/index-scheduler/src/index_mapper/mod.rs b/crates/index-scheduler/src/index_mapper/mod.rs index 500e4cf83..8b9ef3597 100644 --- a/crates/index-scheduler/src/index_mapper/mod.rs +++ b/crates/index-scheduler/src/index_mapper/mod.rs @@ -3,19 +3,19 @@ use std::sync::{Arc, RwLock}; use std::time::Duration; use std::{fs, thread}; +use self::index_map::IndexMap; +use self::IndexStatus::{Available, BeingDeleted, Closing, Missing}; +use crate::uuid_codec::UuidCodec; +use crate::{Error, Result}; use meilisearch_types::heed::types::{SerdeJson, Str}; use meilisearch_types::heed::{Database, Env, RoTxn, RwTxn}; +use meilisearch_types::milli; use meilisearch_types::milli::update::IndexerConfig; use meilisearch_types::milli::{FieldDistribution, Index}; use serde::{Deserialize, Serialize}; use time::OffsetDateTime; use tracing::error; use uuid::Uuid; -use meilisearch_types::milli; -use self::index_map::IndexMap; -use self::IndexStatus::{Available, BeingDeleted, Closing, Missing}; -use crate::uuid_codec::UuidCodec; -use crate::{Error, Result}; mod index_map; @@ -183,13 +183,18 @@ impl IndexMapper { // Error if the UUIDv4 somehow already exists in the map, since it should be fresh. // This is very unlikely to happen in practice. // TODO: it would be better to lazily create the index. But we need an Index::open function for milli. 
- let index = self.index_map.write().unwrap().create( - &uuid, - &index_path, - date, - self.enable_mdb_writemap, - self.index_base_map_size, - ).map_err(|e| Error::from_milli(e, Some(uuid.to_string())))?; + let index = self + .index_map + .write() + .unwrap() + .create( + &uuid, + &index_path, + date, + self.enable_mdb_writemap, + self.index_base_map_size, + ) + .map_err(|e| Error::from_milli(e, Some(uuid.to_string())))?; wtxn.commit()?; @@ -357,7 +362,8 @@ impl IndexMapper { }; let index_path = self.base_path.join(uuid.to_string()); // take the lock to reopen the environment. - reopen.reopen(&mut self.index_map.write().unwrap(), &index_path) + reopen + .reopen(&mut self.index_map.write().unwrap(), &index_path) .map_err(|e| Error::from_milli(e, Some(uuid.to_string())))?; continue; } @@ -373,13 +379,15 @@ impl IndexMapper { Missing => { let index_path = self.base_path.join(uuid.to_string()); - break index_map.create( - &uuid, - &index_path, - None, - self.enable_mdb_writemap, - self.index_base_map_size, - ).map_err(|e| Error::from_milli(e, Some(uuid.to_string())))?; + break index_map + .create( + &uuid, + &index_path, + None, + self.enable_mdb_writemap, + self.index_base_map_size, + ) + .map_err(|e| Error::from_milli(e, Some(uuid.to_string())))?; } Available(index) => break index, Closing(_) => { @@ -460,7 +468,8 @@ impl IndexMapper { None => { let index = self.index(rtxn, index_uid)?; let index_rtxn = index.read_txn()?; - IndexStats::new(&index, &index_rtxn).map_err(|e| Error::from_milli(e, Some(uuid.to_string()))) + IndexStats::new(&index, &index_rtxn) + .map_err(|e| Error::from_milli(e, Some(uuid.to_string()))) } } } diff --git a/crates/index-scheduler/src/lib.rs b/crates/index-scheduler/src/lib.rs index 3a2c532d4..c8a0a1212 100644 --- a/crates/index-scheduler/src/lib.rs +++ b/crates/index-scheduler/src/lib.rs @@ -1210,8 +1210,9 @@ impl IndexScheduler { tracing::info!("A batch of tasks was successfully completed with {success} successful tasks and {failure} failed tasks."); } // If we have an abortion error we must stop the tick here and re-schedule tasks. - Err(Error::Milli{ - error: milli::Error::InternalError(milli::InternalError::AbortedIndexation), .. + Err(Error::Milli { + error: milli::Error::InternalError(milli::InternalError::AbortedIndexation), + .. }) | Err(Error::AbortedTask) => { #[cfg(test)] @@ -1232,7 +1233,8 @@ impl IndexScheduler { // 3. resize it // 4. re-schedule tasks Err(Error::Milli { - error: milli::Error::UserError(milli::UserError::MaxDatabaseSizeReached), .. + error: milli::Error::UserError(milli::UserError::MaxDatabaseSizeReached), + .. }) if index_uid.is_some() => { // fixme: add index_uid to match to avoid the unwrap let index_uid = index_uid.unwrap(); @@ -1481,11 +1483,12 @@ impl IndexScheduler { config: milli::vector::EmbeddingConfig { embedder_options, prompt, quantized }, .. }| { - let prompt = - Arc::new(prompt.try_into() + let prompt = Arc::new( + prompt + .try_into() .map_err(meilisearch_types::milli::Error::from) - .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))? 
- ); + .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?, + ); // optimistically return existing embedder { let embedders = self.embedders.read().unwrap(); @@ -5204,7 +5207,7 @@ mod tests { insta::assert_json_snapshot!(simple_hf_config.embedder_options); let simple_hf_name = name.clone(); - let configs = index_scheduler.embedders(configs).unwrap(); + let configs = index_scheduler.embedders("doggos".to_string(), configs).unwrap(); let (hf_embedder, _, _) = configs.get(&simple_hf_name).unwrap(); let beagle_embed = hf_embedder.embed_one(S("Intel the beagle best doggo")).unwrap(); let lab_embed = hf_embedder.embed_one(S("Max the lab best doggo")).unwrap(); diff --git a/crates/index-scheduler/src/snapshots/lib.rs/fail_in_process_batch_for_document_deletion/after_removing_the_documents.snap b/crates/index-scheduler/src/snapshots/lib.rs/fail_in_process_batch_for_document_deletion/after_removing_the_documents.snap index 82748751e..5a4d294a9 100644 --- a/crates/index-scheduler/src/snapshots/lib.rs/fail_in_process_batch_for_document_deletion/after_removing_the_documents.snap +++ b/crates/index-scheduler/src/snapshots/lib.rs/fail_in_process_batch_for_document_deletion/after_removing_the_documents.snap @@ -9,8 +9,8 @@ source: index-scheduler/src/lib.rs 0 {uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set({"catto"}), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set({"catto"}), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData }, is_deletion: false, allow_index_creation: true }} 1 {uid: 1, status: succeeded, details: { received_documents: 3, indexed_documents: Some(3) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }} 2 {uid: 2, status: succeeded, details: { received_document_ids: 1, deleted_documents: Some(1) }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1"] }} -3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Invalid type for filter subexpression: expected: String, Array, found: true.", error_code: "invalid_document_filter", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid_document_filter" }, details: { original_filter: true, deleted_documents: Some(0) }, kind: DocumentDeletionByFilter { index_uid: "doggos", filter_expr: Bool(true) }} -4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Attribute `id` is not filterable. 
Available filterable attributes are: `catto`.\n1:3 id = 2", error_code: "invalid_document_filter", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid_document_filter" }, details: { original_filter: "id = 2", deleted_documents: Some(0) }, kind: DocumentDeletionByFilter { index_uid: "doggos", filter_expr: String("id = 2") }} +3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos`: Invalid type for filter subexpression: expected: String, Array, found: true.", error_code: "invalid_document_filter", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid_document_filter" }, details: { original_filter: true, deleted_documents: Some(0) }, kind: DocumentDeletionByFilter { index_uid: "doggos", filter_expr: Bool(true) }} +4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos`: Attribute `id` is not filterable. Available filterable attributes are: `catto`.\n1:3 id = 2", error_code: "invalid_document_filter", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid_document_filter" }, details: { original_filter: "id = 2", deleted_documents: Some(0) }, kind: DocumentDeletionByFilter { index_uid: "doggos", filter_expr: String("id = 2") }} 5 {uid: 5, status: succeeded, details: { original_filter: "catto EXISTS", deleted_documents: Some(1) }, kind: DocumentDeletionByFilter { index_uid: "doggos", filter_expr: String("catto EXISTS") }} ---------------------------------------------------------------------- ### Status: diff --git a/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/fifth_task_succeeds.snap b/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/fifth_task_succeeds.snap index 85d7ba460..79bb76bc5 100644 --- a/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/fifth_task_succeeds.snap +++ b/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/fifth_task_succeeds.snap @@ -9,7 +9,7 @@ source: index-scheduler/src/lib.rs 0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":0,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} 1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} 2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, 
allow_index_creation: true }} -3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} +3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos`: Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} 4 {uid: 4, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} ---------------------------------------------------------------------- ### Status: diff --git a/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/fourth_task_fails.snap b/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/fourth_task_fails.snap index 2a8748657..f69ec0cd0 100644 --- a/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/fourth_task_fails.snap +++ b/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/fourth_task_fails.snap @@ -9,7 +9,7 @@ source: index-scheduler/src/lib.rs 0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":0,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} 1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} 2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} -3 {uid: 3, status: failed, error: 
ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} +3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos`: Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} 4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} ---------------------------------------------------------------------- ### Status: diff --git a/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/second_task_fails.snap b/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/second_task_fails.snap index 18db64ca4..f801efbbd 100644 --- a/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/second_task_fails.snap +++ b/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/second_task_fails.snap @@ -7,7 +7,7 @@ source: index-scheduler/src/lib.rs ---------------------------------------------------------------------- ### All Tasks: 0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} -1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos`: Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} 2 {uid: 2, 
status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bloup"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} ---------------------------------------------------------------------- ### Status: diff --git a/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/third_task_fails.snap b/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/third_task_fails.snap index 4a9edc602..2b40e4973 100644 --- a/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/third_task_fails.snap +++ b/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/third_task_fails.snap @@ -7,8 +7,8 @@ source: index-scheduler/src/lib.rs ---------------------------------------------------------------------- ### All Tasks: 0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} -1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} -2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bloup"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos`: Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos`: Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bloup"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, 
allow_index_creation: true }} ---------------------------------------------------------------------- ### Status: enqueued [] diff --git a/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key_batch_wrong_key/second_and_third_tasks_fails.snap b/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key_batch_wrong_key/second_and_third_tasks_fails.snap index 05eca0af1..f16c00bc7 100644 --- a/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key_batch_wrong_key/second_and_third_tasks_fails.snap +++ b/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key_batch_wrong_key/second_and_third_tasks_fails.snap @@ -7,7 +7,7 @@ source: index-scheduler/src/lib.rs ---------------------------------------------------------------------- ### All Tasks: 0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} -1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos`: Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} 2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} ---------------------------------------------------------------------- ### Status: diff --git a/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/all_other_tasks_succeeds.snap b/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/all_other_tasks_succeeds.snap index 1f55e8f6d..76ae1abbe 100644 --- a/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/all_other_tasks_succeeds.snap +++ b/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/all_other_tasks_succeeds.snap @@ -7,7 +7,7 @@ source: index-scheduler/src/lib.rs ---------------------------------------------------------------------- ### All Tasks: 0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: 
Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} -1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `doggoid`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos`: Index already has a primary key: `doggoid`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} 2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} 3 {uid: 3, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} 4 {uid: 4, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} diff --git a/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/second_task_fails.snap b/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/second_task_fails.snap index 2920c3b2e..fc6473b74 100644 --- a/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/second_task_fails.snap +++ b/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/second_task_fails.snap @@ -7,7 +7,7 @@ source: index-scheduler/src/lib.rs ---------------------------------------------------------------------- ### All Tasks: 0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} -1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `doggoid`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" 
}, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos`: Index already has a primary key: `doggoid`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} 2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} 3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} 4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} diff --git a/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/third_task_succeeds.snap b/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/third_task_succeeds.snap index 8981dd8c3..dd802db4c 100644 --- a/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/third_task_succeeds.snap +++ b/crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/third_task_succeeds.snap @@ -7,7 +7,7 @@ source: index-scheduler/src/lib.rs ---------------------------------------------------------------------- ### All Tasks: 0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} -1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `doggoid`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos`: Index already has a primary key: `doggoid`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", 
error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} 2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} 3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} 4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} diff --git a/crates/meilisearch/src/error.rs b/crates/meilisearch/src/error.rs index 6e7283a18..b86ef699a 100644 --- a/crates/meilisearch/src/error.rs +++ b/crates/meilisearch/src/error.rs @@ -4,10 +4,10 @@ use byte_unit::{Byte, UnitType}; use meilisearch_types::document_formats::{DocumentFormatError, PayloadType}; use meilisearch_types::error::{Code, ErrorCode, ResponseError}; use meilisearch_types::index_uid::{IndexUid, IndexUidFormatError}; +use meilisearch_types::milli; use meilisearch_types::milli::OrderBy; use serde_json::Value; use tokio::task::JoinError; -use meilisearch_types::milli; #[derive(Debug, thiserror::Error)] pub enum MeilisearchHttpError { @@ -105,7 +105,7 @@ impl ErrorCode for MeilisearchHttpError { MeilisearchHttpError::SerdeJson(_) => Code::Internal, MeilisearchHttpError::HeedError(_) => Code::Internal, MeilisearchHttpError::IndexScheduler(e) => e.error_code(), - MeilisearchHttpError::Milli{error, ..} => error.error_code(), + MeilisearchHttpError::Milli { error, .. 
} => error.error_code(), MeilisearchHttpError::Payload(e) => e.error_code(), MeilisearchHttpError::FileStore(_) => Code::Internal, MeilisearchHttpError::DocumentFormat(e) => e.error_code(), diff --git a/crates/meilisearch/src/routes/indexes/facet_search.rs b/crates/meilisearch/src/routes/indexes/facet_search.rs index fc29d3406..ff11f1305 100644 --- a/crates/meilisearch/src/routes/indexes/facet_search.rs +++ b/crates/meilisearch/src/routes/indexes/facet_search.rs @@ -185,7 +185,8 @@ pub async fn search( let index = index_scheduler.index(&index_uid)?; let features = index_scheduler.features(); - let search_kind = search_kind(&search_query, &index_scheduler, index_uid.to_string(), &index, features)?; + let search_kind = + search_kind(&search_query, &index_scheduler, index_uid.to_string(), &index, features)?; let permit = search_queue.try_get_search_permit().await?; let search_result = tokio::task::spawn_blocking(move || { perform_facet_search( diff --git a/crates/meilisearch/src/routes/indexes/mod.rs b/crates/meilisearch/src/routes/indexes/mod.rs index 1dda27a98..b2a85335b 100644 --- a/crates/meilisearch/src/routes/indexes/mod.rs +++ b/crates/meilisearch/src/routes/indexes/mod.rs @@ -107,7 +107,10 @@ pub async fn list_indexes( if !filters.is_index_authorized(uid) { return Ok(None); } - Ok(Some(IndexView::new(uid.to_string(), index).map_err(|e| Error::from_milli(e, Some(uid.to_string())))?)) + Ok(Some( + IndexView::new(uid.to_string(), index) + .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?, + )) })?; // Won't cause to open all indexes because IndexView doesn't keep the `Index` opened. let indexes: Vec<IndexView> = indexes.into_iter().flatten().collect(); diff --git a/crates/meilisearch/src/routes/indexes/search.rs b/crates/meilisearch/src/routes/indexes/search.rs index 609439b4a..fbaac67da 100644 --- a/crates/meilisearch/src/routes/indexes/search.rs +++ b/crates/meilisearch/src/routes/indexes/search.rs @@ -243,11 +243,19 @@ pub async fn search_with_url_query( let index = index_scheduler.index(&index_uid)?; let features = index_scheduler.features(); - let search_kind = search_kind(&query, index_scheduler.get_ref(), index_uid.to_string(), &index, features)?; + let search_kind = + search_kind(&query, index_scheduler.get_ref(), index_uid.to_string(), &index, features)?; let retrieve_vector = RetrieveVectors::new(query.retrieve_vectors, features)?; let permit = search_queue.try_get_search_permit().await?; let search_result = tokio::task::spawn_blocking(move || { - perform_search(index_uid.to_string(), &index, query, search_kind, retrieve_vector, index_scheduler.features()) + perform_search( + index_uid.to_string(), + &index, + query, + search_kind, + retrieve_vector, + index_scheduler.features(), + ) }) .await; permit.drop().await; @@ -287,12 +295,20 @@ pub async fn search_with_post( let features = index_scheduler.features(); - let search_kind = search_kind(&query, index_scheduler.get_ref(), index_uid.to_string(), &index, features)?; + let search_kind = + search_kind(&query, index_scheduler.get_ref(), index_uid.to_string(), &index, features)?; let retrieve_vectors = RetrieveVectors::new(query.retrieve_vectors, features)?; let permit = search_queue.try_get_search_permit().await?; let search_result = tokio::task::spawn_blocking(move || { - perform_search(index_uid.to_string(), &index, query, search_kind, retrieve_vectors, index_scheduler.features()) + perform_search( + index_uid.to_string(), + &index, + query, + search_kind, + retrieve_vectors, + index_scheduler.features(), + ) }) .await;
permit.drop().await; diff --git a/crates/meilisearch/src/routes/indexes/similar.rs b/crates/meilisearch/src/routes/indexes/similar.rs index a0fccff52..f47771061 100644 --- a/crates/meilisearch/src/routes/indexes/similar.rs +++ b/crates/meilisearch/src/routes/indexes/similar.rs @@ -103,8 +103,13 @@ async fn similar( let index = index_scheduler.index(&index_uid)?; - let (embedder_name, embedder, quantized) = - SearchKind::embedder(&index_scheduler, index_uid.to_string(), &index, &query.embedder, None)?; + let (embedder_name, embedder, quantized) = SearchKind::embedder( + &index_scheduler, + index_uid.to_string(), + &index, + &query.embedder, + None, + )?; tokio::task::spawn_blocking(move || { perform_similar( diff --git a/crates/meilisearch/src/routes/multi_search.rs b/crates/meilisearch/src/routes/multi_search.rs index c4496e41c..a2db0b22b 100644 --- a/crates/meilisearch/src/routes/multi_search.rs +++ b/crates/meilisearch/src/routes/multi_search.rs @@ -127,14 +127,26 @@ pub async fn multi_search_with_post( let index_uid_str = index_uid.to_string(); - let search_kind = - search_kind(&query, index_scheduler.get_ref(), index_uid_str.clone(), &index, features) - .with_index(query_index)?; + let search_kind = search_kind( + &query, + index_scheduler.get_ref(), + index_uid_str.clone(), + &index, + features, + ) + .with_index(query_index)?; let retrieve_vector = RetrieveVectors::new(query.retrieve_vectors, features) .with_index(query_index)?; let search_result = tokio::task::spawn_blocking(move || { - perform_search(index_uid_str.clone(), &index, query, search_kind, retrieve_vector, features) + perform_search( + index_uid_str.clone(), + &index, + query, + search_kind, + retrieve_vector, + features, + ) }) .await .with_index(query_index)?; diff --git a/crates/meilisearch/src/search/federated.rs b/crates/meilisearch/src/search/federated.rs index 5aae82c66..c1c6bb7d7 100644 --- a/crates/meilisearch/src/search/federated.rs +++ b/crates/meilisearch/src/search/federated.rs @@ -560,7 +560,8 @@ pub fn perform_federated_search( // use an immediately invoked lambda to capture the result without returning from the function let res: Result<(), ResponseError> = (|| { - let search_kind = search_kind(&query, index_scheduler, index_uid.to_string(), &index, features)?; + let search_kind = + search_kind(&query, index_scheduler, index_uid.to_string(), &index, features)?; let canonicalization_kind = match (&search_kind, &query.q) { (SearchKind::SemanticOnly { .. 
}, _) => { @@ -636,7 +637,8 @@ pub fn perform_federated_search( search.offset(0); search.limit(required_hit_count); - let (result, _semantic_hit_count) = super::search_from_kind(index_uid.to_string(), search_kind, search)?; + let (result, _semantic_hit_count) = + super::search_from_kind(index_uid.to_string(), search_kind, search)?; let format = AttributesFormat { attributes_to_retrieve: query.attributes_to_retrieve, retrieve_vectors, @@ -670,8 +672,10 @@ pub fn perform_federated_search( let formatter_builder = HitMaker::formatter_builder(matching_words, tokenizer); - let hit_maker = HitMaker::new(&index, &rtxn, format, formatter_builder) - .map_err(|e| MeilisearchHttpError::from_milli(e, Some(index_uid.to_string())))?; + let hit_maker = + HitMaker::new(&index, &rtxn, format, formatter_builder).map_err(|e| { + MeilisearchHttpError::from_milli(e, Some(index_uid.to_string())) + })?; results_by_query.push(SearchResultByQuery { federation_options, diff --git a/crates/meilisearch/src/search/mod.rs b/crates/meilisearch/src/search/mod.rs index 9c386eeb2..cb4e3acb6 100644 --- a/crates/meilisearch/src/search/mod.rs +++ b/crates/meilisearch/src/search/mod.rs @@ -19,7 +19,9 @@ use meilisearch_types::locales::Locale; use meilisearch_types::milli::score_details::{ScoreDetails, ScoringStrategy}; use meilisearch_types::milli::vector::parsed_vectors::ExplicitVectors; use meilisearch_types::milli::vector::Embedder; -use meilisearch_types::milli::{FacetValueHit, InternalError, OrderBy, SearchForFacetValues, TimeBudget}; +use meilisearch_types::milli::{ + FacetValueHit, InternalError, OrderBy, SearchForFacetValues, TimeBudget, +}; use meilisearch_types::settings::DEFAULT_PAGINATION_MAX_TOTAL_HITS; use meilisearch_types::{milli, Document}; use milli::tokenizer::{Language, TokenizerBuilder}; @@ -1077,17 +1079,20 @@ pub fn search_from_kind( ) -> Result<(milli::SearchResult, Option<u32>), MeilisearchHttpError> { let (milli_result, semantic_hit_count) = match &search_kind { SearchKind::KeywordOnly => { - let results = search.execute() + let results = search + .execute() .map_err(|e| MeilisearchHttpError::from_milli(e, Some(index_uid.to_string())))?; (results, None) - }, + } SearchKind::SemanticOnly { .. } => { - let results = search.execute() + let results = search + .execute() .map_err(|e| MeilisearchHttpError::from_milli(e, Some(index_uid.to_string())))?; let semantic_hit_count = results.document_scores.len() as u32; (results, Some(semantic_hit_count)) } - SearchKind::Hybrid { semantic_ratio, .. } => search.execute_hybrid(*semantic_ratio) + SearchKind::Hybrid { semantic_ratio, ..
} => search + .execute_hybrid(*semantic_ratio) + .map_err(|e| MeilisearchHttpError::from_milli(e, Some(index_uid)))?, }; Ok((milli_result, semantic_hit_count)) @@ -1285,11 +1290,7 @@ impl<'a> HitMaker<'a> { }) } - pub fn make_hit( - &self, - id: u32, - score: &[ScoreDetails], - ) -> milli::Result<SearchHit> { + pub fn make_hit(&self, id: u32, score: &[ScoreDetails]) -> milli::Result<SearchHit> { let (_, obkv) = self.index.iter_documents(self.rtxn, std::iter::once(id))?.next().unwrap()?; @@ -1332,7 +1333,10 @@ impl<'a> HitMaker<'a> { .is_some_and(|conf| conf.user_provided.contains(id)); let embeddings = ExplicitVectors { embeddings: Some(vector.into()), regenerate: !user_provided }; - vectors.insert(name, serde_json::to_value(embeddings).map_err(InternalError::SerdeJson)?); + vectors.insert( + name, + serde_json::to_value(embeddings).map_err(InternalError::SerdeJson)?, + ); } document.insert("_vectors".into(), vectors.into()); } @@ -1937,7 +1941,7 @@ fn parse_filter_array(arr: &[Value]) -> Result<Option<Filter>, MeilisearchHttpError> { } } - Ok(Filter::from_array(ands).map_err(|e|MeilisearchHttpError::from_milli(e,None))?) + Filter::from_array(ands).map_err(|e| MeilisearchHttpError::from_milli(e, None)) } #[cfg(test)] diff --git a/crates/meilisearch/tests/documents/add_documents.rs b/crates/meilisearch/tests/documents/add_documents.rs index c37b3a5e3..964f51d4e 100644 --- a/crates/meilisearch/tests/documents/add_documents.rs +++ b/crates/meilisearch/tests/documents/add_documents.rs @@ -1446,7 +1446,7 @@ async fn error_document_field_limit_reached_over_multiple_documents() { "indexedDocuments": 0 }, "error": { - "message": "A document cannot contain more than 65,535 fields.", + "message": "Index `test`: A document cannot contain more than 65,535 fields.", "code": "max_fields_limit_exceeded", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#max_fields_limit_exceeded" @@ -2242,7 +2242,7 @@ async fn add_invalid_geo_and_then_settings() { ] }, "error": { - "message": "Could not parse latitude in the document with the id: `\"11\"`. Was expecting a finite number but instead got `null`.", + "message": "Index `test`: Could not parse latitude in the document with the id: `\"11\"`. Was expecting a finite number but instead got `null`.", "code": "invalid_document_geo_field", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" diff --git a/crates/meilisearch/tests/documents/errors.rs b/crates/meilisearch/tests/documents/errors.rs index 4c644ae98..9789e1eda 100644 --- a/crates/meilisearch/tests/documents/errors.rs +++ b/crates/meilisearch/tests/documents/errors.rs @@ -602,7 +602,7 @@ async fn delete_document_by_filter() { "originalFilter": "\"doggo = bernese\"" }, "error": { - "message": "Attribute `doggo` is not filterable. This index does not have configured filterable attributes.\n1:6 doggo = bernese", + "message": "Index `EMPTY_INDEX`: Attribute `doggo` is not filterable. This index does not have configured filterable attributes.\n1:6 doggo = bernese", "code": "invalid_document_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_filter" @@ -633,7 +633,7 @@ async fn delete_document_by_filter() { "originalFilter": "\"catto = jorts\"" }, "error": { - "message": "Attribute `catto` is not filterable.
Available filterable attributes are: `id`, `title`.\n1:6 catto = jorts", "code": "invalid_document_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_filter" diff --git a/crates/meilisearch/tests/index/update_index.rs b/crates/meilisearch/tests/index/update_index.rs index 36ec27306..f991c3580 100644 --- a/crates/meilisearch/tests/index/update_index.rs +++ b/crates/meilisearch/tests/index/update_index.rs @@ -95,7 +95,7 @@ async fn error_update_existing_primary_key() { let response = index.wait_task(2).await; let expected_response = json!({ - "message": "Index already has a primary key: `id`.", + "message": "Index `test`: Index already has a primary key: `id`.", "code": "index_primary_key_already_exists", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_primary_key_already_exists" diff --git a/crates/meilisearch/tests/search/errors.rs b/crates/meilisearch/tests/search/errors.rs index 6840f8fba..1401637ba 100644 --- a/crates/meilisearch/tests/search/errors.rs +++ b/crates/meilisearch/tests/search/errors.rs @@ -711,7 +711,7 @@ async fn filter_invalid_attribute_array() { index.wait_task(task.uid()).await; let expected_response = json!({ - "message": "Attribute `many` is not filterable. Available filterable attributes are: `title`.\n1:5 many = Glass", + "message": format!("Index `{}`: Attribute `many` is not filterable. Available filterable attributes are: `title`.\n1:5 many = Glass", index.uid), "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" @@ -733,7 +733,7 @@ async fn filter_invalid_attribute_string() { index.wait_task(task.uid()).await; let expected_response = json!({ - "message": "Attribute `many` is not filterable. Available filterable attributes are: `title`.\n1:5 many = Glass", + "message": format!("Index `{}`: Attribute `many` is not filterable. Available filterable attributes are: `title`.\n1:5 many = Glass", index.uid), "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" @@ -940,7 +940,7 @@ async fn sort_unsortable_attribute() { index.wait_task(response.uid()).await.succeeded(); let expected_response = json!({ - "message": "Attribute `title` is not sortable. Available sortable attributes are: `id`.", + "message": format!("Index `{}`: Attribute `title` is not sortable. 
Available sortable attributes are: `id`.", index.uid), "code": "invalid_search_sort", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_sort" @@ -998,7 +998,7 @@ async fn sort_unset_ranking_rule() { index.wait_task(response.uid()).await.succeeded(); let expected_response = json!({ - "message": "You must specify where `sort` is listed in the rankingRules setting to use the sort parameter at search time.", + "message": format!("Index `{}`: You must specify where `sort` is listed in the rankingRules setting to use the sort parameter at search time.", index.uid), "code": "invalid_search_sort", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_sort" @@ -1018,8 +1018,8 @@ async fn sort_unset_ranking_rule() { #[actix_rt::test] async fn search_on_unknown_field() { - let server = Server::new_shared(); - let index = server.unique_index(); + let server = Server::new().await; + let index = server.index("test"); let (response, _code) = index.update_settings_searchable_attributes(json!(["id", "title"])).await; index.wait_task(response.uid()).await.succeeded(); @@ -1031,7 +1031,7 @@ async fn search_on_unknown_field() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.", + "message": "Index `test`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.", "code": "invalid_search_attributes_to_search_on", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on" @@ -1044,8 +1044,8 @@ async fn search_on_unknown_field() { #[actix_rt::test] async fn search_on_unknown_field_plus_joker() { - let server = Server::new_shared(); - let index = server.unique_index(); + let server = Server::new().await; + let index = server.index("test"); let (response, _code) = index.update_settings_searchable_attributes(json!(["id", "title"])).await; index.wait_task(response.uid()).await.succeeded(); @@ -1057,7 +1057,7 @@ async fn search_on_unknown_field_plus_joker() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.", + "message": "Index `test`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.", "code": "invalid_search_attributes_to_search_on", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on" @@ -1074,7 +1074,7 @@ async fn search_on_unknown_field_plus_joker() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.", + "message": "Index `test`: Attribute `unknown` is not searchable. 
Available searchable attributes are: `id, title`.", "code": "invalid_search_attributes_to_search_on", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on" @@ -1087,8 +1087,8 @@ async fn search_on_unknown_field_plus_joker() { #[actix_rt::test] async fn distinct_at_search_time() { - let server = Server::new_shared(); - let index = server.unique_index(); + let server = Server::new().await; + let index = server.index("test"); let (task, _) = index.create(None).await; index.wait_task(task.uid()).await.succeeded(); @@ -1097,7 +1097,7 @@ async fn distinct_at_search_time() { snapshot!(code, @"400 Bad Request"); snapshot!(response, @r###" { - "message": "Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. This index does not have configured filterable attributes.", + "message": "Index `test`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. This index does not have configured filterable attributes.", "code": "invalid_search_distinct", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_distinct" @@ -1112,7 +1112,7 @@ async fn distinct_at_search_time() { snapshot!(code, @"400 Bad Request"); snapshot!(response, @r###" { - "message": "Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes are: `color, machin`.", + "message": "Index `test`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes are: `color, machin`.", "code": "invalid_search_distinct", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_distinct" @@ -1127,7 +1127,7 @@ async fn distinct_at_search_time() { snapshot!(code, @"400 Bad Request"); snapshot!(response, @r###" { - "message": "Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes are: `color, <..hidden-attributes>`.", + "message": "Index `test`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes are: `color, <..hidden-attributes>`.", "code": "invalid_search_distinct", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_distinct" diff --git a/crates/meilisearch/tests/search/multi.rs b/crates/meilisearch/tests/search/multi.rs index eaa1da15f..2735ab65c 100644 --- a/crates/meilisearch/tests/search/multi.rs +++ b/crates/meilisearch/tests/search/multi.rs @@ -1070,7 +1070,7 @@ async fn federation_one_query_error() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Inside `.queries[1]`: Attribute `title` is not filterable. This index does not have configured filterable attributes.\n1:6 title = toto", + "message": "Inside `.queries[1]`: Index `nested`: Attribute `title` is not filterable. This index does not have configured filterable attributes.\n1:6 title = toto", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" @@ -1102,7 +1102,7 @@ async fn federation_one_query_sort_error() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Inside `.queries[1]`: Attribute `doggos` is not sortable. 
This index does not have configured sortable attributes.", + "message": "Inside `.queries[1]`: Index `nested`: Attribute `doggos` is not sortable. This index does not have configured sortable attributes.", "code": "invalid_search_sort", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_sort" @@ -1166,7 +1166,7 @@ async fn federation_multiple_query_errors() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Inside `.queries[0]`: Attribute `title` is not filterable. This index does not have configured filterable attributes.\n1:6 title = toto", + "message": "Inside `.queries[0]`: Index `test`: Attribute `title` is not filterable. This index does not have configured filterable attributes.\n1:6 title = toto", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" @@ -1198,7 +1198,7 @@ async fn federation_multiple_query_sort_errors() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Inside `.queries[0]`: Attribute `title` is not sortable. This index does not have configured sortable attributes.", + "message": "Inside `.queries[0]`: Index `test`: Attribute `title` is not sortable. This index does not have configured sortable attributes.", "code": "invalid_search_sort", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_sort" @@ -1231,7 +1231,7 @@ async fn federation_multiple_query_errors_interleaved() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Inside `.queries[1]`: Attribute `doggos` is not filterable. This index does not have configured filterable attributes.\n1:7 doggos IN [intel, kefir]", + "message": "Inside `.queries[1]`: Index `nested`: Attribute `doggos` is not filterable. This index does not have configured filterable attributes.\n1:7 doggos IN [intel, kefir]", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" @@ -1264,7 +1264,7 @@ async fn federation_multiple_query_sort_errors_interleaved() { snapshot!(code, @"400 Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Inside `.queries[1]`: Attribute `doggos` is not sortable. This index does not have configured sortable attributes.", + "message": "Inside `.queries[1]`: Index `nested`: Attribute `doggos` is not sortable. This index does not have configured sortable attributes.", "code": "invalid_search_sort", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_sort" diff --git a/crates/meilisearch/tests/tasks/mod.rs b/crates/meilisearch/tests/tasks/mod.rs index c59313885..33441f19d 100644 --- a/crates/meilisearch/tests/tasks/mod.rs +++ b/crates/meilisearch/tests/tasks/mod.rs @@ -529,7 +529,7 @@ async fn test_summarized_delete_documents_by_filter() { "originalFilter": "\"doggo = bernese\"" }, "error": { - "message": "Attribute `doggo` is not filterable. This index does not have configured filterable attributes.\n1:6 doggo = bernese", + "message": "Index `test`: Attribute `doggo` is not filterable. 
This index does not have configured filterable attributes.\n1:6 doggo = bernese", "code": "invalid_document_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_document_filter" diff --git a/crates/meilisearch/tests/vector/binary_quantized.rs b/crates/meilisearch/tests/vector/binary_quantized.rs index d3fe3c824..e344b8051 100644 --- a/crates/meilisearch/tests/vector/binary_quantized.rs +++ b/crates/meilisearch/tests/vector/binary_quantized.rs @@ -317,7 +317,7 @@ async fn try_to_disable_binary_quantization() { } }, "error": { - "message": "`.embedders.manual.binaryQuantized`: Cannot disable the binary quantization.\n - Note: Binary quantization is a lossy operation that cannot be reverted.\n - Hint: Add a new embedder that is non-quantized and regenerate the vectors.", + "message": "Index `doggo`: `.embedders.manual.binaryQuantized`: Cannot disable the binary quantization.\n - Note: Binary quantization is a lossy operation that cannot be reverted.\n - Hint: Add a new embedder that is non-quantized and regenerate the vectors.", "code": "invalid_settings_embedders", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_settings_embedders" diff --git a/crates/meilisearch/tests/vector/mod.rs b/crates/meilisearch/tests/vector/mod.rs index 47d0c1051..023d263e4 100644 --- a/crates/meilisearch/tests/vector/mod.rs +++ b/crates/meilisearch/tests/vector/mod.rs @@ -249,7 +249,7 @@ async fn user_provided_embeddings_error() { "indexedDocuments": 0 }, "error": { - "message": "Bad embedder configuration in the document with id: `\"0\"`. Missing field `regenerate` inside `.manual`", + "message": "Index `doggo`: Bad embedder configuration in the document with id: `\"0\"`. Missing field `regenerate` inside `.manual`", "code": "invalid_vectors_type", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_vectors_type" @@ -278,7 +278,7 @@ async fn user_provided_embeddings_error() { "indexedDocuments": 0 }, "error": { - "message": "Bad embedder configuration in the document with id: `\"0\"`. Missing field `regenerate` inside `.manual`", + "message": "Index `doggo`: Bad embedder configuration in the document with id: `\"0\"`. Missing field `regenerate` inside `.manual`", "code": "invalid_vectors_type", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_vectors_type" @@ -308,7 +308,7 @@ async fn user_provided_embeddings_error() { "indexedDocuments": 0 }, "error": { - "message": "Bad embedder configuration in the document with id: `\"0\"`. Invalid value type at `.manual.regenerate`: expected a boolean, but found a string: `\"yes please\"`", + "message": "Index `doggo`: Bad embedder configuration in the document with id: `\"0\"`. Invalid value type at `.manual.regenerate`: expected a boolean, but found a string: `\"yes please\"`", "code": "invalid_vectors_type", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_vectors_type" @@ -337,7 +337,7 @@ async fn user_provided_embeddings_error() { "indexedDocuments": 0 }, "error": { - "message": "Bad embedder configuration in the document with id: `\"0\"`. Invalid value type at `.manual.embeddings`: expected null or an array, but found a boolean: `true`", + "message": "Index `doggo`: Bad embedder configuration in the document with id: `\"0\"`. 
Invalid value type at `.manual.embeddings`: expected null or an array, but found a boolean: `true`", "code": "invalid_vectors_type", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_vectors_type" @@ -366,7 +366,7 @@ async fn user_provided_embeddings_error() { "indexedDocuments": 0 }, "error": { - "message": "Bad embedder configuration in the document with id: `\"0\"`. Invalid value type at `.manual.embeddings[0]`: expected a number or an array, but found a boolean: `true`", + "message": "Index `doggo`: Bad embedder configuration in the document with id: `\"0\"`. Invalid value type at `.manual.embeddings[0]`: expected a number or an array, but found a boolean: `true`", "code": "invalid_vectors_type", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_vectors_type" @@ -395,7 +395,7 @@ async fn user_provided_embeddings_error() { "indexedDocuments": 0 }, "error": { - "message": "Bad embedder configuration in the document with id: `\"0\"`. Invalid value type at `.manual.embeddings[0][0]`: expected a number, but found a boolean: `true`", + "message": "Index `doggo`: Bad embedder configuration in the document with id: `\"0\"`. Invalid value type at `.manual.embeddings[0][0]`: expected a number, but found a boolean: `true`", "code": "invalid_vectors_type", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_vectors_type" @@ -436,7 +436,7 @@ async fn user_provided_embeddings_error() { "indexedDocuments": 0 }, "error": { - "message": "Bad embedder configuration in the document with id: `\"0\"`. Invalid value type at `.manual.embeddings[1]`: expected a number, but found an array: `[0.2,0.3]`", + "message": "Index `doggo`: Bad embedder configuration in the document with id: `\"0\"`. Invalid value type at `.manual.embeddings[1]`: expected a number, but found an array: `[0.2,0.3]`", "code": "invalid_vectors_type", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_vectors_type" @@ -464,7 +464,7 @@ async fn user_provided_embeddings_error() { "indexedDocuments": 0 }, "error": { - "message": "Bad embedder configuration in the document with id: `\"0\"`. Invalid value type at `.manual.embeddings[1]`: expected an array, but found a number: `0.3`", + "message": "Index `doggo`: Bad embedder configuration in the document with id: `\"0\"`. Invalid value type at `.manual.embeddings[1]`: expected an array, but found a number: `0.3`", "code": "invalid_vectors_type", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_vectors_type" @@ -492,7 +492,7 @@ async fn user_provided_embeddings_error() { "indexedDocuments": 0 }, "error": { - "message": "Bad embedder configuration in the document with id: `\"0\"`. Invalid value type at `.manual.embeddings[0][1]`: expected a number, but found a boolean: `true`", + "message": "Index `doggo`: Bad embedder configuration in the document with id: `\"0\"`. 
Invalid value type at `.manual.embeddings[0][1]`: expected a number, but found a boolean: `true`", "code": "invalid_vectors_type", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_vectors_type" @@ -532,7 +532,7 @@ async fn user_provided_vectors_error() { "indexedDocuments": 0 }, "error": { - "message": "While embedding documents for embedder `manual`: no vectors provided for document \"40\" and at least 4 other document(s)\n- Note: `manual` has `source: userProvided`, so documents must provide embeddings as an array in `_vectors.manual`.\n- Hint: opt-out for a document with `_vectors.manual: null`", + "message": "Index `doggo`: While embedding documents for embedder `manual`: no vectors provided for document \"40\" and at least 4 other document(s)\n- Note: `manual` has `source: userProvided`, so documents must provide embeddings as an array in `_vectors.manual`.\n- Hint: opt-out for a document with `_vectors.manual: null`", "code": "vector_embedding_error", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#vector_embedding_error" @@ -561,7 +561,7 @@ async fn user_provided_vectors_error() { "indexedDocuments": 0 }, "error": { - "message": "While embedding documents for embedder `manual`: no vectors provided for document \"42\"\n- Note: `manual` has `source: userProvided`, so documents must provide embeddings as an array in `_vectors.manual`.\n- Hint: try replacing `_vector` by `_vectors` in 1 document(s).", + "message": "Index `doggo`: While embedding documents for embedder `manual`: no vectors provided for document \"42\"\n- Note: `manual` has `source: userProvided`, so documents must provide embeddings as an array in `_vectors.manual`.\n- Hint: try replacing `_vector` by `_vectors` in 1 document(s).", "code": "vector_embedding_error", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#vector_embedding_error" @@ -590,7 +590,7 @@ async fn user_provided_vectors_error() { "indexedDocuments": 0 }, "error": { - "message": "While embedding documents for embedder `manual`: no vectors provided for document \"42\"\n- Note: `manual` has `source: userProvided`, so documents must provide embeddings as an array in `_vectors.manual`.\n- Hint: try replacing `_vectors.manaul` by `_vectors.manual` in 1 document(s).", + "message": "Index `doggo`: While embedding documents for embedder `manual`: no vectors provided for document \"42\"\n- Note: `manual` has `source: userProvided`, so documents must provide embeddings as an array in `_vectors.manual`.\n- Hint: try replacing `_vectors.manaul` by `_vectors.manual` in 1 document(s).", "code": "vector_embedding_error", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#vector_embedding_error" diff --git a/crates/meilisearch/tests/vector/openai.rs b/crates/meilisearch/tests/vector/openai.rs index 04c068c40..35979e8fb 100644 --- a/crates/meilisearch/tests/vector/openai.rs +++ b/crates/meilisearch/tests/vector/openai.rs @@ -700,7 +700,7 @@ async fn bad_api_key() { } }, "error": { - "message": "While embedding documents for embedder `default`: user error: could not authenticate against OpenAI server\n - server replied with `{\"error\":{\"message\":\"Incorrect API key provided: Bearer doggo. 
You can find your API key at https://platform.openai.com/account/api-keys.\",\"type\":\"invalid_request_error\",\"param\":null,\"code\":\"invalid_api_key\"}}`\n - Hint: Check the `apiKey` parameter in the embedder configuration, and the `MEILI_OPENAI_API_KEY` and `OPENAI_API_KEY` environment variables", + "message": "Index `doggo`: While embedding documents for embedder `default`: user error: could not authenticate against OpenAI server\n - server replied with `{\"error\":{\"message\":\"Incorrect API key provided: Bearer doggo. You can find your API key at https://platform.openai.com/account/api-keys.\",\"type\":\"invalid_request_error\",\"param\":null,\"code\":\"invalid_api_key\"}}`\n - Hint: Check the `apiKey` parameter in the embedder configuration, and the `MEILI_OPENAI_API_KEY` and `OPENAI_API_KEY` environment variables", "code": "vector_embedding_error", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#vector_embedding_error" @@ -743,7 +743,7 @@ async fn bad_api_key() { } }, "error": { - "message": "While embedding documents for embedder `default`: user error: could not authenticate against OpenAI server\n - server replied with `{\"error\":{\"message\":\"You didn't provide an API key. You need to provide your API key in an Authorization header using Bearer auth (i.e. Authorization: Bearer YOUR_KEY), or as the password field (with blank username) if you're accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys.\",\"type\":\"invalid_request_error\",\"param\":null,\"code\":null}}`\n - Hint: Check the `apiKey` parameter in the embedder configuration, and the `MEILI_OPENAI_API_KEY` and `OPENAI_API_KEY` environment variables", + "message": "Index `doggo`: While embedding documents for embedder `default`: user error: could not authenticate against OpenAI server\n - server replied with `{\"error\":{\"message\":\"You didn't provide an API key. You need to provide your API key in an Authorization header using Bearer auth (i.e. Authorization: Bearer YOUR_KEY), or as the password field (with blank username) if you're accessing the API from your browser and are prompted for a username and password. 
You can obtain an API key from https://platform.openai.com/account/api-keys.\",\"type\":\"invalid_request_error\",\"param\":null,\"code\":null}}`\n - Hint: Check the `apiKey` parameter in the embedder configuration, and the `MEILI_OPENAI_API_KEY` and `OPENAI_API_KEY` environment variables", "code": "vector_embedding_error", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#vector_embedding_error" diff --git a/crates/meilisearch/tests/vector/rest.rs b/crates/meilisearch/tests/vector/rest.rs index 2748d0846..1e645d412 100644 --- a/crates/meilisearch/tests/vector/rest.rs +++ b/crates/meilisearch/tests/vector/rest.rs @@ -937,7 +937,7 @@ async fn bad_settings() { } }, "error": { - "message": "Error while generating embeddings: runtime error: could not determine model dimensions:\n - test embedding failed with runtime error: error extracting embeddings from the response:\n - in `response`, while extracting a single \"{{embedding}}\", expected `response` to be an array of numbers, but failed to parse server response:\n - invalid type: map, expected a sequence", + "message": "Index `doggo`: Error while generating embeddings: runtime error: could not determine model dimensions:\n - test embedding failed with runtime error: error extracting embeddings from the response:\n - in `response`, while extracting a single \"{{embedding}}\", expected `response` to be an array of numbers, but failed to parse server response:\n - invalid type: map, expected a sequence", "code": "vector_embedding_error", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#vector_embedding_error" @@ -976,7 +976,7 @@ async fn bad_settings() { "indexedDocuments": 0 }, "error": { - "message": "While embedding documents for embedder `rest`: runtime error: was expecting embeddings of dimension `2`, got embeddings of dimensions `3`", + "message": "Index `doggo`: While embedding documents for embedder `rest`: runtime error: was expecting embeddings of dimension `2`, got embeddings of dimensions `3`", "code": "vector_embedding_error", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#vector_embedding_error" @@ -1127,7 +1127,7 @@ async fn server_returns_bad_request() { } }, "error": { - "message": "Error while generating embeddings: runtime error: could not determine model dimensions:\n - test embedding failed with user error: sent a bad request to embedding server\n - Hint: check that the `request` in the embedder configuration matches the remote server's API\n - server replied with `{\"error\":\"Invalid request: invalid type: string \\\"test\\\", expected struct MultipleRequest at line 1 column 6\"}`", + "message": "Index `doggo`: Error while generating embeddings: runtime error: could not determine model dimensions:\n - test embedding failed with user error: sent a bad request to embedding server\n - Hint: check that the `request` in the embedder configuration matches the remote server's API\n - server replied with `{\"error\":\"Invalid request: invalid type: string \\\"test\\\", expected struct MultipleRequest at line 1 column 6\"}`", "code": "vector_embedding_error", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#vector_embedding_error" @@ -1194,7 +1194,7 @@ async fn server_returns_bad_request() { "indexedDocuments": 0 }, "error": { - "message": "While embedding documents for embedder `rest`: user error: sent a bad request to embedding server\n - Hint: check that the `request` in the embedder configuration matches the remote server's API\n - 
server replied with `{\"error\":\"Invalid request: invalid type: string \\\"name: kefir\\\\n\\\", expected struct MultipleRequest at line 1 column 15\"}`", + "message": "Index `doggo`: While embedding documents for embedder `rest`: user error: sent a bad request to embedding server\n - Hint: check that the `request` in the embedder configuration matches the remote server's API\n - server replied with `{\"error\":\"Invalid request: invalid type: string \\\"name: kefir\\\\n\\\", expected struct MultipleRequest at line 1 column 15\"}`", "code": "vector_embedding_error", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#vector_embedding_error" @@ -1252,7 +1252,7 @@ async fn server_returns_bad_response() { } }, "error": { - "message": "Error while generating embeddings: runtime error: could not determine model dimensions:\n - test embedding failed with runtime error: error extracting embeddings from the response:\n - in `response`, while extracting the array of \"{{embedding}}\"s, configuration expects `response` to be an array with at least 1 item(s) but server sent an object with 1 field(s)", + "message": "Index `doggo`: Error while generating embeddings: runtime error: could not determine model dimensions:\n - test embedding failed with runtime error: error extracting embeddings from the response:\n - in `response`, while extracting the array of \"{{embedding}}\"s, configuration expects `response` to be an array with at least 1 item(s) but server sent an object with 1 field(s)", "code": "vector_embedding_error", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#vector_embedding_error" @@ -1307,7 +1307,7 @@ async fn server_returns_bad_response() { } }, "error": { - "message": "Error while generating embeddings: runtime error: could not determine model dimensions:\n - test embedding failed with runtime error: error extracting embeddings from the response:\n - in `response`, while extracting item #0 from the array of \"{{embedding}}\"s, expected `response` to be an array of numbers, but failed to parse server response:\n - invalid type: map, expected a sequence", + "message": "Index `doggo`: Error while generating embeddings: runtime error: could not determine model dimensions:\n - test embedding failed with runtime error: error extracting embeddings from the response:\n - in `response`, while extracting item #0 from the array of \"{{embedding}}\"s, expected `response` to be an array of numbers, but failed to parse server response:\n - invalid type: map, expected a sequence", "code": "vector_embedding_error", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#vector_embedding_error" @@ -1358,7 +1358,7 @@ async fn server_returns_bad_response() { } }, "error": { - "message": "Error while generating embeddings: runtime error: could not determine model dimensions:\n - test embedding failed with runtime error: error extracting embeddings from the response:\n - in `response.output`, while extracting a single \"{{embedding}}\", expected `output` to be an array of numbers, but failed to parse server response:\n - invalid type: map, expected f32", + "message": "Index `doggo`: Error while generating embeddings: runtime error: could not determine model dimensions:\n - test embedding failed with runtime error: error extracting embeddings from the response:\n - in `response.output`, while extracting a single \"{{embedding}}\", expected `output` to be an array of numbers, but failed to parse server response:\n - invalid type: map, expected 
f32", "code": "vector_embedding_error", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#vector_embedding_error" @@ -1421,7 +1421,7 @@ async fn server_returns_bad_response() { } }, "error": { - "message": "Error while generating embeddings: runtime error: could not determine model dimensions:\n - test embedding failed with runtime error: error extracting embeddings from the response:\n - in `response.embedding`, while extracting item #0 from the array of \"{{embedding}}\"s, configuration expects `embedding` to be an object with key `data` but server sent an array of size 3", + "message": "Index `doggo`: Error while generating embeddings: runtime error: could not determine model dimensions:\n - test embedding failed with runtime error: error extracting embeddings from the response:\n - in `response.embedding`, while extracting item #0 from the array of \"{{embedding}}\"s, configuration expects `embedding` to be an object with key `data` but server sent an array of size 3", "code": "vector_embedding_error", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#vector_embedding_error" @@ -1484,7 +1484,7 @@ async fn server_returns_bad_response() { } }, "error": { - "message": "Error while generating embeddings: runtime error: could not determine model dimensions:\n - test embedding failed with runtime error: error extracting embeddings from the response:\n - in `response.output[0]`, while extracting a single \"{{embedding}}\", configuration expects key \"embeddings\", which is missing in response\n - Hint: item #0 inside `output` has key `embedding`, did you mean `response.output[0].embedding` in embedder configuration?", + "message": "Index `doggo`: Error while generating embeddings: runtime error: could not determine model dimensions:\n - test embedding failed with runtime error: error extracting embeddings from the response:\n - in `response.output[0]`, while extracting a single \"{{embedding}}\", configuration expects key \"embeddings\", which is missing in response\n - Hint: item #0 inside `output` has key `embedding`, did you mean `response.output[0].embedding` in embedder configuration?", "code": "vector_embedding_error", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#vector_embedding_error" @@ -1846,7 +1846,7 @@ async fn server_custom_header() { } }, "error": { - "message": "Error while generating embeddings: runtime error: could not determine model dimensions:\n - test embedding failed with user error: could not authenticate against embedding server\n - server replied with `{\"error\":\"missing header 'my-nonstandard-auth'\"}`\n - Hint: Check the `apiKey` parameter in the embedder configuration", + "message": "Index `doggo`: Error while generating embeddings: runtime error: could not determine model dimensions:\n - test embedding failed with user error: could not authenticate against embedding server\n - server replied with `{\"error\":\"missing header 'my-nonstandard-auth'\"}`\n - Hint: Check the `apiKey` parameter in the embedder configuration", "code": "vector_embedding_error", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#vector_embedding_error" @@ -1888,7 +1888,7 @@ async fn server_custom_header() { } }, "error": { - "message": "Error while generating embeddings: runtime error: could not determine model dimensions:\n - test embedding failed with user error: could not authenticate against embedding server\n - server replied with `{\"error\":\"thou shall not pass, Balrog\"}`\n - Hint: Check 
the `apiKey` parameter in the embedder configuration", + "message": "Index `doggo`: Error while generating embeddings: runtime error: could not determine model dimensions:\n - test embedding failed with user error: could not authenticate against embedding server\n - server replied with `{\"error\":\"thou shall not pass, Balrog\"}`\n - Hint: Check the `apiKey` parameter in the embedder configuration", "code": "vector_embedding_error", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#vector_embedding_error" @@ -2031,7 +2031,7 @@ async fn searchable_reindex() { ] }, "error": { - "message": "While embedding documents for embedder `rest`: error: received unexpected HTTP 404 from embedding server\n - server replied with `{\"error\":\"text not found\",\"text\":\"breed: patou\\n\"}`", + "message": "Index `doggo`: While embedding documents for embedder `rest`: error: received unexpected HTTP 404 from embedding server\n - server replied with `{\"error\":\"text not found\",\"text\":\"breed: patou\\n\"}`", "code": "vector_embedding_error", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#vector_embedding_error"
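
Note on the pattern above: every snapshot change in this section is the same mechanical edit, prefixing the expected error message with the name of the index the failure came from. The test-side churn in `distinct_at_search_time` follows from that: a snapshot asserting an `Index `test`:` prefix needs a deterministic uid, which is presumably why it trades the shared server and unique index for `Server::new().await` and a fixed `index("test")`. As a minimal, self-contained sketch of the formatting rule the snapshots encode, assuming the `from_milli(error, index_name)` constructor and the `Milli { error, index_name }` variant this patch threads through the scheduler (the `MilliError` stand-in and the `main` demo below are hypothetical, not code from the patch):

use std::fmt;

/// Hypothetical stand-in for `milli::Error`, which in the real crate
/// is the underlying error type the scheduler wraps.
#[derive(Debug)]
struct MilliError(String);

impl fmt::Display for MilliError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

/// Reduced form of the scheduler error: the milli error plus the
/// name of the index it occurred in, when one is known.
#[derive(Debug)]
enum Error {
    Milli { error: MilliError, index_name: Option<String> },
}

impl Error {
    /// Mirrors the `from_milli` helper used throughout the patch.
    fn from_milli(error: MilliError, index_name: Option<String>) -> Self {
        Error::Milli { error, index_name }
    }
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            // Prepend "Index `name`: " only when the index is known;
            // this is exactly the prefix the updated snapshots assert.
            Error::Milli { error, index_name: Some(name) } => {
                write!(f, "Index `{name}`: {error}")
            }
            Error::Milli { error, index_name: None } => write!(f, "{error}"),
        }
    }
}

fn main() {
    let err = Error::from_milli(
        MilliError("Attribute `doggo` is not filterable.".into()),
        Some("test".into()),
    );
    // Prints: Index `test`: Attribute `doggo` is not filterable.
    println!("{err}");
}

Keeping the prefix inside the error's `Display` impl rather than at each call site means task details, search routes, and multi-search federation all pick it up once the index name is attached, which matches the breadth of snapshot files touched in this section.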