Mirror of https://github.com/meilisearch/meilisearch.git, synced 2025-01-18 17:08:26 +08:00
Merge #5232
Some checks failed
Run the indexing fuzzer / Setup the action (push) Successful in 1h5m2s
Publish binaries to GitHub release / Publish binary for ${{ matrix.os }} (meilisearch, meilisearch-macos-amd64, macos-13) (push) Waiting to run
Publish binaries to GitHub release / Publish binary for macOS silicon (meilisearch-macos-apple-silicon, aarch64-apple-darwin) (push) Waiting to run
Test suite / Tests on ${{ matrix.os }} (macos-13) (push) Waiting to run
Look for flaky tests / flaky (push) Failing after 1s
Publish binaries to GitHub release / Check the version validity (push) Successful in 53s
Publish binaries to GitHub release / Publish binary for Linux (push) Failing after 1s
Publish binaries to GitHub release / Publish binary for aarch64 (meilisearch-linux-aarch64, aarch64-unknown-linux-gnu) (push) Failing after 15s
Publish binaries to GitHub release / Publish binary for ${{ matrix.os }} (meilisearch.exe, meilisearch-windows-amd64.exe, windows-2022) (push) Failing after 27s
Indexing bench (push) / Run and upload benchmarks (push) Has been cancelled
Benchmarks of indexing (push) / Run and upload benchmarks (push) Has been cancelled
Benchmarks of search for geo (push) / Run and upload benchmarks (push) Has been cancelled
Benchmarks of search for songs (push) / Run and upload benchmarks (push) Has been cancelled
Benchmarks of search for Wikipedia articles (push) / Run and upload benchmarks (push) Has been cancelled
Test suite / Tests almost all features (push) Failing after 1s
Test suite / Test disabled tokenization (push) Failing after 1s
Test suite / Tests on ${{ matrix.os }} (windows-2022) (push) Failing after 13s
Test suite / Run tests in debug (push) Failing after 2s
Test suite / Tests on ubuntu-20.04 (push) Failing after 13s
Test suite / Run Clippy (push) Failing after 32s
Test suite / Run Rustfmt (push) Successful in 1m43s
5232: Stabilize vector store feature r=Kerollmops a=dureuill

# Pull Request

## Related issue
Fixes #4733

## What does this PR do?
- `vectorStore` feature can no longer be set or get from `/experimental-features`
- That feature has been removed, and there is no longer any check for its activation
- Always display `embedders` in the settings, even if empty
- Always hide `_vectors` in documents, unless `retrieveVectors: true`
- Make error codes consistent with the usual nomenclature
- Update tests as needed

Co-authored-by: Louis Dureuil <louis@meilisearch.com>
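A minimal sketch of what stabilization means for the runtime toggles, assuming the field set shown in the diff below (illustrative types, not the exact Meilisearch definitions): `vector_store` disappears from the experimental toggles, so vector search and `embedders` settings work without flipping any feature, while `_vectors` stays hidden unless `retrieveVectors: true` is passed.

```rust
// Illustrative sketch only: field names taken from this diff, simplified types.
// After stabilization there is no `vector_store` toggle left to check.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub struct RuntimeTogglableFeatures {
    pub metrics: bool,
    pub logs_route: bool,
    pub edit_documents_by_function: bool,
    pub contains_filter: bool,
}

fn main() {
    // Every remaining toggle still defaults to `false`; vectors need no toggle at all.
    let features = RuntimeTogglableFeatures::default();
    assert!(!features.metrics && !features.logs_route);
    assert!(!features.edit_documents_by_function && !features.contains_filter);
}
```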
This commit is contained in: c85146524b
@@ -458,7 +458,7 @@ pub(crate) mod test {
     }

     fn create_test_features() -> RuntimeTogglableFeatures {
-        RuntimeTogglableFeatures { vector_store: true, ..Default::default() }
+        RuntimeTogglableFeatures::default()
     }

     #[test]
@@ -327,10 +327,7 @@ pub(crate) mod test {
            }
        }

-        assert_eq!(
-            dump.features().unwrap().unwrap(),
-            RuntimeTogglableFeatures { vector_store: true, ..Default::default() }
-        );
+        assert_eq!(dump.features().unwrap().unwrap(), RuntimeTogglableFeatures::default());
    }

    #[test]
@@ -373,10 +370,7 @@ pub(crate) mod test {

        assert_eq!(test.documents().unwrap().count(), 1);

-        assert_eq!(
-            dump.features().unwrap().unwrap(),
-            RuntimeTogglableFeatures { vector_store: true, ..Default::default() }
-        );
+        assert_eq!(dump.features().unwrap().unwrap(), RuntimeTogglableFeatures::default());
    }

    #[test]
@@ -56,19 +56,6 @@ impl RoFeatures {
         }
     }

-    pub fn check_vector(&self, disabled_action: &'static str) -> Result<()> {
-        if self.runtime.vector_store {
-            Ok(())
-        } else {
-            Err(FeatureNotEnabledError {
-                disabled_action,
-                feature: "vector store",
-                issue_link: "https://github.com/meilisearch/product/discussions/677",
-            }
-            .into())
-        }
-    }
-
     pub fn check_edit_documents_by_function(&self, disabled_action: &'static str) -> Result<()> {
         if self.runtime.edit_documents_by_function {
             Ok(())
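For context, the gates that remain in `RoFeatures` (such as `check_edit_documents_by_function`, visible at the end of this hunk) keep the same shape the removed `check_vector` had: `Ok(())` when the toggle is on, otherwise a `feature_not_enabled` error. A self-contained sketch of that pattern with simplified, hypothetical types (the link string is a placeholder):

```rust
// Simplified, hypothetical stand-ins for the index-scheduler types,
// only to illustrate the feature-gate pattern that `check_vector` used to follow.
#[derive(Debug)]
struct FeatureNotEnabledError {
    disabled_action: &'static str,
    feature: &'static str,
    issue_link: &'static str, // placeholder, not a real discussion URL
}

struct RoFeatures {
    edit_documents_by_function: bool,
}

impl RoFeatures {
    fn check_edit_documents_by_function(
        &self,
        disabled_action: &'static str,
    ) -> Result<(), FeatureNotEnabledError> {
        if self.edit_documents_by_function {
            Ok(())
        } else {
            Err(FeatureNotEnabledError {
                disabled_action,
                feature: "edit documents by function",
                issue_link: "<feature discussion link>",
            })
        }
    }
}

fn main() {
    let features = RoFeatures { edit_documents_by_function: false };
    assert!(features.check_edit_documents_by_function("Using the edit route").is_err());
}
```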
@@ -243,8 +243,9 @@ InvalidVectorsType , InvalidRequest , BAD_REQUEST ;
 InvalidDocumentId , InvalidRequest , BAD_REQUEST ;
 InvalidDocumentLimit , InvalidRequest , BAD_REQUEST ;
 InvalidDocumentOffset , InvalidRequest , BAD_REQUEST ;
-InvalidEmbedder , InvalidRequest , BAD_REQUEST ;
-InvalidHybridQuery , InvalidRequest , BAD_REQUEST ;
+InvalidSearchEmbedder , InvalidRequest , BAD_REQUEST ;
+InvalidSimilarEmbedder , InvalidRequest , BAD_REQUEST ;
+InvalidSearchHybridQuery , InvalidRequest , BAD_REQUEST ;
 InvalidIndexLimit , InvalidRequest , BAD_REQUEST ;
 InvalidIndexOffset , InvalidRequest , BAD_REQUEST ;
 InvalidIndexPrimaryKey , InvalidRequest , BAD_REQUEST ;
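The three new codes follow the usual per-route nomenclature; like the neighbouring rows, they are `InvalidRequest` errors answered with HTTP 400. A hypothetical, simplified mirror of those rows (the real `Code` type is macro-generated in meilisearch-types):

```rust
// Hypothetical simplified mirror of the rows touched by this hunk.
#[derive(Debug, Clone, Copy)]
enum Code {
    InvalidSearchEmbedder,
    InvalidSimilarEmbedder,
    InvalidSearchHybridQuery,
}

impl Code {
    // snake_case name exposed in error payloads (assumed from the usual convention)
    fn name(self) -> &'static str {
        match self {
            Code::InvalidSearchEmbedder => "invalid_search_embedder",
            Code::InvalidSimilarEmbedder => "invalid_similar_embedder",
            Code::InvalidSearchHybridQuery => "invalid_search_hybrid_query",
        }
    }

    fn http_status(self) -> u16 {
        400 // BAD_REQUEST for all three, per the table rows above
    }
}

fn main() {
    let codes = [Code::InvalidSearchEmbedder, Code::InvalidSimilarEmbedder, Code::InvalidSearchHybridQuery];
    for code in codes {
        println!("{:?} -> {} ({})", code, code.name(), code.http_status());
    }
}
```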
@@ -443,7 +444,8 @@ impl ErrorCode for milli::Error {
                 UserError::InvalidMinTypoWordLenSetting(_, _) => {
                     Code::InvalidSettingsTypoTolerance
                 }
-                UserError::InvalidEmbedder(_) => Code::InvalidEmbedder,
+                UserError::InvalidSearchEmbedder(_) => Code::InvalidSearchEmbedder,
+                UserError::InvalidSimilarEmbedder(_) => Code::InvalidSimilarEmbedder,
                 UserError::VectorEmbeddingError(_) | UserError::DocumentEmbeddingError(_) => {
                     Code::VectorEmbeddingError
                 }
@@ -3,7 +3,6 @@ use serde::{Deserialize, Serialize};
 #[derive(Serialize, Deserialize, Debug, Clone, Copy, Default, PartialEq, Eq)]
 #[serde(rename_all = "camelCase", default)]
 pub struct RuntimeTogglableFeatures {
-    pub vector_store: bool,
     pub metrics: bool,
     pub logs_route: bool,
     pub edit_documents_by_function: bool,
@@ -866,7 +866,7 @@ pub fn settings(
             (name, SettingEmbeddingSettings { inner: Setting::Set(config.into()) })
         })
         .collect();
-    let embedders = if embedders.is_empty() { Setting::NotSet } else { Setting::Set(embedders) };
+    let embedders = Setting::Set(embedders);

     let search_cutoff_ms = index.search_cutoff(rtxn)?;

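This hunk makes `embedders` always present in the settings response: previously an empty embedder map was reported as `Setting::NotSet` and the key was omitted, now it is always `Setting::Set`, which serializes as `"embedders": {}`. A rough sketch of the difference using a hypothetical `Setting` enum in place of the milli type:

```rust
use std::collections::BTreeMap;

// Hypothetical stand-in for milli's `Setting`, just to show the serialization effect.
#[derive(Debug)]
enum Setting<T> {
    Set(T),
    NotSet,
}

fn main() {
    let embedders: BTreeMap<String, String> = BTreeMap::new();

    // Before: an empty map was reported as `NotSet`, so `embedders` was hidden.
    let before = if embedders.is_empty() { Setting::NotSet } else { Setting::Set(embedders.clone()) };
    // After: always `Set`, so the settings payload always contains `"embedders": {}`.
    let after = Setting::Set(embedders);

    println!("before = {before:?}, after = {after:?}");
}
```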
@@ -177,13 +177,12 @@ impl SegmentAnalytics {
 /// This structure represent the `infos` field we send in the analytics.
 /// It's quite close to the `Opt` structure except all sensitive informations
 /// have been simplified to a boolean.
-/// It's send as-is in amplitude thus you should never update a name of the
+/// It's sent as-is in amplitude thus you should never update a name of the
 /// struct without the approval of the PM.
 #[derive(Debug, Clone, Serialize)]
 struct Infos {
     env: String,
     experimental_contains_filter: bool,
-    experimental_vector_store: bool,
     experimental_enable_metrics: bool,
     experimental_edit_documents_by_function: bool,
     experimental_search_queue_size: usize,
@@ -280,7 +279,6 @@ impl Infos {
         indexer_options;

         let RuntimeTogglableFeatures {
-            vector_store,
             metrics,
             logs_route,
             edit_documents_by_function,
@@ -292,7 +290,6 @@ impl Infos {
         Self {
             env,
             experimental_contains_filter: experimental_contains_filter | contains_filter,
-            experimental_vector_store: vector_store,
             experimental_edit_documents_by_function: edit_documents_by_function,
             experimental_enable_metrics: experimental_enable_metrics | metrics,
             experimental_search_queue_size,
@@ -46,7 +46,6 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
     security(("Bearer" = ["experimental_features.get", "experimental_features.*", "*"])),
     responses(
         (status = OK, description = "Experimental features are returned", body = RuntimeTogglableFeatures, content_type = "application/json", example = json!(RuntimeTogglableFeatures {
-            vector_store: Some(true),
             metrics: Some(true),
             logs_route: Some(false),
             edit_documents_by_function: Some(false),
@@ -71,6 +70,7 @@ async fn get_features(
     let features = index_scheduler.features();

     let features = features.runtime_features();
+    let features: RuntimeTogglableFeatures = features.into();
     debug!(returns = ?features, "Get features");
     HttpResponse::Ok().json(features)
 }
@@ -80,8 +80,6 @@ async fn get_features(
 #[serde(rename_all = "camelCase")]
 #[schema(rename_all = "camelCase")]
 pub struct RuntimeTogglableFeatures {
-    #[deserr(default)]
-    pub vector_store: Option<bool>,
     #[deserr(default)]
     pub metrics: Option<bool>,
     #[deserr(default)]
@@ -92,9 +90,26 @@ pub struct RuntimeTogglableFeatures {
     pub contains_filter: Option<bool>,
 }

+impl From<meilisearch_types::features::RuntimeTogglableFeatures> for RuntimeTogglableFeatures {
+    fn from(value: meilisearch_types::features::RuntimeTogglableFeatures) -> Self {
+        let meilisearch_types::features::RuntimeTogglableFeatures {
+            metrics,
+            logs_route,
+            edit_documents_by_function,
+            contains_filter,
+        } = value;
+
+        Self {
+            metrics: Some(metrics),
+            logs_route: Some(logs_route),
+            edit_documents_by_function: Some(edit_documents_by_function),
+            contains_filter: Some(contains_filter),
+        }
+    }
+}
+
 #[derive(Serialize)]
 pub struct PatchExperimentalFeatureAnalytics {
-    vector_store: bool,
     metrics: bool,
     logs_route: bool,
     edit_documents_by_function: bool,
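The new `From` impl bridges the scheduler-side struct (plain `bool`s) to the route-side payload (`Option<bool>`s, because a PATCH body may omit fields), wrapping every stored value in `Some` so GET and PATCH responses always list every toggle. A standalone sketch with hypothetical local types:

```rust
// Hypothetical simplified types: `Stored` mirrors the scheduler-side struct
// (plain bools) and `Payload` the route-side struct (Option<bool>).
#[derive(Clone, Copy, Default)]
struct Stored {
    metrics: bool,
    logs_route: bool,
    edit_documents_by_function: bool,
    contains_filter: bool,
}

#[derive(Debug)]
struct Payload {
    metrics: Option<bool>,
    logs_route: Option<bool>,
    edit_documents_by_function: Option<bool>,
    contains_filter: Option<bool>,
}

impl From<Stored> for Payload {
    fn from(value: Stored) -> Self {
        // Destructure without `..` so adding a field later forces this impl to be updated.
        let Stored { metrics, logs_route, edit_documents_by_function, contains_filter } = value;
        Self {
            metrics: Some(metrics),
            logs_route: Some(logs_route),
            edit_documents_by_function: Some(edit_documents_by_function),
            contains_filter: Some(contains_filter),
        }
    }
}

fn main() {
    let response: Payload = Stored::default().into();
    println!("{response:?}"); // every toggle is present, none is `None`
}
```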
@@ -108,7 +123,6 @@ impl Aggregate for PatchExperimentalFeatureAnalytics {

     fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self> {
         Box::new(Self {
-            vector_store: new.vector_store,
             metrics: new.metrics,
             logs_route: new.logs_route,
             edit_documents_by_function: new.edit_documents_by_function,
@@ -131,7 +145,6 @@ impl Aggregate for PatchExperimentalFeatureAnalytics {
     security(("Bearer" = ["experimental_features.update", "experimental_features.*", "*"])),
     responses(
         (status = OK, description = "Experimental features are returned", body = RuntimeTogglableFeatures, content_type = "application/json", example = json!(RuntimeTogglableFeatures {
-            vector_store: Some(true),
             metrics: Some(true),
             logs_route: Some(false),
             edit_documents_by_function: Some(false),
@@ -161,7 +174,6 @@ async fn patch_features(

     let old_features = features.runtime_features();
     let new_features = meilisearch_types::features::RuntimeTogglableFeatures {
-        vector_store: new_features.0.vector_store.unwrap_or(old_features.vector_store),
         metrics: new_features.0.metrics.unwrap_or(old_features.metrics),
         logs_route: new_features.0.logs_route.unwrap_or(old_features.logs_route),
         edit_documents_by_function: new_features
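`patch_features` merges the optional request fields into the stored toggles: any field omitted from the PATCH body keeps its previous value through `unwrap_or`. A minimal sketch of that merge rule with hypothetical local types:

```rust
// Hypothetical simplified types illustrating the PATCH merge rule.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Stored {
    metrics: bool,
    logs_route: bool,
}

struct Patch {
    metrics: Option<bool>,
    logs_route: Option<bool>,
}

fn merge(old: Stored, patch: Patch) -> Stored {
    Stored {
        // Omitted fields fall back to the previously stored value.
        metrics: patch.metrics.unwrap_or(old.metrics),
        logs_route: patch.logs_route.unwrap_or(old.logs_route),
    }
}

fn main() {
    let old = Stored { metrics: false, logs_route: true };
    let merged = merge(old, Patch { metrics: Some(true), logs_route: None });
    assert_eq!(merged, Stored { metrics: true, logs_route: true });
}
```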
@@ -175,7 +187,6 @@ async fn patch_features(
     // the it renames to camelCase, which we don't want for analytics.
     // **Do not** ignore fields with `..` or `_` here, because we want to add them in the future.
     let meilisearch_types::features::RuntimeTogglableFeatures {
-        vector_store,
         metrics,
         logs_route,
         edit_documents_by_function,
@@ -184,7 +195,6 @@ async fn patch_features(

     analytics.publish(
         PatchExperimentalFeatureAnalytics {
-            vector_store,
             metrics,
             logs_route,
             edit_documents_by_function,
@@ -193,6 +203,7 @@ async fn patch_features(
         &req,
     );
     index_scheduler.put_runtime_features(new_features)?;
+    let new_features: RuntimeTogglableFeatures = new_features.into();
     debug!(returns = ?new_features, "Patch features");
     Ok(HttpResponse::Ok().json(new_features))
 }
@@ -257,8 +257,7 @@ pub async fn get_document(
     let GetDocument { fields, retrieve_vectors: param_retrieve_vectors } = params.into_inner();
     let attributes_to_retrieve = fields.merge_star_and_none();

-    let features = index_scheduler.features();
-    let retrieve_vectors = RetrieveVectors::new(param_retrieve_vectors.0, features)?;
+    let retrieve_vectors = RetrieveVectors::new(param_retrieve_vectors.0);

     analytics.publish(
         DocumentsFetchAggregator::<DocumentsGET> {
@@ -593,8 +592,7 @@ fn documents_by_query(
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
     let BrowseQuery { offset, limit, fields, retrieve_vectors, filter } = query;

-    let features = index_scheduler.features();
-    let retrieve_vectors = RetrieveVectors::new(retrieve_vectors, features)?;
+    let retrieve_vectors = RetrieveVectors::new(retrieve_vectors);

     let index = index_scheduler.index(&index_uid)?;
     let (total, documents) = retrieve_documents(
@@ -1420,7 +1418,6 @@ fn some_documents<'a, 't: 'a>(
         ret.map_err(ResponseError::from).and_then(|(key, document)| -> Result<_, ResponseError> {
             let mut document = milli::obkv_to_json(&all_fields, &fields_ids_map, document)?;
             match retrieve_vectors {
-                RetrieveVectors::Ignore => {}
                 RetrieveVectors::Hide => {
                     document.remove("_vectors");
                 }
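With the `Ignore` variant gone, document formatting has exactly two behaviours: strip `_vectors` (the default) or return it when `retrieveVectors=true` was requested. A self-contained sketch of that post-processing step, using a plain map as a hypothetical stand-in for the obkv/JSON document:

```rust
use std::collections::BTreeMap;

// Hypothetical stand-in for a JSON document: field name -> raw JSON string.
type Document = BTreeMap<String, String>;

#[derive(Clone, Copy)]
enum RetrieveVectors {
    Hide,
    Retrieve,
}

// Sketch of the post-processing step: `_vectors` is stripped unless the caller
// asked for it; there is no longer an `Ignore` variant for "feature disabled".
fn format_document(mut doc: Document, retrieve_vectors: RetrieveVectors) -> Document {
    match retrieve_vectors {
        RetrieveVectors::Hide => {
            doc.remove("_vectors");
        }
        RetrieveVectors::Retrieve => {}
    }
    doc
}

fn main() {
    let mut doc = Document::new();
    doc.insert("title".into(), "\"Shazam!\"".into());
    doc.insert("_vectors".into(), "{\"manual\": {\"embeddings\": [[1.0, 2.0, 3.0]]}}".into());

    assert!(!format_document(doc.clone(), RetrieveVectors::Hide).contains_key("_vectors"));
    assert!(format_document(doc, RetrieveVectors::Retrieve).contains_key("_vectors"));
}
```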
@@ -56,7 +56,7 @@ pub struct FacetSearchQuery {
     pub q: Option<String>,
     #[deserr(default, error = DeserrJsonError<InvalidSearchVector>)]
     pub vector: Option<Vec<f32>>,
-    #[deserr(default, error = DeserrJsonError<InvalidHybridQuery>)]
+    #[deserr(default, error = DeserrJsonError<InvalidSearchHybridQuery>)]
     pub hybrid: Option<HybridQuery>,
     #[deserr(default, error = DeserrJsonError<InvalidSearchFilter>)]
     pub filter: Option<Value>,
@@ -252,9 +252,7 @@ pub async fn search(
     }

     let index = index_scheduler.index(&index_uid)?;
-    let features = index_scheduler.features();
-    let search_kind =
-        search_kind(&search_query, &index_scheduler, index_uid.to_string(), &index, features)?;
+    let search_kind = search_kind(&search_query, &index_scheduler, index_uid.to_string(), &index)?;
     let permit = search_queue.try_get_search_permit().await?;
     let search_result = tokio::task::spawn_blocking(move || {
         perform_facet_search(
@@ -1,7 +1,7 @@
 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
 use deserr::actix_web::{AwebJson, AwebQueryParameter};
-use index_scheduler::{IndexScheduler, RoFeatures};
+use index_scheduler::IndexScheduler;
 use meilisearch_types::deserr::query_params::Param;
 use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError};
 use meilisearch_types::error::deserr_codes::*;
@@ -121,7 +121,7 @@ pub struct SearchQueryGet {
     #[deserr(default, error = DeserrQueryParamError<InvalidSearchAttributesToSearchOn>)]
     #[param(value_type = Vec<String>, explode = false)]
     pub attributes_to_search_on: Option<CS<String>>,
-    #[deserr(default, error = DeserrQueryParamError<InvalidEmbedder>)]
+    #[deserr(default, error = DeserrQueryParamError<InvalidSearchEmbedder>)]
     pub hybrid_embedder: Option<String>,
     #[deserr(default, error = DeserrQueryParamError<InvalidSearchSemanticRatio>)]
     #[param(value_type = f32)]
@@ -185,7 +185,7 @@ impl TryFrom<SearchQueryGet> for SearchQuery {
             (None, Some(_)) => {
                 return Err(ResponseError::from_msg(
                     "`hybridEmbedder` is mandatory when `hybridSemanticRatio` is present".into(),
-                    meilisearch_types::error::Code::InvalidHybridQuery,
+                    meilisearch_types::error::Code::InvalidSearchHybridQuery,
                 ));
             }
             (Some(embedder), None) => {
@@ -336,11 +336,10 @@ pub async fn search_with_url_query(
     let mut aggregate = SearchAggregator::<SearchGET>::from_query(&query);

     let index = index_scheduler.index(&index_uid)?;
-    let features = index_scheduler.features();
-
     let search_kind =
-        search_kind(&query, index_scheduler.get_ref(), index_uid.to_string(), &index, features)?;
-    let retrieve_vector = RetrieveVectors::new(query.retrieve_vectors, features)?;
+        search_kind(&query, index_scheduler.get_ref(), index_uid.to_string(), &index)?;
+    let retrieve_vector = RetrieveVectors::new(query.retrieve_vectors);
     let permit = search_queue.try_get_search_permit().await?;
     let search_result = tokio::task::spawn_blocking(move || {
         perform_search(
@@ -444,11 +443,9 @@ pub async fn search_with_post(

     let index = index_scheduler.index(&index_uid)?;

-    let features = index_scheduler.features();
-
     let search_kind =
-        search_kind(&query, index_scheduler.get_ref(), index_uid.to_string(), &index, features)?;
-    let retrieve_vectors = RetrieveVectors::new(query.retrieve_vectors, features)?;
+        search_kind(&query, index_scheduler.get_ref(), index_uid.to_string(), &index)?;
+    let retrieve_vectors = RetrieveVectors::new(query.retrieve_vectors);

     let permit = search_queue.try_get_search_permit().await?;
     let search_result = tokio::task::spawn_blocking(move || {
@@ -483,15 +480,7 @@ pub fn search_kind(
     index_scheduler: &IndexScheduler,
     index_uid: String,
     index: &milli::Index,
-    features: RoFeatures,
 ) -> Result<SearchKind, ResponseError> {
-    if query.vector.is_some() {
-        features.check_vector("Passing `vector` as a parameter")?;
-    }
-    if query.hybrid.is_some() {
-        features.check_vector("Passing `hybrid` as a parameter")?;
-    }
-
     // handle with care, the order of cases matters, the semantics is subtle
     match (query.q.as_deref(), &query.hybrid, query.vector.as_deref()) {
         // empty query, no vector => placeholder search
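`search_kind` no longer needs `RoFeatures` and no longer calls `check_vector` for the `vector` and `hybrid` parameters; it goes straight to classifying the query. A heavily simplified, hypothetical sketch of that classification reduced to the keyword/vector axes (the real function also inspects `hybrid` and resolves the embedder):

```rust
#[derive(Debug, PartialEq)]
enum Kind {
    Placeholder, // no keyword query, no vector
    Keyword,     // keyword query only
    Semantic,    // vector only
    Hybrid,      // keyword query and vector together
}

// Handle with care: the order of cases matters, the semantics is subtle.
fn kind(q: Option<&str>, vector: Option<&[f32]>) -> Kind {
    match (q, vector) {
        (None | Some(""), None) => Kind::Placeholder,
        (Some(_), None) => Kind::Keyword,
        (None | Some(""), Some(_)) => Kind::Semantic,
        (Some(_), Some(_)) => Kind::Hybrid,
    }
}

fn main() {
    assert_eq!(kind(None, None), Kind::Placeholder);
    assert_eq!(kind(Some("glass"), None), Kind::Keyword);
    assert_eq!(kind(None, Some(&[1.0, 2.0, 3.0])), Kind::Semantic);
    assert_eq!(kind(Some("glass"), Some(&[1.0, 2.0, 3.0])), Kind::Hybrid);
}
```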
@@ -5,7 +5,6 @@ use index_scheduler::IndexScheduler;
 use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::index_uid::IndexUid;
-use meilisearch_types::milli::update::Setting;
 use meilisearch_types::settings::{
     settings, SecretPolicy, SettingEmbeddingSettings, Settings, Unchecked,
 };
@@ -711,10 +710,7 @@ pub async fn delete_all(

 fn validate_settings(
     settings: Settings<Unchecked>,
-    index_scheduler: &IndexScheduler,
+    _index_scheduler: &IndexScheduler,
 ) -> Result<Settings<Unchecked>, ResponseError> {
-    if matches!(settings.embedders, Setting::Set(_)) {
-        index_scheduler.features().check_vector("Passing `embedders` in settings")?
-    }
     Ok(settings.validate()?)
 }
@@ -19,8 +19,8 @@ use crate::extractors::authentication::GuardedData;
 use crate::extractors::sequential_extractor::SeqHandler;
 use crate::routes::indexes::similar_analytics::{SimilarAggregator, SimilarGET, SimilarPOST};
 use crate::search::{
-    add_search_rules, perform_similar, RankingScoreThresholdSimilar, RetrieveVectors, SearchKind,
-    SimilarQuery, SimilarResult, DEFAULT_SEARCH_LIMIT, DEFAULT_SEARCH_OFFSET,
+    add_search_rules, perform_similar, RankingScoreThresholdSimilar, RetrieveVectors, Route,
+    SearchKind, SimilarQuery, SimilarResult, DEFAULT_SEARCH_LIMIT, DEFAULT_SEARCH_OFFSET,
 };

 #[derive(OpenApi)]
@@ -216,11 +216,7 @@ async fn similar(
     index_uid: IndexUid,
     mut query: SimilarQuery,
 ) -> Result<SimilarResult, ResponseError> {
-    let features = index_scheduler.features();
-
-    features.check_vector("Using the similar API")?;
-
-    let retrieve_vectors = RetrieveVectors::new(query.retrieve_vectors, features)?;
+    let retrieve_vectors = RetrieveVectors::new(query.retrieve_vectors);

     // Tenant token search_rules.
     if let Some(search_rules) = index_scheduler.filters().get_index_search_rules(&index_uid) {
@@ -235,6 +231,7 @@ async fn similar(
         &index,
         &query.embedder,
         None,
+        Route::Similar,
     )?;

     tokio::task::spawn_blocking(move || {
@@ -281,7 +278,7 @@ pub struct SimilarQueryGet {
     #[deserr(default, error = DeserrQueryParamError<InvalidSimilarRankingScoreThreshold>, default)]
     #[param(value_type = Option<f32>)]
     pub ranking_score_threshold: Option<RankingScoreThresholdGet>,
-    #[deserr(error = DeserrQueryParamError<InvalidEmbedder>)]
+    #[deserr(error = DeserrQueryParamError<InvalidSimilarEmbedder>)]
     pub embedder: String,
 }
@@ -106,7 +106,7 @@ pub struct SearchResults {
         {
             "id": 42,
             "title": "Batman returns",
-            "overview": "The overview of batman returns",
+            "overview": "The overview of batman returns",
             "_federation": {
                 "indexUid": "movies",
                 "queriesPosition": 0
@@ -240,11 +240,9 @@ pub async fn multi_search_with_post(
                     index_scheduler.get_ref(),
                     index_uid_str.clone(),
                     &index,
-                    features,
                 )
                 .with_index(query_index)?;
-                let retrieve_vector = RetrieveVectors::new(query.retrieve_vectors, features)
-                    .with_index(query_index)?;
+                let retrieve_vector = RetrieveVectors::new(query.retrieve_vectors);

                 let search_result = tokio::task::spawn_blocking(move || {
                     perform_search(
@@ -569,7 +569,7 @@ pub fn perform_federated_search(

         let res: Result<(), ResponseError> = (|| {
             let search_kind =
-                search_kind(&query, index_scheduler, index_uid.to_string(), &index, features)?;
+                search_kind(&query, index_scheduler, index_uid.to_string(), &index)?;

             let canonicalization_kind = match (&search_kind, &query.q) {
                 (SearchKind::SemanticOnly { .. }, _) => {
@@ -631,7 +631,7 @@ pub fn perform_federated_search(
                 _ => semantic_hit_count = Some(0),
             }

-            let retrieve_vectors = RetrieveVectors::new(query.retrieve_vectors, features)?;
+            let retrieve_vectors = RetrieveVectors::new(query.retrieve_vectors);

             let time_budget = match cutoff {
                 Some(cutoff) => TimeBudget::new(Duration::from_millis(cutoff)),
@@ -63,7 +63,7 @@ pub struct SearchQuery {
     pub q: Option<String>,
     #[deserr(default, error = DeserrJsonError<InvalidSearchVector>)]
     pub vector: Option<Vec<f32>>,
-    #[deserr(default, error = DeserrJsonError<InvalidHybridQuery>)]
+    #[deserr(default, error = DeserrJsonError<InvalidSearchHybridQuery>)]
     pub hybrid: Option<HybridQuery>,
     #[deserr(default = DEFAULT_SEARCH_OFFSET(), error = DeserrJsonError<InvalidSearchOffset>)]
     #[schema(default = DEFAULT_SEARCH_OFFSET)]
@@ -276,12 +276,12 @@ impl fmt::Debug for SearchQuery {
 }

 #[derive(Debug, Clone, Default, PartialEq, Deserr, ToSchema)]
-#[deserr(error = DeserrJsonError<InvalidHybridQuery>, rename_all = camelCase, deny_unknown_fields)]
+#[deserr(error = DeserrJsonError<InvalidSearchHybridQuery>, rename_all = camelCase, deny_unknown_fields)]
 pub struct HybridQuery {
     #[deserr(default, error = DeserrJsonError<InvalidSearchSemanticRatio>, default)]
     #[schema(value_type = f32, default)]
     pub semantic_ratio: SemanticRatio,
-    #[deserr(error = DeserrJsonError<InvalidEmbedder>)]
+    #[deserr(error = DeserrJsonError<InvalidSearchEmbedder>)]
     pub embedder: String,
 }
@@ -300,8 +300,14 @@ impl SearchKind {
         embedder_name: &str,
         vector_len: Option<usize>,
     ) -> Result<Self, ResponseError> {
-        let (embedder_name, embedder, quantized) =
-            Self::embedder(index_scheduler, index_uid, index, embedder_name, vector_len)?;
+        let (embedder_name, embedder, quantized) = Self::embedder(
+            index_scheduler,
+            index_uid,
+            index,
+            embedder_name,
+            vector_len,
+            Route::Search,
+        )?;
         Ok(Self::SemanticOnly { embedder_name, embedder, quantized })
     }

@@ -313,8 +319,14 @@ impl SearchKind {
         semantic_ratio: f32,
         vector_len: Option<usize>,
     ) -> Result<Self, ResponseError> {
-        let (embedder_name, embedder, quantized) =
-            Self::embedder(index_scheduler, index_uid, index, embedder_name, vector_len)?;
+        let (embedder_name, embedder, quantized) = Self::embedder(
+            index_scheduler,
+            index_uid,
+            index,
+            embedder_name,
+            vector_len,
+            Route::Search,
+        )?;
         Ok(Self::Hybrid { embedder_name, embedder, quantized, semantic_ratio })
     }

@@ -324,13 +336,21 @@ impl SearchKind {
         index: &Index,
         embedder_name: &str,
         vector_len: Option<usize>,
+        route: Route,
     ) -> Result<(String, Arc<Embedder>, bool), ResponseError> {
         let embedder_configs = index.embedding_configs(&index.read_txn()?)?;
         let embedders = index_scheduler.embedders(index_uid, embedder_configs)?;

         let (embedder, _, quantized) = embedders
             .get(embedder_name)
-            .ok_or(milli::UserError::InvalidEmbedder(embedder_name.to_owned()))
+            .ok_or(match route {
+                Route::Search | Route::MultiSearch => {
+                    milli::UserError::InvalidSearchEmbedder(embedder_name.to_owned())
+                }
+                Route::Similar => {
+                    milli::UserError::InvalidSimilarEmbedder(embedder_name.to_owned())
+                }
+            })
             .map_err(milli::Error::from)?;

         if let Some(vector_len) = vector_len {
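`SearchKind::embedder` is now shared by the search, multi-search, and similar routes, so it takes a `Route` and picks the matching error when the requested embedder does not exist. A compact sketch of that selection with hypothetical simplified types:

```rust
use std::collections::BTreeMap;

// Hypothetical simplified error type standing in for milli::UserError.
#[derive(Debug, PartialEq)]
enum EmbedderError {
    InvalidSearchEmbedder(String),
    InvalidSimilarEmbedder(String),
}

#[derive(Clone, Copy)]
enum Route {
    Search,
    MultiSearch,
    Similar,
}

fn resolve_embedder<'a>(
    embedders: &'a BTreeMap<String, String>,
    name: &str,
    route: Route,
) -> Result<&'a String, EmbedderError> {
    // The lookup itself is route-agnostic; only the error code depends on the route.
    embedders.get(name).ok_or(match route {
        Route::Search | Route::MultiSearch => EmbedderError::InvalidSearchEmbedder(name.to_owned()),
        Route::Similar => EmbedderError::InvalidSimilarEmbedder(name.to_owned()),
    })
}

fn main() {
    let embedders = BTreeMap::new();
    let err = resolve_embedder(&embedders, "manual", Route::Similar).unwrap_err();
    assert_eq!(err, EmbedderError::InvalidSimilarEmbedder("manual".to_owned()));
}
```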
@@ -401,7 +421,7 @@ pub struct SearchQueryWithIndex {
     pub q: Option<String>,
     #[deserr(default, error = DeserrJsonError<InvalidSearchQ>)]
     pub vector: Option<Vec<f32>>,
-    #[deserr(default, error = DeserrJsonError<InvalidHybridQuery>)]
+    #[deserr(default, error = DeserrJsonError<InvalidSearchHybridQuery>)]
     pub hybrid: Option<HybridQuery>,
     #[deserr(default, error = DeserrJsonError<InvalidSearchOffset>)]
     pub offset: Option<usize>,
@@ -553,7 +573,7 @@ pub struct SimilarQuery {
     pub limit: usize,
     #[deserr(default, error = DeserrJsonError<InvalidSimilarFilter>)]
     pub filter: Option<Value>,
-    #[deserr(error = DeserrJsonError<InvalidEmbedder>)]
+    #[deserr(error = DeserrJsonError<InvalidSimilarEmbedder>)]
     pub embedder: String,
     #[deserr(default, error = DeserrJsonError<InvalidSimilarAttributesToRetrieve>)]
     pub attributes_to_retrieve: Option<BTreeSet<String>>,
@@ -1048,9 +1068,10 @@ pub struct ComputedFacets {
     pub stats: BTreeMap<String, FacetStats>,
 }

-enum Route {
+pub enum Route {
     Search,
+    MultiSearch,
     Similar,
 }

 fn compute_facet_distribution_stats<S: AsRef<str>>(
@@ -1141,10 +1162,6 @@ struct AttributesFormat {

 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
 pub enum RetrieveVectors {
-    /// Do not touch the `_vectors` field
-    ///
-    /// this is the behavior when the vectorStore feature is disabled
-    Ignore,
     /// Remove the `_vectors` field
     ///
     /// this is the behavior when the vectorStore feature is enabled, and `retrieveVectors` is `false`
@@ -1156,15 +1173,11 @@ pub enum RetrieveVectors {
 }

 impl RetrieveVectors {
-    pub fn new(
-        retrieve_vector: bool,
-        features: index_scheduler::RoFeatures,
-    ) -> Result<Self, index_scheduler::Error> {
-        match (retrieve_vector, features.check_vector("Passing `retrieveVectors` as a parameter")) {
-            (true, Ok(())) => Ok(Self::Retrieve),
-            (true, Err(error)) => Err(error),
-            (false, Ok(())) => Ok(Self::Hide),
-            (false, Err(_)) => Ok(Self::Ignore),
-        }
+    pub fn new(retrieve_vector: bool) -> Self {
+        if retrieve_vector {
+            Self::Retrieve
+        } else {
+            Self::Hide
+        }
     }
 }
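Packaged as a standalone snippet, the new constructor is infallible: no feature check, no error path, no `Ignore` fallback, just a two-way choice mirroring the `retrieveVectors` parameter (same shape as the code in the hunk above, with the surrounding types omitted):

```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RetrieveVectors {
    /// Remove the `_vectors` field (default).
    Hide,
    /// Keep the `_vectors` field as stored for the document.
    Retrieve,
}

impl RetrieveVectors {
    // The constructor is now infallible: no feature check, no `Ignore` fallback.
    pub fn new(retrieve_vector: bool) -> Self {
        if retrieve_vector { Self::Retrieve } else { Self::Hide }
    }
}

fn main() {
    assert_eq!(RetrieveVectors::new(false), RetrieveVectors::Hide);
    assert_eq!(RetrieveVectors::new(true), RetrieveVectors::Retrieve);
}
```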
@ -760,15 +760,6 @@ async fn retrieve_vectors() {
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_document_retrieve_vectors"
|
||||
}
|
||||
"###);
|
||||
let (response, _code) = index.get_all_documents_raw("?retrieveVectors=true").await;
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"message": "Passing `retrieveVectors` as a parameter requires enabling the `vector store` experimental feature. See https://github.com/meilisearch/product/discussions/677",
|
||||
"code": "feature_not_enabled",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#feature_not_enabled"
|
||||
}
|
||||
"###);
|
||||
|
||||
// FETCH ALL DOCUMENTS BY POST
|
||||
let (response, _code) =
|
||||
@ -781,15 +772,6 @@ async fn retrieve_vectors() {
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_document_retrieve_vectors"
|
||||
}
|
||||
"###);
|
||||
let (response, _code) = index.get_document_by_filter(json!({ "retrieveVectors": true })).await;
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"message": "Passing `retrieveVectors` as a parameter requires enabling the `vector store` experimental feature. See https://github.com/meilisearch/product/discussions/677",
|
||||
"code": "feature_not_enabled",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#feature_not_enabled"
|
||||
}
|
||||
"###);
|
||||
|
||||
// GET A SINGLE DOCUMENT
|
||||
let (response, _code) = index.get_document(0, Some(json!({"retrieveVectors": "tamo"}))).await;
|
||||
@ -801,13 +783,4 @@ async fn retrieve_vectors() {
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_document_retrieve_vectors"
|
||||
}
|
||||
"###);
|
||||
let (response, _code) = index.get_document(0, Some(json!({"retrieveVectors": true}))).await;
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"message": "Passing `retrieveVectors` as a parameter requires enabling the `vector store` experimental feature. See https://github.com/meilisearch/product/discussions/677",
|
||||
"code": "feature_not_enabled",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#feature_not_enabled"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
@ -518,17 +518,6 @@ async fn get_document_by_filter() {
|
||||
async fn get_document_with_vectors() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("doggo");
|
||||
let (value, code) = server.set_features(json!({"vectorStore": true})).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(value, @r###"
|
||||
{
|
||||
"vectorStore": true,
|
||||
"metrics": false,
|
||||
"logsRoute": false,
|
||||
"editDocumentsByFunction": false,
|
||||
"containsFilter": false
|
||||
}
|
||||
"###);
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
|
@ -78,6 +78,7 @@ async fn import_dump_v1_movie_raw() {
|
||||
"pagination": {
|
||||
"maxTotalHits": 1000
|
||||
},
|
||||
"embedders": {},
|
||||
"searchCutoffMs": null,
|
||||
"localizedAttributes": null,
|
||||
"facetSearch": true,
|
||||
@ -243,6 +244,7 @@ async fn import_dump_v1_movie_with_settings() {
|
||||
"pagination": {
|
||||
"maxTotalHits": 1000
|
||||
},
|
||||
"embedders": {},
|
||||
"searchCutoffMs": null,
|
||||
"localizedAttributes": null,
|
||||
"facetSearch": true,
|
||||
@ -394,6 +396,7 @@ async fn import_dump_v1_rubygems_with_settings() {
|
||||
"pagination": {
|
||||
"maxTotalHits": 1000
|
||||
},
|
||||
"embedders": {},
|
||||
"searchCutoffMs": null,
|
||||
"localizedAttributes": null,
|
||||
"facetSearch": true,
|
||||
@ -531,6 +534,7 @@ async fn import_dump_v2_movie_raw() {
|
||||
"pagination": {
|
||||
"maxTotalHits": 1000
|
||||
},
|
||||
"embedders": {},
|
||||
"searchCutoffMs": null,
|
||||
"localizedAttributes": null,
|
||||
"facetSearch": true,
|
||||
@ -680,6 +684,7 @@ async fn import_dump_v2_movie_with_settings() {
|
||||
"pagination": {
|
||||
"maxTotalHits": 1000
|
||||
},
|
||||
"embedders": {},
|
||||
"searchCutoffMs": null,
|
||||
"localizedAttributes": null,
|
||||
"facetSearch": true,
|
||||
@ -828,6 +833,7 @@ async fn import_dump_v2_rubygems_with_settings() {
|
||||
"pagination": {
|
||||
"maxTotalHits": 1000
|
||||
},
|
||||
"embedders": {},
|
||||
"searchCutoffMs": null,
|
||||
"localizedAttributes": null,
|
||||
"facetSearch": true,
|
||||
@ -965,6 +971,7 @@ async fn import_dump_v3_movie_raw() {
|
||||
"pagination": {
|
||||
"maxTotalHits": 1000
|
||||
},
|
||||
"embedders": {},
|
||||
"searchCutoffMs": null,
|
||||
"localizedAttributes": null,
|
||||
"facetSearch": true,
|
||||
@ -1114,6 +1121,7 @@ async fn import_dump_v3_movie_with_settings() {
|
||||
"pagination": {
|
||||
"maxTotalHits": 1000
|
||||
},
|
||||
"embedders": {},
|
||||
"searchCutoffMs": null,
|
||||
"localizedAttributes": null,
|
||||
"facetSearch": true,
|
||||
@ -1262,6 +1270,7 @@ async fn import_dump_v3_rubygems_with_settings() {
|
||||
"pagination": {
|
||||
"maxTotalHits": 1000
|
||||
},
|
||||
"embedders": {},
|
||||
"searchCutoffMs": null,
|
||||
"localizedAttributes": null,
|
||||
"facetSearch": true,
|
||||
@ -1399,6 +1408,7 @@ async fn import_dump_v4_movie_raw() {
|
||||
"pagination": {
|
||||
"maxTotalHits": 1000
|
||||
},
|
||||
"embedders": {},
|
||||
"searchCutoffMs": null,
|
||||
"localizedAttributes": null,
|
||||
"facetSearch": true,
|
||||
@ -1548,6 +1558,7 @@ async fn import_dump_v4_movie_with_settings() {
|
||||
"pagination": {
|
||||
"maxTotalHits": 1000
|
||||
},
|
||||
"embedders": {},
|
||||
"searchCutoffMs": null,
|
||||
"localizedAttributes": null,
|
||||
"facetSearch": true,
|
||||
@ -1696,6 +1707,7 @@ async fn import_dump_v4_rubygems_with_settings() {
|
||||
"pagination": {
|
||||
"maxTotalHits": 1000
|
||||
},
|
||||
"embedders": {},
|
||||
"searchCutoffMs": null,
|
||||
"localizedAttributes": null,
|
||||
"facetSearch": true,
|
||||
@ -1893,7 +1905,6 @@ async fn import_dump_v6_containing_experimental_features() {
|
||||
meili_snap::snapshot!(code, @"200 OK");
|
||||
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
|
||||
{
|
||||
"vectorStore": false,
|
||||
"metrics": false,
|
||||
"logsRoute": false,
|
||||
"editDocumentsByFunction": false,
|
||||
@ -1945,6 +1956,7 @@ async fn import_dump_v6_containing_experimental_features() {
|
||||
"pagination": {
|
||||
"maxTotalHits": 1000
|
||||
},
|
||||
"embedders": {},
|
||||
"searchCutoffMs": null,
|
||||
"localizedAttributes": null,
|
||||
"facetSearch": true,
|
||||
@ -1988,16 +2000,7 @@ async fn generate_and_import_dump_containing_vectors() {
|
||||
let temp = tempfile::tempdir().unwrap();
|
||||
let mut opt = default_settings(temp.path());
|
||||
let server = Server::new_with_options(opt.clone()).await.unwrap();
|
||||
let (code, _) = server.set_features(json!({"vectorStore": true})).await;
|
||||
snapshot!(code, @r###"
|
||||
{
|
||||
"vectorStore": true,
|
||||
"metrics": false,
|
||||
"logsRoute": false,
|
||||
"editDocumentsByFunction": false,
|
||||
"containsFilter": false
|
||||
}
|
||||
"###);
|
||||
|
||||
let index = server.index("pets");
|
||||
let (response, code) = index
|
||||
.update_settings(json!(
|
||||
@ -2063,7 +2066,6 @@ async fn generate_and_import_dump_containing_vectors() {
|
||||
meili_snap::snapshot!(code, @"200 OK");
|
||||
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
|
||||
{
|
||||
"vectorStore": true,
|
||||
"metrics": false,
|
||||
"logsRoute": false,
|
||||
"editDocumentsByFunction": false,
|
||||
|
@@ -7,7 +7,7 @@ use crate::json;
 /// Feature name to test against.
 /// This will have to be changed by a different one when that feature is stabilized.
 /// All tests that need to set a feature can make use of this constant.
-const FEATURE_NAME: &str = "vectorStore";
+const FEATURE_NAME: &str = "metrics";

 #[actix_rt::test]
 async fn experimental_features() {
@ -18,7 +18,6 @@ async fn experimental_features() {
|
||||
meili_snap::snapshot!(code, @"200 OK");
|
||||
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
|
||||
{
|
||||
"vectorStore": false,
|
||||
"metrics": false,
|
||||
"logsRoute": false,
|
||||
"editDocumentsByFunction": false,
|
||||
@ -31,8 +30,7 @@ async fn experimental_features() {
|
||||
meili_snap::snapshot!(code, @"200 OK");
|
||||
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
|
||||
{
|
||||
"vectorStore": true,
|
||||
"metrics": false,
|
||||
"metrics": true,
|
||||
"logsRoute": false,
|
||||
"editDocumentsByFunction": false,
|
||||
"containsFilter": false
|
||||
@ -44,8 +42,7 @@ async fn experimental_features() {
|
||||
meili_snap::snapshot!(code, @"200 OK");
|
||||
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
|
||||
{
|
||||
"vectorStore": true,
|
||||
"metrics": false,
|
||||
"metrics": true,
|
||||
"logsRoute": false,
|
||||
"editDocumentsByFunction": false,
|
||||
"containsFilter": false
|
||||
@ -58,8 +55,7 @@ async fn experimental_features() {
|
||||
meili_snap::snapshot!(code, @"200 OK");
|
||||
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
|
||||
{
|
||||
"vectorStore": true,
|
||||
"metrics": false,
|
||||
"metrics": true,
|
||||
"logsRoute": false,
|
||||
"editDocumentsByFunction": false,
|
||||
"containsFilter": false
|
||||
@ -72,8 +68,7 @@ async fn experimental_features() {
|
||||
meili_snap::snapshot!(code, @"200 OK");
|
||||
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
|
||||
{
|
||||
"vectorStore": true,
|
||||
"metrics": false,
|
||||
"metrics": true,
|
||||
"logsRoute": false,
|
||||
"editDocumentsByFunction": false,
|
||||
"containsFilter": false
|
||||
@ -93,7 +88,6 @@ async fn experimental_feature_metrics() {
|
||||
meili_snap::snapshot!(code, @"200 OK");
|
||||
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
|
||||
{
|
||||
"vectorStore": false,
|
||||
"metrics": true,
|
||||
"logsRoute": false,
|
||||
"editDocumentsByFunction": false,
|
||||
@ -152,7 +146,7 @@ async fn errors() {
|
||||
meili_snap::snapshot!(code, @"400 Bad Request");
|
||||
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
|
||||
{
|
||||
"message": "Unknown field `NotAFeature`: expected one of `vectorStore`, `metrics`, `logsRoute`, `editDocumentsByFunction`, `containsFilter`",
|
||||
"message": "Unknown field `NotAFeature`: expected one of `metrics`, `logsRoute`, `editDocumentsByFunction`, `containsFilter`",
|
||||
"code": "bad_request",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#bad_request"
|
||||
@ -165,7 +159,7 @@ async fn errors() {
|
||||
meili_snap::snapshot!(code, @"400 Bad Request");
|
||||
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
|
||||
{
|
||||
"message": "Invalid value type at `.vectorStore`: expected a boolean, but found a positive integer: `42`",
|
||||
"message": "Invalid value type at `.metrics`: expected a boolean, but found a positive integer: `42`",
|
||||
"code": "bad_request",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#bad_request"
|
||||
@ -178,7 +172,7 @@ async fn errors() {
|
||||
meili_snap::snapshot!(code, @"400 Bad Request");
|
||||
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
|
||||
{
|
||||
"message": "Invalid value type at `.vectorStore`: expected a boolean, but found a string: `\"true\"`",
|
||||
"message": "Invalid value type at `.metrics`: expected a boolean, but found a string: `\"true\"`",
|
||||
"code": "bad_request",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#bad_request"
|
||||
|
@ -11,19 +11,6 @@ async fn index_with_documents_user_provided<'a>(
|
||||
) -> Index<'a> {
|
||||
let index = server.index("test");
|
||||
|
||||
let (response, code) = server.set_features(json!({"vectorStore": true})).await;
|
||||
|
||||
meili_snap::snapshot!(code, @"200 OK");
|
||||
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
|
||||
{
|
||||
"vectorStore": true,
|
||||
"metrics": false,
|
||||
"logsRoute": false,
|
||||
"editDocumentsByFunction": false,
|
||||
"containsFilter": false
|
||||
}
|
||||
"###);
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({ "embedders": {"default": {
|
||||
"source": "userProvided",
|
||||
@ -41,19 +28,6 @@ async fn index_with_documents_user_provided<'a>(
|
||||
async fn index_with_documents_hf<'a>(server: &'a Server, documents: &Value) -> Index<'a> {
|
||||
let index = server.index("test");
|
||||
|
||||
let (response, code) = server.set_features(json!({"vectorStore": true})).await;
|
||||
|
||||
meili_snap::snapshot!(code, @"200 OK");
|
||||
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
|
||||
{
|
||||
"vectorStore": true,
|
||||
"metrics": false,
|
||||
"logsRoute": false,
|
||||
"editDocumentsByFunction": false,
|
||||
"containsFilter": false
|
||||
}
|
||||
"###);
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({ "embedders": {"default": {
|
||||
"source": "huggingFace",
|
||||
|
@ -818,13 +818,6 @@ async fn test_score_details() {
|
||||
"green",
|
||||
"red"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
-100,
|
||||
231,
|
||||
32
|
||||
]
|
||||
},
|
||||
"_rankingScoreDetails": {
|
||||
"words": {
|
||||
"order": 0,
|
||||
@ -1159,206 +1152,6 @@ async fn test_degraded_score_details() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn experimental_feature_vector_store() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
|
||||
let documents = DOCUMENTS.clone();
|
||||
|
||||
let (task, _status_code) = index.add_documents(json!(documents), None).await;
|
||||
index.wait_task(task.uid()).await.succeeded();
|
||||
|
||||
let (response, code) = index
|
||||
.search_post(json!({
|
||||
"vector": [1.0, 2.0, 3.0],
|
||||
"hybrid": {
|
||||
"embedder": "manual",
|
||||
},
|
||||
"showRankingScore": true
|
||||
}))
|
||||
.await;
|
||||
|
||||
{
|
||||
meili_snap::snapshot!(code, @"400 Bad Request");
|
||||
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
|
||||
{
|
||||
"message": "Passing `vector` as a parameter requires enabling the `vector store` experimental feature. See https://github.com/meilisearch/product/discussions/677",
|
||||
"code": "feature_not_enabled",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#feature_not_enabled"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
index
|
||||
.search(json!({
|
||||
"retrieveVectors": true,
|
||||
"showRankingScore": true
|
||||
}), |response, code|{
|
||||
meili_snap::snapshot!(code, @"400 Bad Request");
|
||||
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
|
||||
{
|
||||
"message": "Passing `retrieveVectors` as a parameter requires enabling the `vector store` experimental feature. See https://github.com/meilisearch/product/discussions/677",
|
||||
"code": "feature_not_enabled",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#feature_not_enabled"
|
||||
}
|
||||
"###);
|
||||
})
|
||||
.await;
|
||||
|
||||
let (response, code) = server.set_features(json!({"vectorStore": true})).await;
|
||||
meili_snap::snapshot!(code, @"200 OK");
|
||||
meili_snap::snapshot!(response["vectorStore"], @"true");
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({"embedders": {
|
||||
"manual": {
|
||||
"source": "userProvided",
|
||||
"dimensions": 3,
|
||||
}
|
||||
}}))
|
||||
.await;
|
||||
|
||||
meili_snap::snapshot!(response, @r###"
|
||||
{
|
||||
"taskUid": 1,
|
||||
"indexUid": "test",
|
||||
"status": "enqueued",
|
||||
"type": "settingsUpdate",
|
||||
"enqueuedAt": "[date]"
|
||||
}
|
||||
"###);
|
||||
meili_snap::snapshot!(code, @"202 Accepted");
|
||||
let response = index.wait_task(response.uid()).await;
|
||||
|
||||
meili_snap::snapshot!(meili_snap::json_string!(response["status"]), @"\"succeeded\"");
|
||||
|
||||
let (response, code) = index
|
||||
.search_post(json!({
|
||||
"vector": [1.0, 2.0, 3.0],
|
||||
"hybrid": {
|
||||
"embedder": "manual",
|
||||
},
|
||||
"showRankingScore": true,
|
||||
"retrieveVectors": true,
|
||||
}))
|
||||
.await;
|
||||
|
||||
meili_snap::snapshot!(code, @"200 OK");
|
||||
// vector search returns all documents that don't have vectors in the last bucket, like all sorts
|
||||
meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
|
||||
[
|
||||
{
|
||||
"title": "Shazam!",
|
||||
"id": "287947",
|
||||
"color": [
|
||||
"green",
|
||||
"blue"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": {
|
||||
"embeddings": [
|
||||
[
|
||||
1.0,
|
||||
2.0,
|
||||
3.0
|
||||
]
|
||||
],
|
||||
"regenerate": false
|
||||
}
|
||||
},
|
||||
"_rankingScore": 1.0
|
||||
},
|
||||
{
|
||||
"title": "Captain Marvel",
|
||||
"id": "299537",
|
||||
"color": [
|
||||
"yellow",
|
||||
"blue"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": {
|
||||
"embeddings": [
|
||||
[
|
||||
1.0,
|
||||
2.0,
|
||||
54.0
|
||||
]
|
||||
],
|
||||
"regenerate": false
|
||||
}
|
||||
},
|
||||
"_rankingScore": 0.9129111766815186
|
||||
},
|
||||
{
|
||||
"title": "Gläss",
|
||||
"id": "450465",
|
||||
"color": [
|
||||
"blue",
|
||||
"red"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": {
|
||||
"embeddings": [
|
||||
[
|
||||
-100.0,
|
||||
340.0,
|
||||
90.0
|
||||
]
|
||||
],
|
||||
"regenerate": false
|
||||
}
|
||||
},
|
||||
"_rankingScore": 0.8106412887573242
|
||||
},
|
||||
{
|
||||
"title": "How to Train Your Dragon: The Hidden World",
|
||||
"id": "166428",
|
||||
"color": [
|
||||
"green",
|
||||
"red"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": {
|
||||
"embeddings": [
|
||||
[
|
||||
-100.0,
|
||||
231.0,
|
||||
32.0
|
||||
]
|
||||
],
|
||||
"regenerate": false
|
||||
}
|
||||
},
|
||||
"_rankingScore": 0.7412010431289673
|
||||
},
|
||||
{
|
||||
"title": "Escape Room",
|
||||
"id": "522681",
|
||||
"color": [
|
||||
"yellow",
|
||||
"red"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": {
|
||||
"embeddings": [
|
||||
[
|
||||
10.0,
|
||||
-23.0,
|
||||
32.0
|
||||
]
|
||||
],
|
||||
"regenerate": false
|
||||
}
|
||||
},
|
||||
"_rankingScore": 0.6972063183784485
|
||||
}
|
||||
]
|
||||
"###);
|
||||
}
|
||||
|
||||
#[cfg(feature = "default")]
|
||||
#[actix_rt::test]
|
||||
async fn camelcased_words() {
|
||||
@ -1611,14 +1404,7 @@ async fn simple_search_with_strange_synonyms() {
|
||||
"color": [
|
||||
"green",
|
||||
"red"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
-100,
|
||||
231,
|
||||
32
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
"###);
|
||||
@ -1636,14 +1422,7 @@ async fn simple_search_with_strange_synonyms() {
|
||||
"color": [
|
||||
"green",
|
||||
"red"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
-100,
|
||||
231,
|
||||
32
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
"###);
|
||||
@ -1661,14 +1440,7 @@ async fn simple_search_with_strange_synonyms() {
|
||||
"color": [
|
||||
"green",
|
||||
"red"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
-100,
|
||||
231,
|
||||
32
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
"###);
|
||||
|
@ -110,14 +110,7 @@ async fn simple_search_single_index() {
|
||||
"color": [
|
||||
"blue",
|
||||
"red"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
-100,
|
||||
340,
|
||||
90
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"query": "glass",
|
||||
@ -135,14 +128,7 @@ async fn simple_search_single_index() {
|
||||
"color": [
|
||||
"yellow",
|
||||
"blue"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
1,
|
||||
2,
|
||||
54
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"query": "captain",
|
||||
@ -180,13 +166,6 @@ async fn federation_single_search_single_index() {
|
||||
"blue",
|
||||
"red"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
-100,
|
||||
340,
|
||||
90
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "test",
|
||||
"queriesPosition": 0,
|
||||
@ -303,13 +282,6 @@ async fn federation_two_search_single_index() {
|
||||
"blue",
|
||||
"red"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
-100,
|
||||
340,
|
||||
90
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "test",
|
||||
"queriesPosition": 0,
|
||||
@ -323,13 +295,6 @@ async fn federation_two_search_single_index() {
|
||||
"yellow",
|
||||
"blue"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
1,
|
||||
2,
|
||||
54
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "test",
|
||||
"queriesPosition": 1,
|
||||
@ -477,14 +442,7 @@ async fn simple_search_two_indexes() {
|
||||
"color": [
|
||||
"blue",
|
||||
"red"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
-100,
|
||||
340,
|
||||
90
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"query": "glass",
|
||||
@ -510,14 +468,7 @@ async fn simple_search_two_indexes() {
|
||||
"age": 4
|
||||
}
|
||||
],
|
||||
"cattos": "pésti",
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
1,
|
||||
2,
|
||||
3
|
||||
]
|
||||
}
|
||||
"cattos": "pésti"
|
||||
},
|
||||
{
|
||||
"id": 654,
|
||||
@ -532,14 +483,7 @@ async fn simple_search_two_indexes() {
|
||||
"cattos": [
|
||||
"simba",
|
||||
"pestiféré"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
1,
|
||||
2,
|
||||
54
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"query": "pésti",
|
||||
@ -583,13 +527,6 @@ async fn federation_two_search_two_indexes() {
|
||||
"blue",
|
||||
"red"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
-100,
|
||||
340,
|
||||
90
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "test",
|
||||
"queriesPosition": 0,
|
||||
@ -611,13 +548,6 @@ async fn federation_two_search_two_indexes() {
|
||||
}
|
||||
],
|
||||
"cattos": "pésti",
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
1,
|
||||
2,
|
||||
3
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "nested",
|
||||
"queriesPosition": 1,
|
||||
@ -638,13 +568,6 @@ async fn federation_two_search_two_indexes() {
|
||||
"simba",
|
||||
"pestiféré"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
1,
|
||||
2,
|
||||
54
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "nested",
|
||||
"queriesPosition": 1,
|
||||
@ -705,13 +628,6 @@ async fn federation_multiple_search_multiple_indexes() {
|
||||
"blue",
|
||||
"red"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
-100,
|
||||
340,
|
||||
90
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "test",
|
||||
"queriesPosition": 0,
|
||||
@ -733,13 +649,6 @@ async fn federation_multiple_search_multiple_indexes() {
|
||||
}
|
||||
],
|
||||
"cattos": "pésti",
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
1,
|
||||
2,
|
||||
3
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "nested",
|
||||
"queriesPosition": 2,
|
||||
@ -771,13 +680,6 @@ async fn federation_multiple_search_multiple_indexes() {
|
||||
"yellow",
|
||||
"blue"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
1,
|
||||
2,
|
||||
54
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "test",
|
||||
"queriesPosition": 1,
|
||||
@ -791,13 +693,6 @@ async fn federation_multiple_search_multiple_indexes() {
|
||||
"yellow",
|
||||
"red"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
10,
|
||||
-23,
|
||||
32
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "test",
|
||||
"queriesPosition": 3,
|
||||
@ -822,13 +717,6 @@ async fn federation_multiple_search_multiple_indexes() {
|
||||
"moumoute",
|
||||
"gomez"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
10,
|
||||
23,
|
||||
32
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "nested",
|
||||
"queriesPosition": 4,
|
||||
@ -867,13 +755,6 @@ async fn federation_multiple_search_multiple_indexes() {
|
||||
"simba",
|
||||
"pestiféré"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
1,
|
||||
2,
|
||||
54
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "nested",
|
||||
"queriesPosition": 2,
|
||||
@ -896,13 +777,6 @@ async fn federation_multiple_search_multiple_indexes() {
|
||||
"green",
|
||||
"red"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
-100,
|
||||
231,
|
||||
32
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "test",
|
||||
"queriesPosition": 6,
|
||||
@ -1391,13 +1265,6 @@ async fn federation_sort_same_indexes_same_criterion_same_direction() {
|
||||
}
|
||||
],
|
||||
"cattos": "pésti",
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
1,
|
||||
2,
|
||||
3
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "nested",
|
||||
"queriesPosition": 0,
|
||||
@ -1412,13 +1279,6 @@ async fn federation_sort_same_indexes_same_criterion_same_direction() {
|
||||
"cattos": [
|
||||
"enigma"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
10,
|
||||
23,
|
||||
32
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "nested",
|
||||
"queriesPosition": 0,
|
||||
@ -1440,13 +1300,6 @@ async fn federation_sort_same_indexes_same_criterion_same_direction() {
|
||||
"simba",
|
||||
"pestiféré"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
1,
|
||||
2,
|
||||
54
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "nested",
|
||||
"queriesPosition": 0,
|
||||
@ -1472,13 +1325,6 @@ async fn federation_sort_same_indexes_same_criterion_same_direction() {
|
||||
"moumoute",
|
||||
"gomez"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
10,
|
||||
23,
|
||||
32
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "nested",
|
||||
"queriesPosition": 0,
|
||||
@ -1520,13 +1366,6 @@ async fn federation_sort_same_indexes_same_criterion_same_direction() {
|
||||
}
|
||||
],
|
||||
"cattos": "pésti",
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
1,
|
||||
2,
|
||||
3
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "nested",
|
||||
"queriesPosition": 0,
|
||||
@ -1548,13 +1387,6 @@ async fn federation_sort_same_indexes_same_criterion_same_direction() {
|
||||
"simba",
|
||||
"pestiféré"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
1,
|
||||
2,
|
||||
54
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "nested",
|
||||
"queriesPosition": 0,
|
||||
@ -1580,13 +1412,6 @@ async fn federation_sort_same_indexes_same_criterion_same_direction() {
|
||||
"moumoute",
|
||||
"gomez"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
10,
|
||||
23,
|
||||
32
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "nested",
|
||||
"queriesPosition": 1,
|
||||
@ -1714,13 +1539,6 @@ async fn federation_sort_same_indexes_different_criterion_same_direction() {
|
||||
}
|
||||
],
|
||||
"cattos": "pésti",
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
1,
|
||||
2,
|
||||
3
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "nested",
|
||||
"queriesPosition": 1,
|
||||
@ -1746,13 +1564,6 @@ async fn federation_sort_same_indexes_different_criterion_same_direction() {
|
||||
"moumoute",
|
||||
"gomez"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
10,
|
||||
23,
|
||||
32
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "nested",
|
||||
"queriesPosition": 1,
|
||||
@ -1767,13 +1578,6 @@ async fn federation_sort_same_indexes_different_criterion_same_direction() {
|
||||
"cattos": [
|
||||
"enigma"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
10,
|
||||
23,
|
||||
32
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "nested",
|
||||
"queriesPosition": 0,
|
||||
@ -1795,13 +1599,6 @@ async fn federation_sort_same_indexes_different_criterion_same_direction() {
|
||||
"simba",
|
||||
"pestiféré"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
1,
|
||||
2,
|
||||
54
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "nested",
|
||||
"queriesPosition": 1,
|
||||
@ -1843,13 +1640,6 @@ async fn federation_sort_same_indexes_different_criterion_same_direction() {
|
||||
"simba",
|
||||
"pestiféré"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
1,
|
||||
2,
|
||||
54
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "nested",
|
||||
"queriesPosition": 0,
|
||||
@ -1872,13 +1662,6 @@ async fn federation_sort_same_indexes_different_criterion_same_direction() {
|
||||
}
|
||||
],
|
||||
"cattos": "pésti",
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
1,
|
||||
2,
|
||||
3
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "nested",
|
||||
"queriesPosition": 0,
|
||||
@ -1904,13 +1687,6 @@ async fn federation_sort_same_indexes_different_criterion_same_direction() {
|
||||
"moumoute",
|
||||
"gomez"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
10,
|
||||
23,
|
||||
32
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "nested",
|
||||
"queriesPosition": 1,
|
||||
@ -2101,13 +1877,6 @@ async fn federation_sort_different_indexes_same_criterion_same_direction() {
|
||||
"yellow",
|
||||
"blue"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
1,
|
||||
2,
|
||||
54
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "movies",
|
||||
"queriesPosition": 0,
|
||||
@ -2122,13 +1891,6 @@ async fn federation_sort_different_indexes_same_criterion_same_direction() {
|
||||
"yellow",
|
||||
"red"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
10,
|
||||
-23,
|
||||
32
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "movies",
|
||||
"queriesPosition": 0,
|
||||
@ -2143,13 +1905,6 @@ async fn federation_sort_different_indexes_same_criterion_same_direction() {
|
||||
"blue",
|
||||
"red"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
-100,
|
||||
340,
|
||||
90
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "movies",
|
||||
"queriesPosition": 0,
|
||||
@ -2164,13 +1919,6 @@ async fn federation_sort_different_indexes_same_criterion_same_direction() {
|
||||
"green",
|
||||
"red"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
-100,
|
||||
231,
|
||||
32
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "movies",
|
||||
"queriesPosition": 0,
|
||||
@ -2185,13 +1933,6 @@ async fn federation_sort_different_indexes_same_criterion_same_direction() {
|
||||
"green",
|
||||
"blue"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
1,
|
||||
2,
|
||||
3
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "movies",
|
||||
"queriesPosition": 0,
|
||||
@ -2226,13 +1967,6 @@ async fn federation_sort_different_indexes_same_criterion_same_direction() {
|
||||
"yellow",
|
||||
"blue"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
1,
|
||||
2,
|
||||
54
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "movies",
|
||||
"queriesPosition": 1,
|
||||
@ -2413,13 +2147,6 @@ async fn federation_sort_different_ranking_rules() {
|
||||
"yellow",
|
||||
"blue"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
1,
|
||||
2,
|
||||
54
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "movies",
|
||||
"queriesPosition": 0,
|
||||
@ -2434,13 +2161,6 @@ async fn federation_sort_different_ranking_rules() {
|
||||
"yellow",
|
||||
"red"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
10,
|
||||
-23,
|
||||
32
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "movies",
|
||||
"queriesPosition": 0,
|
||||
@ -2455,13 +2175,6 @@ async fn federation_sort_different_ranking_rules() {
|
||||
"blue",
|
||||
"red"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
-100,
|
||||
340,
|
||||
90
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "movies",
|
||||
"queriesPosition": 0,
|
||||
@ -2476,13 +2189,6 @@ async fn federation_sort_different_ranking_rules() {
|
||||
"green",
|
||||
"red"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
-100,
|
||||
231,
|
||||
32
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "movies",
|
||||
"queriesPosition": 0,
|
||||
@ -2497,13 +2203,6 @@ async fn federation_sort_different_ranking_rules() {
|
||||
"green",
|
||||
"blue"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
1,
|
||||
2,
|
||||
3
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "movies",
|
||||
"queriesPosition": 0,
|
||||
@ -2714,13 +2413,6 @@ async fn federation_sort_different_indexes_different_criterion_same_direction()
|
||||
"yellow",
|
||||
"blue"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
1,
|
||||
2,
|
||||
54
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "movies",
|
||||
"queriesPosition": 0,
|
||||
@ -2755,13 +2447,6 @@ async fn federation_sort_different_indexes_different_criterion_same_direction()
|
||||
"yellow",
|
||||
"red"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
10,
|
||||
-23,
|
||||
32
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "movies",
|
||||
"queriesPosition": 0,
|
||||
@ -2776,13 +2461,6 @@ async fn federation_sort_different_indexes_different_criterion_same_direction()
|
||||
"blue",
|
||||
"red"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
-100,
|
||||
340,
|
||||
90
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "movies",
|
||||
"queriesPosition": 0,
|
||||
@ -2797,13 +2475,6 @@ async fn federation_sort_different_indexes_different_criterion_same_direction()
|
||||
"green",
|
||||
"red"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
-100,
|
||||
231,
|
||||
32
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "movies",
|
||||
"queriesPosition": 0,
|
||||
@ -2818,13 +2489,6 @@ async fn federation_sort_different_indexes_different_criterion_same_direction()
|
||||
"green",
|
||||
"blue"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
1,
|
||||
2,
|
||||
3
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "movies",
|
||||
"queriesPosition": 0,
|
||||
@ -2879,13 +2543,6 @@ async fn federation_sort_different_indexes_different_criterion_same_direction()
|
||||
"yellow",
|
||||
"blue"
|
||||
],
|
||||
"_vectors": {
|
||||
"manual": [
|
||||
1,
|
||||
2,
|
||||
54
|
||||
]
|
||||
},
|
||||
"_federation": {
|
||||
"indexUid": "movies",
|
||||
"queriesPosition": 1,
|
||||
@@ -4094,13 +3751,6 @@ async fn federation_non_federated_contains_federation_option() {
#[actix_rt::test]
async fn federation_vector_single_index() {
let server = Server::new().await;
let (_, code) = server
.set_features(json!({
"vectorStore": true
}))
.await;

snapshot!(code, @"200 OK");

let index = server.index("vectors");

@@ -4302,13 +3952,6 @@ async fn federation_vector_single_index() {
#[actix_rt::test]
async fn federation_vector_two_indexes() {
let server = Server::new().await;
let (_, code) = server
.set_features(json!({
"vectorStore": true
}))
.await;

snapshot!(code, @"200 OK");

let index = server.index("vectors-animal");

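To make the federated vector scenario above concrete, here is a rough sketch of the kind of multi-search payload such a test exercises. The shape is assumed from the public multi-search API; the embedder name and vector values are illustrative, not taken from this diff.

```rust
// Rough sketch (assumed request shape): one federated request fanning out
// two vector queries to the "vectors" index used above.
let payload = json!({
    "federation": {},
    "queries": [
        {"indexUid": "vectors", "vector": [1.0, 0.0, 0.5], "hybrid": {"embedder": "manual", "semanticRatio": 1.0}},
        {"indexUid": "vectors", "vector": [0.8, 0.8, 0.8], "hybrid": {"embedder": "manual", "semanticRatio": 1.0}}
    ]
});
// Each hit in the merged response carries `_federation.indexUid` and
// `_federation.queriesPosition`, as the snapshots earlier in this diff show.
```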
@@ -152,7 +152,7 @@ test_setting_routes!(
{
setting: embedders,
update_verb: patch,
default_value: null
default_value: {}
},
{
setting: facet_search,
@@ -197,7 +197,7 @@ async fn get_settings() {
let (response, code) = index.settings().await;
assert_eq!(code, 200);
let settings = response.as_object().unwrap();
assert_eq!(settings.keys().len(), 19);
assert_eq!(settings.keys().len(), 20);
assert_eq!(settings["displayedAttributes"], json!(["*"]));
assert_eq!(settings["searchableAttributes"], json!(["*"]));
assert_eq!(settings["filterableAttributes"], json!([]));
@@ -230,23 +230,12 @@ async fn get_settings() {
assert_eq!(settings["searchCutoffMs"], json!(null));
assert_eq!(settings["prefixSearch"], json!("indexingTime"));
assert_eq!(settings["facetSearch"], json!(true));
assert_eq!(settings["embedders"], json!({}));
}

#[actix_rt::test]
async fn secrets_are_hidden_in_settings() {
let server = Server::new().await;
let (response, code) = server.set_features(json!({"vectorStore": true})).await;

meili_snap::snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
{
"vectorStore": true,
"metrics": false,
"logsRoute": false,
"editDocumentsByFunction": false,
"containsFilter": false
}
"###);

let index = server.index("test");
let (response, _code) = index.create(None).await;
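For context, a minimal sketch of how the new `embedders` default could be exercised in the same test-suite style as above. The test name and index name are illustrative; the `Server`/`Index` helpers and the assertions mirror the ones already visible in this hunk.

```rust
// Sketch only: `embedders` is now part of the default settings payload,
// so a freshly created index should report it as an empty object.
#[actix_rt::test]
async fn embedders_default_is_empty_object() {
    let server = Server::new().await;
    let index = server.index("test");
    let (response, _code) = index.create(None).await;
    index.wait_task(response.uid()).await;

    let (response, code) = index.settings().await;
    assert_eq!(code, 200);
    let settings = response.as_object().unwrap();
    // 20 keys once `embedders` is included in the response.
    assert_eq!(settings.keys().len(), 20);
    assert_eq!(settings["embedders"], json!({}));
}
```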
@ -8,7 +8,6 @@ use crate::json;
|
||||
async fn similar_unexisting_index() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
server.set_features(json!({"vectorStore": true})).await;
|
||||
|
||||
let expected_response = json!({
|
||||
"message": "Index `test` not found.",
|
||||
@ -29,7 +28,6 @@ async fn similar_unexisting_index() {
|
||||
async fn similar_unexisting_parameter() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
server.set_features(json!({"vectorStore": true})).await;
|
||||
|
||||
index
|
||||
.similar(json!({"id": 287947, "marin": "hello"}), |response, code| {
|
||||
@ -39,28 +37,10 @@ async fn similar_unexisting_parameter() {
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn similar_feature_not_enabled() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
|
||||
let (response, code) = index.similar_post(json!({"id": 287947, "embedder": "manual"})).await;
|
||||
snapshot!(code, @"400 Bad Request");
|
||||
snapshot!(json_string!(response), @r###"
|
||||
{
|
||||
"message": "Using the similar API requires enabling the `vector store` experimental feature. See https://github.com/meilisearch/product/discussions/677",
|
||||
"code": "feature_not_enabled",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#feature_not_enabled"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn similar_bad_id() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
server.set_features(json!({"vectorStore": true})).await;
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -91,7 +71,6 @@ async fn similar_bad_id() {
|
||||
async fn similar_bad_ranking_score_threshold() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
server.set_features(json!({"vectorStore": true})).await;
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -122,7 +101,6 @@ async fn similar_bad_ranking_score_threshold() {
|
||||
async fn similar_invalid_ranking_score_threshold() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
server.set_features(json!({"vectorStore": true})).await;
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -153,7 +131,6 @@ async fn similar_invalid_ranking_score_threshold() {
|
||||
async fn similar_invalid_id() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
server.set_features(json!({"vectorStore": true})).await;
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -184,7 +161,6 @@ async fn similar_invalid_id() {
|
||||
async fn similar_not_found_id() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
server.set_features(json!({"vectorStore": true})).await;
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -216,7 +192,6 @@ async fn similar_not_found_id() {
|
||||
async fn similar_bad_offset() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
server.set_features(json!({"vectorStore": true})).await;
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -259,7 +234,6 @@ async fn similar_bad_offset() {
|
||||
async fn similar_bad_limit() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
server.set_features(json!({"vectorStore": true})).await;
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -304,7 +278,6 @@ async fn similar_bad_filter() {
|
||||
// Thus the error message is not generated by deserr but written by us.
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
server.set_features(json!({"vectorStore": true})).await;
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -344,7 +317,6 @@ async fn similar_bad_filter() {
|
||||
async fn filter_invalid_syntax_object() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
server.set_features(json!({"vectorStore": true})).await;
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -383,7 +355,6 @@ async fn filter_invalid_syntax_object() {
|
||||
async fn filter_invalid_syntax_array() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
server.set_features(json!({"vectorStore": true})).await;
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -422,7 +393,6 @@ async fn filter_invalid_syntax_array() {
|
||||
async fn filter_invalid_syntax_string() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
server.set_features(json!({"vectorStore": true})).await;
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -463,7 +433,6 @@ async fn filter_invalid_syntax_string() {
|
||||
async fn filter_invalid_attribute_array() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
server.set_features(json!({"vectorStore": true})).await;
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -504,7 +473,6 @@ async fn filter_invalid_attribute_array() {
|
||||
async fn filter_invalid_attribute_string() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
server.set_features(json!({"vectorStore": true})).await;
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -545,7 +513,6 @@ async fn filter_invalid_attribute_string() {
|
||||
async fn filter_reserved_geo_attribute_array() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
server.set_features(json!({"vectorStore": true})).await;
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -586,7 +553,6 @@ async fn filter_reserved_geo_attribute_array() {
|
||||
async fn filter_reserved_geo_attribute_string() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
server.set_features(json!({"vectorStore": true})).await;
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -627,7 +593,6 @@ async fn filter_reserved_geo_attribute_string() {
|
||||
async fn filter_reserved_attribute_array() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
server.set_features(json!({"vectorStore": true})).await;
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -668,7 +633,6 @@ async fn filter_reserved_attribute_array() {
|
||||
async fn filter_reserved_attribute_string() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
server.set_features(json!({"vectorStore": true})).await;
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -709,7 +673,6 @@ async fn filter_reserved_attribute_string() {
|
||||
async fn filter_reserved_geo_point_array() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
server.set_features(json!({"vectorStore": true})).await;
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -750,7 +713,6 @@ async fn filter_reserved_geo_point_array() {
|
||||
async fn filter_reserved_geo_point_string() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
server.set_features(json!({"vectorStore": true})).await;
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -790,7 +752,6 @@ async fn filter_reserved_geo_point_string() {
|
||||
#[actix_rt::test]
|
||||
async fn similar_bad_retrieve_vectors() {
|
||||
let server = Server::new().await;
|
||||
server.set_features(json!({"vectorStore": true})).await;
|
||||
let index = server.index("test");
|
||||
|
||||
let (response, code) =
|
||||
@@ -839,3 +800,86 @@ async fn similar_bad_retrieve_vectors() {
}
"###);
}

#[actix_rt::test]
async fn similar_bad_embedder() {
let server = Server::new().await;
let index = server.index("test");

let (response, code) = index
.update_settings(json!({
"embedders": {
"manual": {
"source": "userProvided",
"dimensions": 3,
}
},
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;

let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await;

let expected_response = json!({
"message": "Cannot find embedder with name `auto`.",
"code": "invalid_similar_embedder",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_similar_embedder"
});

index
.similar(json!({"id": 287947, "embedder": "auto"}), |response, code| {
assert_eq!(response, expected_response);
assert_eq!(code, 400);
})
.await;

let expected_response = json!({
"message": "Invalid value type at `.embedder`: expected a string, but found a positive integer: `42`",
"code": "invalid_similar_embedder",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_similar_embedder"
});

let (response, code) = index.similar_post(json!({"id": 287947, "embedder": 42})).await;

assert_eq!(response, expected_response);
assert_eq!(code, 400);

let expected_response = json!({
"message": "Invalid value type at `.embedder`: expected a string, but found null",
"code": "invalid_similar_embedder",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_similar_embedder"
});

let (response, code) = index.similar_post(json!({"id": 287947, "embedder": null})).await;

assert_eq!(response, expected_response);
assert_eq!(code, 400);

let expected_response = json!({
"message": "Missing field `embedder`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad_request"
});

let (response, code) = index.similar_post(json!({"id": 287947})).await;
assert_eq!(response, expected_response);
assert_eq!(code, 400);

let expected_response = json!({
"message": "Missing parameter `embedder`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad_request"
});
let (response, code) = index.similar_get("?id=287947").await;
assert_eq!(response, expected_response);
assert_eq!(code, 400);
}
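For contrast with the error cases above, a hedged sketch of a request that should pass validation once an embedder named `manual` is configured and the documents carry matching `_vectors.manual` embeddings (as in the other similar tests in this diff). It reuses the `similar_post` helper already shown; the success assertions are assumptions, not snapshots from the PR.

```rust
// Sketch: naming a configured embedder explicitly satisfies the
// now-mandatory `embedder` field of the similar API.
let (response, code) = index
    .similar_post(json!({"id": 287947, "embedder": "manual"}))
    .await;
assert_eq!(code, 200);
// The response is expected to carry the usual `hits` array of nearest documents.
assert!(response["hits"].is_array());
```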
@ -49,17 +49,6 @@ static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
|
||||
async fn basic() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
let (value, code) = server.set_features(json!({"vectorStore": true})).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(value, @r###"
|
||||
{
|
||||
"vectorStore": true,
|
||||
"metrics": false,
|
||||
"logsRoute": false,
|
||||
"editDocumentsByFunction": false,
|
||||
"containsFilter": false
|
||||
}
|
||||
"###);
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -246,17 +235,6 @@ async fn basic() {
|
||||
async fn ranking_score_threshold() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
let (value, code) = server.set_features(json!({"vectorStore": true})).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(value, @r###"
|
||||
{
|
||||
"vectorStore": true,
|
||||
"metrics": false,
|
||||
"logsRoute": false,
|
||||
"editDocumentsByFunction": false,
|
||||
"containsFilter": false
|
||||
}
|
||||
"###);
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -527,17 +505,6 @@ async fn ranking_score_threshold() {
|
||||
async fn filter() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
let (value, code) = server.set_features(json!({"vectorStore": true})).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(value, @r###"
|
||||
{
|
||||
"vectorStore": true,
|
||||
"metrics": false,
|
||||
"logsRoute": false,
|
||||
"editDocumentsByFunction": false,
|
||||
"containsFilter": false
|
||||
}
|
||||
"###);
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -656,17 +623,6 @@ async fn filter() {
|
||||
async fn limit_and_offset() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
let (value, code) = server.set_features(json!({"vectorStore": true})).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(value, @r###"
|
||||
{
|
||||
"vectorStore": true,
|
||||
"metrics": false,
|
||||
"logsRoute": false,
|
||||
"editDocumentsByFunction": false,
|
||||
"containsFilter": false
|
||||
}
|
||||
"###);
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
|
@ -8,17 +8,6 @@ use crate::vector::generate_default_user_provided_documents;
|
||||
async fn retrieve_binary_quantize_status_in_the_settings() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("doggo");
|
||||
let (value, code) = server.set_features(json!({"vectorStore": true})).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(value, @r###"
|
||||
{
|
||||
"vectorStore": true,
|
||||
"metrics": false,
|
||||
"logsRoute": false,
|
||||
"editDocumentsByFunction": false,
|
||||
"containsFilter": false
|
||||
}
|
||||
"###);
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -78,17 +67,6 @@ async fn retrieve_binary_quantize_status_in_the_settings() {
|
||||
async fn binary_quantize_before_sending_documents() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("doggo");
|
||||
let (value, code) = server.set_features(json!({"vectorStore": true})).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(value, @r###"
|
||||
{
|
||||
"vectorStore": true,
|
||||
"metrics": false,
|
||||
"logsRoute": false,
|
||||
"editDocumentsByFunction": false,
|
||||
"containsFilter": false
|
||||
}
|
||||
"###);
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -163,17 +141,6 @@ async fn binary_quantize_before_sending_documents() {
|
||||
async fn binary_quantize_after_sending_documents() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("doggo");
|
||||
let (value, code) = server.set_features(json!({"vectorStore": true})).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(value, @r###"
|
||||
{
|
||||
"vectorStore": true,
|
||||
"metrics": false,
|
||||
"logsRoute": false,
|
||||
"editDocumentsByFunction": false,
|
||||
"containsFilter": false
|
||||
}
|
||||
"###);
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -261,17 +228,6 @@ async fn binary_quantize_after_sending_documents() {
|
||||
async fn try_to_disable_binary_quantization() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("doggo");
|
||||
let (value, code) = server.set_features(json!({"vectorStore": true})).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(value, @r###"
|
||||
{
|
||||
"vectorStore": true,
|
||||
"metrics": false,
|
||||
"logsRoute": false,
|
||||
"editDocumentsByFunction": false,
|
||||
"containsFilter": false
|
||||
}
|
||||
"###);
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
|
@@ -13,36 +13,13 @@ use crate::common::{default_settings, GetAllDocumentsOptions, Server};
use crate::json;

async fn get_server_vector() -> Server {
let server = Server::new().await;
let (value, code) = server.set_features(json!({"vectorStore": true})).await;
snapshot!(code, @"200 OK");
snapshot!(value, @r###"
{
"vectorStore": true,
"metrics": false,
"logsRoute": false,
"editDocumentsByFunction": false,
"containsFilter": false
}
"###);
server
Server::new().await
}
|
||||
#[actix_rt::test]
|
||||
async fn add_remove_user_provided() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("doggo");
|
||||
let (value, code) = server.set_features(json!({"vectorStore": true})).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(value, @r###"
|
||||
{
|
||||
"vectorStore": true,
|
||||
"metrics": false,
|
||||
"logsRoute": false,
|
||||
"editDocumentsByFunction": false,
|
||||
"containsFilter": false
|
||||
}
|
||||
"###);
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -187,17 +164,6 @@ async fn add_remove_user_provided() {
|
||||
|
||||
async fn generate_default_user_provided_documents(server: &Server) -> Index {
|
||||
let index = server.index("doggo");
|
||||
let (value, code) = server.set_features(json!({"vectorStore": true})).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(value, @r###"
|
||||
{
|
||||
"vectorStore": true,
|
||||
"metrics": false,
|
||||
"logsRoute": false,
|
||||
"editDocumentsByFunction": false,
|
||||
"containsFilter": false
|
||||
}
|
||||
"###);
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -654,17 +620,6 @@ async fn add_remove_one_vector_4588() {
|
||||
// https://github.com/meilisearch/meilisearch/issues/4588
|
||||
let server = Server::new().await;
|
||||
let index = server.index("doggo");
|
||||
let (value, code) = server.set_features(json!({"vectorStore": true})).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(value, @r###"
|
||||
{
|
||||
"vectorStore": true,
|
||||
"metrics": false,
|
||||
"logsRoute": false,
|
||||
"editDocumentsByFunction": false,
|
||||
"containsFilter": false
|
||||
}
|
||||
"###);
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
|
@ -8,17 +8,6 @@ use crate::vector::generate_default_user_provided_documents;
|
||||
async fn field_unavailable_for_source() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("doggo");
|
||||
let (value, code) = server.set_features(json!({"vectorStore": true})).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(value, @r###"
|
||||
{
|
||||
"vectorStore": true,
|
||||
"metrics": false,
|
||||
"logsRoute": false,
|
||||
"editDocumentsByFunction": false,
|
||||
"containsFilter": false
|
||||
}
|
||||
"###);
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -55,17 +44,6 @@ async fn field_unavailable_for_source() {
|
||||
async fn update_embedder() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("doggo");
|
||||
let (value, code) = server.set_features(json!({"vectorStore": true})).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(value, @r###"
|
||||
{
|
||||
"vectorStore": true,
|
||||
"metrics": false,
|
||||
"logsRoute": false,
|
||||
"editDocumentsByFunction": false,
|
||||
"containsFilter": false
|
||||
}
|
||||
"###);
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -271,9 +249,9 @@ async fn reset_embedder_documents() {
|
||||
snapshot!(json_string!(documents), @r###"
|
||||
{
|
||||
"message": "Cannot find embedder with name `default`.",
|
||||
"code": "invalid_embedder",
|
||||
"code": "invalid_search_embedder",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_embedder"
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_search_embedder"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
@@ -222,7 +222,9 @@ and can not be more than 511 bytes.", .document_id.to_string()
#[error("Too many embedders in the configuration. Found {0}, but limited to 256.")]
TooManyEmbedders(usize),
#[error("Cannot find embedder with name `{0}`.")]
InvalidEmbedder(String),
InvalidSearchEmbedder(String),
#[error("Cannot find embedder with name `{0}`.")]
InvalidSimilarEmbedder(String),
#[error("Too many vectors for document with id {0}: found {1}, but limited to 256.")]
TooManyVectors(String, usize),
#[error("`.embedders.{embedder_name}`: Field `{field}` unavailable for source `{source_}` (only available for sources: {}). Available fields: {}",
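The split matters because search and the similar route now report distinct API error codes for a missing embedder, matching the `invalid_search_embedder` and `invalid_similar_embedder` snapshots elsewhere in this diff. A minimal, self-contained sketch of that distinction; the enum and function below are illustrative stand-ins, not the crate's actual error-code machinery.

```rust
// Illustrative only: a local stand-in for the two new variants, showing how
// each one can map to its own API error code.
enum EmbedderLookupError {
    InvalidSearchEmbedder(String),
    InvalidSimilarEmbedder(String),
}

fn error_code(err: &EmbedderLookupError) -> &'static str {
    match err {
        EmbedderLookupError::InvalidSearchEmbedder(_) => "invalid_search_embedder",
        EmbedderLookupError::InvalidSimilarEmbedder(_) => "invalid_similar_embedder",
    }
}
```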
@@ -32,7 +32,7 @@ impl<Q: RankingRuleQueryTrait> VectorSort<Q> {
.index
.embedder_category_id
.get(ctx.txn, embedder_name)?
.ok_or_else(|| crate::UserError::InvalidEmbedder(embedder_name.to_owned()))?;
.ok_or_else(|| crate::UserError::InvalidSearchEmbedder(embedder_name.to_owned()))?;

Ok(Self {
query: None,
@@ -65,10 +65,9 @@ impl<'a> Similar<'a> {
let universe = universe;

let embedder_index =
self.index
.embedder_category_id
.get(self.rtxn, &self.embedder_name)?
.ok_or_else(|| crate::UserError::InvalidEmbedder(self.embedder_name.to_owned()))?;
self.index.embedder_category_id.get(self.rtxn, &self.embedder_name)?.ok_or_else(
|| crate::UserError::InvalidSimilarEmbedder(self.embedder_name.to_owned()),
)?;

let reader = ArroyWrapper::new(self.index.vector_arroy, embedder_index, self.quantized);
let results = reader.nns_by_item(