mirror of https://github.com/meilisearch/meilisearch.git
remove search multi index

commit ff3149f6fa (parent 27b3b53bc5)

This commit deletes the POST /indexes/search endpoint (search_multi_index) together with its SearchMultiBody request and SearchMultiBodyResponse response types, drops the now-unused rayon dependency, and narrows tokio's "time" feature to dev-dependencies.
Cargo.lock (generated):
@@ -1527,7 +1527,6 @@ dependencies = [
 "once_cell",
 "pretty-bytes",
 "rand 0.7.3",
-"rayon",
 "serde",
 "serde_json",
 "serde_qs",
@@ -29,7 +29,6 @@ meilisearch-tokenizer = {path = "../meilisearch-tokenizer", version = "0.10.1"}
 mime = "0.3.16"
 pretty-bytes = "0.2.2"
 rand = "0.7.3"
-rayon = "1.3.0"
 serde = { version = "1.0.105", features = ["derive"] }
 serde_json = { version = "1.0.50", features = ["preserve_order"] }
 serde_qs = "0.5.2"
@@ -47,13 +46,14 @@ actix-http = "1"
 actix-files = "0.2.1"
 actix-cors = "0.2.0"
 actix-service = "1.0.5"
-tokio = { version = "0.2.18", features = ["macros", "time"] }
+tokio = { version = "0.2.18", features = ["macros"] }

 [dev-dependencies]
 http-service = "0.4.0"
 http-service-mock = "0.4.0"
 tempdir = "0.3.7"
 once_cell = "1.3.1"
+tokio = { version = "0.2.18", features = ["macros", "time"] }

 [dev-dependencies.assert-json-diff]
 git = "https://github.com/qdequele/assert-json-diff"
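The tokio change above is the subtle part of this hunk: the "time" feature, which the removed multi-index handler needed for its Duration-based timeout, is no longer compiled into the release binary, while a new dev-dependency entry keeps it available to the test suite. A minimal sketch of a test that still relies on it, assuming tokio 0.2's API (where the timer function is delay_for; the test name and body are illustrative, not from this repository):

#[cfg(test)]
mod tests {
    use std::time::Duration;

    // `delay_for` sits behind tokio's `time` feature, which is why the
    // dev-dependency re-enables it for test builds only.
    #[tokio::test]
    async fn waits_briefly() {
        let start = std::time::Instant::now();
        tokio::time::delay_for(Duration::from_millis(10)).await;
        assert!(start.elapsed() >= Duration::from_millis(10));
    }
}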
@@ -33,7 +33,6 @@ pub fn create_app(
         .service(routes::load_html)
         .service(routes::load_css)
         .service(routes::search::search_with_url_query)
-        .service(routes::search::search_multi_index)
         .service(routes::document::get_document)
         .service(routes::document::get_all_documents)
         .wrap(helpers::Authentication::Private)
@@ -1,16 +1,14 @@
-use std::collections::HashMap;
-use std::collections::HashSet;
+use std::collections::{HashSet, HashMap};
 use std::time::Duration;

 use log::warn;
 use meilisearch_core::Index;
 use actix_web as aweb;
-use actix_web::{get, post, web};
-use rayon::iter::{IntoParallelIterator, ParallelIterator};
-use serde::{Deserialize, Serialize};
+use actix_web::{get, web};
+use serde::{Deserialize};

 use crate::error::ResponseError;
-use crate::helpers::meilisearch::{Error, IndexSearchExt, SearchHit, SearchResult};
+use crate::helpers::meilisearch::{Error, IndexSearchExt, SearchResult};
 use crate::routes::IndexParam;
 use crate::Data;

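The dropped rayon import is consistent with the Cargo changes above: search_multi_index was the crate's only user of parallel iterators. For reference, a minimal sketch of the rayon pattern the handler relied on, with illustrative names standing in for the real per-index search:

use rayon::iter::{IntoParallelIterator, ParallelIterator};

fn main() {
    let uids = vec!["movies".to_string(), "songs".to_string()];
    // `into_par_iter` fans the closure out across rayon's thread pool;
    // the removed handler used the same shape to query every index at once.
    let results: Vec<(String, usize)> = uids
        .into_par_iter()
        .map(|uid| {
            let hit_count = uid.len(); // placeholder for a real per-index search
            (uid, hit_count)
        })
        .collect();
    println!("{:?}", results);
}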
@@ -155,115 +153,3 @@ pub async fn search_with_url_query(

     Ok(web::Json(response))
 }
-
-#[derive(Clone, Deserialize)]
-#[serde(rename_all = "camelCase", deny_unknown_fields)]
-pub struct SearchMultiBody {
-    indexes: HashSet<String>,
-    query: String,
-    offset: Option<usize>,
-    limit: Option<usize>,
-    attributes_to_retrieve: Option<HashSet<String>>,
-    searchable_attributes: Option<HashSet<String>>,
-    attributes_to_crop: Option<HashMap<String, usize>>,
-    attributes_to_highlight: Option<HashSet<String>>,
-    filters: Option<String>,
-    timeout_ms: Option<u64>,
-    matches: Option<bool>,
-}
-
-#[derive(Debug, Clone, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SearchMultiBodyResponse {
-    hits: HashMap<String, Vec<SearchHit>>,
-    offset: usize,
-    hits_per_page: usize,
-    processing_time_ms: usize,
-    query: String,
-}
-
-#[post("/indexes/search")]
-pub async fn search_multi_index(
-    data: web::Data<Data>,
-    body: web::Json<SearchMultiBody>,
-) -> aweb::Result<web::Json<SearchMultiBodyResponse>> {
-    let mut index_list = body.clone().indexes;
-
-    for index in index_list.clone() {
-        if index == "*" {
-            index_list = data.db.indexes_uids().into_iter().collect();
-            break;
-        }
-    }
-
-    let mut offset = 0;
-    let mut count = 20;
-    let query = body.query.clone();
-
-    if let Some(body_offset) = body.offset {
-        if let Some(limit) = body.limit {
-            offset = body_offset;
-            count = limit;
-        }
-    }
-
-    let par_body = body.clone();
-    let responses_per_index: Vec<(String, SearchResult)> = index_list
-        .into_par_iter()
-        .map(move |index_uid| {
-            let index = data.db.open_index(&index_uid).unwrap();
-
-            let mut search_builder = index.new_search(par_body.query.clone());
-
-            search_builder.offset(offset);
-            search_builder.limit(count);
-
-            if let Some(attributes_to_retrieve) = par_body.attributes_to_retrieve.clone() {
-                search_builder.attributes_to_retrieve(attributes_to_retrieve);
-            }
-            if let Some(attributes_to_crop) = par_body.attributes_to_crop.clone() {
-                search_builder.attributes_to_crop(attributes_to_crop);
-            }
-            if let Some(attributes_to_highlight) = par_body.attributes_to_highlight.clone() {
-                search_builder.attributes_to_highlight(attributes_to_highlight);
-            }
-            if let Some(filters) = par_body.filters.clone() {
-                search_builder.filters(filters);
-            }
-            if let Some(timeout_ms) = par_body.timeout_ms {
-                search_builder.timeout(Duration::from_millis(timeout_ms));
-            }
-            if let Some(matches) = par_body.matches {
-                if matches {
-                    search_builder.get_matches();
-                }
-            }
-
-            let reader = data.db.main_read_txn().unwrap();
-            let response = search_builder.search(&reader).unwrap();
-
-            (index_uid, response)
-        })
-        .collect();
-
-    let mut hits_map = HashMap::new();
-
-    let mut max_query_time = 0;
-
-    for (index_uid, response) in responses_per_index {
-        if response.processing_time_ms > max_query_time {
-            max_query_time = response.processing_time_ms;
-        }
-        hits_map.insert(index_uid, response.hits);
-    }
-
-    let response = SearchMultiBodyResponse {
-        hits: hits_map,
-        offset,
-        hits_per_page: count,
-        processing_time_ms: max_query_time,
-        query,
-    };
-
-    Ok(web::Json(response))
-}
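Since this hunk removes the only multi-index search endpoint, a client that needs results from several indexes now has to fan out one request per index and merge the hits itself, much as the deleted handler did server-side (minus the rayon parallelism). A minimal client-side sketch; search_one_index and Hit are hypothetical stand-ins, not MeiliSearch APIs:

use std::collections::HashMap;

#[derive(Debug)]
struct Hit; // stand-in for one search hit returned by an index

// Hypothetical helper: in a real client this would issue
// GET /indexes/{uid}/search?q={query} against the server.
fn search_one_index(_uid: &str, _query: &str) -> Vec<Hit> {
    Vec::new()
}

// Sequential replacement for the removed POST /indexes/search:
// query each index in turn and key the hits by index uid,
// mirroring the `hits` map the deleted handler returned.
fn multi_index_search(uids: &[&str], query: &str) -> HashMap<String, Vec<Hit>> {
    uids.iter()
        .map(|uid| (uid.to_string(), search_one_index(uid, query)))
        .collect()
}

fn main() {
    let hits = multi_index_search(&["movies", "songs"], "carol");
    println!("{} indexes searched", hits.len());
}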