use std::collections::{HashMap, HashSet};
use std::time::Duration;

use actix_web as aweb;
use actix_web::{get, post, web};
use log::warn;
use meilisearch_core::Index;
use rayon::iter::{IntoParallelIterator, ParallelIterator};
use serde::{Deserialize, Serialize};

use crate::error::ResponseError;
use crate::helpers::meilisearch::{Error, IndexSearchExt, SearchHit, SearchResult};
use crate::routes::IndexParam;
use crate::Data;

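/// Query-string parameters accepted by the single-index search route.
/// Thanks to `rename_all = "camelCase"`, they are exposed to clients as
/// `q`, `offset`, `limit`, `attributesToRetrieve`, `attributesToCrop`,
/// `cropLength`, `attributesToHighlight`, `filters`, `timeoutMs` and `matches`.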
#[derive(Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct SearchQuery {
    q: String,
    offset: Option<usize>,
    limit: Option<usize>,
    attributes_to_retrieve: Option<String>,
    attributes_to_crop: Option<String>,
    crop_length: Option<usize>,
    attributes_to_highlight: Option<String>,
    filters: Option<String>,
    timeout_ms: Option<u64>,
    matches: Option<bool>,
}

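/// Searches a single index with parameters taken from the URL query string.
///
/// A hypothetical request against an index named `movies` could look like:
/// `GET /indexes/movies/search?q=batman&limit=5&attributesToHighlight=*`
/// (the index name is only an example; any existing index uid works).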
#[get("/indexes/{index_uid}/search")]
pub async fn search_with_url_query(
    data: web::Data<Data>,
    path: web::Path<IndexParam>,
    params: web::Query<SearchQuery>,
) -> aweb::Result<web::Json<SearchResult>> {
    let index = data
        .db
        .open_index(path.index_uid.clone())
        .ok_or(ResponseError::IndexNotFound(path.index_uid.clone()))?;

    let reader = data
        .db
        .main_read_txn()
        .map_err(|err| ResponseError::Internal(err.to_string()))?;

    let schema = index
        .main
        .schema(&reader)
        .map_err(|err| ResponseError::Internal(err.to_string()))?
        .ok_or(ResponseError::Internal(
            "Impossible to retrieve the schema".to_string(),
        ))?;

    let mut search_builder = index.new_search(params.q.clone());

    if let Some(offset) = params.offset {
        search_builder.offset(offset);
    }
    if let Some(limit) = params.limit {
        search_builder.limit(limit);
    }

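    // `attributesToRetrieve` restricts which fields are returned in each hit.
    // `*` (or omitting the parameter) keeps every displayed attribute of the
    // schema; unknown attribute names are ignored with a warning.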
    let available_attributes = schema.displayed_name();
    let mut restricted_attributes: HashSet<&str>;
    match &params.attributes_to_retrieve {
        Some(attributes_to_retrieve) => {
            let attributes_to_retrieve: HashSet<&str> =
                attributes_to_retrieve.split(',').collect();
            if attributes_to_retrieve.contains("*") {
                restricted_attributes = available_attributes.clone();
            } else {
                restricted_attributes = HashSet::new();
                for attr in attributes_to_retrieve {
                    if available_attributes.contains(attr) {
                        restricted_attributes.insert(attr);
                        search_builder.add_retrievable_field(attr.to_string());
                    } else {
                        warn!(
                            "The attribute {:?} present in the attributesToRetrieve parameter does not exist",
                            attr
                        );
                    }
                }
            }
        }
        None => {
            restricted_attributes = available_attributes.clone();
        }
    }

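    // `attributesToCrop` is a comma-separated list of `attribute:length` pairs;
    // a missing length falls back to `cropLength` (default 200), and `*` applies
    // the crop to every retrievable attribute.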
    if let Some(attributes_to_crop) = &params.attributes_to_crop {
        let default_length = params.crop_length.unwrap_or(200);
        let mut final_attributes: HashMap<String, usize> = HashMap::new();

        for attribute in attributes_to_crop.split(',') {
            let mut attribute = attribute.split(':');
            let attr = attribute.next();
            let length = attribute
                .next()
                .and_then(|s| s.parse().ok())
                .unwrap_or(default_length);
            match attr {
                Some("*") => {
                    for attr in &restricted_attributes {
                        final_attributes.insert(attr.to_string(), length);
                    }
                }
                Some(attr) => {
                    if available_attributes.contains(attr) {
                        final_attributes.insert(attr.to_string(), length);
                    } else {
                        warn!(
                            "The attribute {:?} present in the attributesToCrop parameter does not exist",
                            attr
                        );
                    }
                }
                None => (),
            }
        }

        search_builder.attributes_to_crop(final_attributes);
    }

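    // `attributesToHighlight` follows the same rules: a comma-separated list of
    // attribute names, where `*` highlights every retrievable attribute.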
    if let Some(attributes_to_highlight) = &params.attributes_to_highlight {
        let mut final_attributes: HashSet<String> = HashSet::new();
        for attribute in attributes_to_highlight.split(',') {
            if attribute == "*" {
                for attr in &restricted_attributes {
                    final_attributes.insert(attr.to_string());
                }
            } else if available_attributes.contains(attribute) {
                final_attributes.insert(attribute.to_string());
            } else {
                warn!(
                    "The attribute {:?} present in the attributesToHighlight parameter does not exist",
                    attribute
                );
            }
        }

        search_builder.attributes_to_highlight(final_attributes);
    }

    if let Some(filters) = &params.filters {
        search_builder.filters(filters.to_string());
    }

    if let Some(timeout_ms) = params.timeout_ms {
        search_builder.timeout(Duration::from_millis(timeout_ms));
    }

    if params.matches.unwrap_or(false) {
        search_builder.get_matches();
    }

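    // Run the search; internal errors become internal-error responses,
    // anything else is reported as a bad request.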
    let response = match search_builder.search(&reader) {
        Ok(response) => response,
        Err(Error::Internal(message)) => return Err(ResponseError::Internal(message).into()),
        Err(others) => return Err(ResponseError::BadRequest(others.to_string()).into()),
    };

    Ok(web::Json(response))
}

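/// JSON body accepted by the multi-index search route. As with `SearchQuery`,
/// fields are exposed in camelCase; `indexes` may contain `"*"` to search
/// every index.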
#[derive(Clone, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct SearchMultiBody {
    indexes: HashSet<String>,
    query: String,
    offset: Option<usize>,
    limit: Option<usize>,
    attributes_to_retrieve: Option<HashSet<String>>,
    searchable_attributes: Option<HashSet<String>>,
    attributes_to_crop: Option<HashMap<String, usize>>,
    attributes_to_highlight: Option<HashSet<String>>,
    filters: Option<String>,
    timeout_ms: Option<u64>,
    matches: Option<bool>,
}

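/// Response of the multi-index search route: hits grouped per index uid,
/// plus the pagination actually used and the slowest per-index processing time.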
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SearchMultiBodyResponse {
    hits: HashMap<String, Vec<SearchHit>>,
    offset: usize,
    hits_per_page: usize,
    processing_time_ms: usize,
    query: String,
}

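/// Searches several indexes at once and groups the hits per index.
///
/// A hypothetical body (index uids are only examples) could be:
/// `{"indexes": ["movies", "books"], "query": "batman", "limit": 5}`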
#[post("/indexes/search")]
pub async fn search_multi_index(
    data: web::Data<Data>,
    body: web::Json<SearchMultiBody>,
) -> aweb::Result<web::Json<SearchMultiBodyResponse>> {
    let mut index_list = body.clone().indexes;

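    // A single "*" entry expands the list to every index known to the database.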
    for index in index_list.clone() {
        if index == "*" {
            index_list = data.db.indexes_uids().into_iter().collect();
            break;
        }
    }

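    // Pagination defaults to the first 20 hits; a custom offset/limit pair is
    // only honoured when both values are provided.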
    let mut offset = 0;
    let mut count = 20;
    let query = body.query.clone();

    if let (Some(body_offset), Some(limit)) = (body.offset, body.limit) {
        offset = body_offset;
        count = limit;
    }

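    // Fan the query out to every index in parallel with rayon. Note that
    // `open_index`, `main_read_txn` and `search` are unwrapped here, so a
    // missing index or a failing search will panic instead of returning an
    // error response.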
    let par_body = body.clone();
    let responses_per_index: Vec<(String, SearchResult)> = index_list
        .into_par_iter()
        .map(move |index_uid| {
            let index = data.db.open_index(&index_uid).unwrap();

            let mut search_builder = index.new_search(par_body.query.clone());

            search_builder.offset(offset);
            search_builder.limit(count);

            if let Some(attributes_to_retrieve) = par_body.attributes_to_retrieve.clone() {
                search_builder.attributes_to_retrieve(attributes_to_retrieve);
            }
            if let Some(attributes_to_crop) = par_body.attributes_to_crop.clone() {
                search_builder.attributes_to_crop(attributes_to_crop);
            }
            if let Some(attributes_to_highlight) = par_body.attributes_to_highlight.clone() {
                search_builder.attributes_to_highlight(attributes_to_highlight);
            }
            if let Some(filters) = par_body.filters.clone() {
                search_builder.filters(filters);
            }
            if let Some(timeout_ms) = par_body.timeout_ms {
                search_builder.timeout(Duration::from_millis(timeout_ms));
            }
            if par_body.matches.unwrap_or(false) {
                search_builder.get_matches();
            }

            let reader = data.db.main_read_txn().unwrap();
            let response = search_builder.search(&reader).unwrap();

            (index_uid, response)
        })
        .collect();

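    // Merge the per-index results: hits are grouped by index uid and the
    // reported processing time is the slowest of the individual searches.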
    let mut hits_map = HashMap::new();
    let mut max_query_time = 0;

    for (index_uid, response) in responses_per_index {
        if response.processing_time_ms > max_query_time {
            max_query_time = response.processing_time_ms;
        }
        hits_map.insert(index_uid, response.hits);
    }

    let response = SearchMultiBodyResponse {
        hits: hits_map,
        offset,
        hits_per_page: count,
        processing_time_ms: max_query_time,
        query,
    };

    Ok(web::Json(response))
}