Merge pull request #735 from MarinPostma/post-search-route

Post search route
This commit is contained in:
Clément Renault 2020-06-15 22:32:12 +02:00 committed by GitHub
commit 95d1762f19
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
10 changed files with 770 additions and 463 deletions

View File

@@ -21,6 +21,7 @@
- Add support for error code reporting (#703) - Add support for error code reporting (#703)
- Allow the dashboard to query private servers (#732) - Allow the dashboard to query private servers (#732)
- Add telemetry (#720) - Add telemetry (#720)
- Add post route for search (#735)
## v0.10.1 ## v0.10.1

11
Cargo.lock generated
View File

@@ -1604,6 +1604,7 @@ dependencies = [
"serde", "serde",
"serde_json", "serde_json",
"serde_qs", "serde_qs",
"serde_url_params",
"sha2", "sha2",
"siphasher", "siphasher",
"slice-group-by", "slice-group-by",
@@ -2476,6 +2477,16 @@ dependencies = [
"serde", "serde",
] ]
[[package]]
name = "serde_url_params"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d24680ccd1ad7cdee9e8affa70f37d081b3d14d3800d33a28f474d0f7a55f305"
dependencies = [
"serde",
"url",
]
[[package]] [[package]]
name = "serde_urlencoded" name = "serde_urlencoded"
version = "0.6.1" version = "0.6.1"

View File

@ -73,6 +73,7 @@ optional = true
[dev-dependencies] [dev-dependencies]
tempdir = "0.3.7" tempdir = "0.3.7"
tokio = { version = "0.2.18", features = ["macros", "time"] } tokio = { version = "0.2.18", features = ["macros", "time"] }
serde_url_params = "0.2.0"
[dev-dependencies.assert-json-diff] [dev-dependencies.assert-json-diff]
git = "https://github.com/qdequele/assert-json-diff" git = "https://github.com/qdequele/assert-json-diff"

View File

@ -69,7 +69,8 @@ async fn main() -> Result<(), MainError> {
.wrap( .wrap(
Cors::new() Cors::new()
.send_wildcard() .send_wildcard()
.allowed_header("x-meili-api-key") .allowed_headers(vec!["content-type","x-meili-api-key"])
.max_age(86_400) // 24h
.finish(), .finish(),
) )
.wrap(middleware::Logger::default()) .wrap(middleware::Logger::default())

View File

@ -3,12 +3,12 @@ use std::collections::{HashSet, HashMap};
use log::warn; use log::warn;
use actix_web::web; use actix_web::web;
use actix_web::HttpResponse; use actix_web::HttpResponse;
use actix_web_macros::get; use actix_web_macros::{get, post};
use serde::Deserialize; use serde::{Deserialize, Serialize};
use serde_json::Value; use serde_json::Value;
use crate::error::{Error, FacetCountError, ResponseError}; use crate::error::{Error, FacetCountError, ResponseError};
use crate::helpers::meilisearch::IndexSearchExt; use crate::helpers::meilisearch::{IndexSearchExt, SearchResult};
use crate::helpers::Authentication; use crate::helpers::Authentication;
use crate::routes::IndexParam; use crate::routes::IndexParam;
use crate::Data; use crate::Data;
@ -17,12 +17,13 @@ use meilisearch_core::facets::FacetFilter;
use meilisearch_schema::{Schema, FieldId}; use meilisearch_schema::{Schema, FieldId};
pub fn services(cfg: &mut web::ServiceConfig) { pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(search_with_url_query); cfg.service(search_with_post)
.service(search_with_url_query);
} }
#[derive(Deserialize)] #[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)] #[serde(rename_all = "camelCase", deny_unknown_fields)]
struct SearchQuery { pub struct SearchQuery {
q: String, q: String,
offset: Option<usize>, offset: Option<usize>,
limit: Option<usize>, limit: Option<usize>,
@@ -42,126 +43,176 @@ async fn search_with_url_query(
path: web::Path<IndexParam>, path: web::Path<IndexParam>,
params: web::Query<SearchQuery>, params: web::Query<SearchQuery>,
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
let index = data let search_result = params.search(&path.index_uid, data)?;
.db
.open_index(&path.index_uid)
.ok_or(Error::index_not_found(&path.index_uid))?;
let reader = data.db.main_read_txn()?;
let schema = index
.main
.schema(&reader)?
.ok_or(Error::internal("Impossible to retrieve the schema"))?;
let mut search_builder = index.new_search(params.q.clone());
if let Some(offset) = params.offset {
search_builder.offset(offset);
}
if let Some(limit) = params.limit {
search_builder.limit(limit);
}
let available_attributes = schema.displayed_name();
let mut restricted_attributes: HashSet<&str>;
match &params.attributes_to_retrieve {
Some(attributes_to_retrieve) => {
let attributes_to_retrieve: HashSet<&str> = attributes_to_retrieve.split(',').collect();
if attributes_to_retrieve.contains("*") {
restricted_attributes = available_attributes.clone();
} else {
restricted_attributes = HashSet::new();
for attr in attributes_to_retrieve {
if available_attributes.contains(attr) {
restricted_attributes.insert(attr);
search_builder.add_retrievable_field(attr.to_string());
} else {
warn!("The attributes {:?} present in attributesToCrop parameter doesn't exist", attr);
}
}
}
},
None => {
restricted_attributes = available_attributes.clone();
}
}
if let Some(ref facet_filters) = params.facet_filters {
let attrs = index.main.attributes_for_faceting(&reader)?.unwrap_or_default();
search_builder.add_facet_filters(FacetFilter::from_str(facet_filters, &schema, &attrs)?);
}
if let Some(facets) = &params.facets_distribution {
match index.main.attributes_for_faceting(&reader)? {
Some(ref attrs) => {
let field_ids = prepare_facet_list(&facets, &schema, attrs)?;
search_builder.add_facets(field_ids);
},
None => return Err(FacetCountError::NoFacetSet.into()),
}
}
if let Some(attributes_to_crop) = &params.attributes_to_crop {
let default_length = params.crop_length.unwrap_or(200);
let mut final_attributes: HashMap<String, usize> = HashMap::new();
for attribute in attributes_to_crop.split(',') {
let mut attribute = attribute.split(':');
let attr = attribute.next();
let length = attribute.next().and_then(|s| s.parse().ok()).unwrap_or(default_length);
match attr {
Some("*") => {
for attr in &restricted_attributes {
final_attributes.insert(attr.to_string(), length);
}
},
Some(attr) => {
if available_attributes.contains(attr) {
final_attributes.insert(attr.to_string(), length);
} else {
warn!("The attributes {:?} present in attributesToCrop parameter doesn't exist", attr);
}
},
None => (),
}
}
search_builder.attributes_to_crop(final_attributes);
}
if let Some(attributes_to_highlight) = &params.attributes_to_highlight {
let mut final_attributes: HashSet<String> = HashSet::new();
for attribute in attributes_to_highlight.split(',') {
if attribute == "*" {
for attr in &restricted_attributes {
final_attributes.insert(attr.to_string());
}
} else {
if available_attributes.contains(attribute) {
final_attributes.insert(attribute.to_string());
} else {
warn!("The attributes {:?} present in attributesToHighlight parameter doesn't exist", attribute);
}
}
}
search_builder.attributes_to_highlight(final_attributes);
}
if let Some(filters) = &params.filters {
search_builder.filters(filters.to_string());
}
if let Some(matches) = params.matches {
if matches {
search_builder.get_matches();
}
}
let search_result = search_builder.search(&reader)?;
Ok(HttpResponse::Ok().json(search_result)) Ok(HttpResponse::Ok().json(search_result))
} }
/// Payload accepted by the POST search route.
///
/// Mirrors `SearchQuery` (the GET query-string form) but uses JSON-native
/// types: list-valued fields are real arrays instead of comma-separated
/// strings, and `facetFilters` is an arbitrary JSON value. Unknown fields
/// are rejected (`deny_unknown_fields`).
#[derive(Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct SearchQueryPost {
    /// The search terms.
    q: String,
    /// Number of documents to skip in the result set.
    offset: Option<usize>,
    /// Maximum number of documents to return.
    limit: Option<usize>,
    /// Attribute names to include in the returned documents.
    attributes_to_retrieve: Option<Vec<String>>,
    /// Attributes to crop; entries may carry a per-attribute length
    /// once converted to the `attr:length` string form.
    attributes_to_crop: Option<Vec<String>>,
    /// Default crop length used when an entry has no explicit length.
    crop_length: Option<usize>,
    /// Attributes in which matched terms should be highlighted.
    attributes_to_highlight: Option<Vec<String>>,
    /// Raw filter expression applied to the search.
    filters: Option<String>,
    /// When true, match position information is returned with each hit.
    matches: Option<bool>,
    /// Facet filters as raw JSON (string or nested arrays).
    facet_filters: Option<Value>,
    /// Facet attributes for which to compute a distribution count.
    facets_distribution: Option<Vec<String>>,
}
impl From<SearchQueryPost> for SearchQuery {
fn from(other: SearchQueryPost) -> SearchQuery {
SearchQuery {
q: other.q,
offset: other.offset,
limit: other.limit,
attributes_to_retrieve: other.attributes_to_retrieve.map(|attrs| attrs.join(",")),
attributes_to_crop: other.attributes_to_crop.map(|attrs| attrs.join(",")),
crop_length: other.crop_length,
attributes_to_highlight: other.attributes_to_highlight.map(|attrs| attrs.join(",")),
filters: other.filters,
matches: other.matches,
facet_filters: other.facet_filters.map(|f| f.to_string()),
facets_distribution: other.facets_distribution.map(|f| format!("{:?}", f)),
}
}
}
#[post("/indexes/{index_uid}/search", wrap = "Authentication::Public")]
async fn search_with_post(
data: web::Data<Data>,
path: web::Path<IndexParam>,
params: web::Json<SearchQueryPost>,
) -> Result<HttpResponse, ResponseError> {
let query: SearchQuery = params.0.into();
let search_result = query.search(&path.index_uid, data)?;
Ok(HttpResponse::Ok().json(search_result))
}
impl SearchQuery {
fn search(&self, index_uid: &str, data: web::Data<Data>) -> Result<SearchResult, ResponseError> {
let index = data
.db
.open_index(index_uid)
.ok_or(Error::index_not_found(index_uid))?;
let reader = data.db.main_read_txn()?;
let schema = index
.main
.schema(&reader)?
.ok_or(Error::internal("Impossible to retrieve the schema"))?;
let mut search_builder = index.new_search(self.q.clone());
if let Some(offset) = self.offset {
search_builder.offset(offset);
}
if let Some(limit) = self.limit {
search_builder.limit(limit);
}
let available_attributes = schema.displayed_name();
let mut restricted_attributes: HashSet<&str>;
match &self.attributes_to_retrieve {
Some(attributes_to_retrieve) => {
let attributes_to_retrieve: HashSet<&str> = attributes_to_retrieve.split(',').collect();
if attributes_to_retrieve.contains("*") {
restricted_attributes = available_attributes.clone();
} else {
restricted_attributes = HashSet::new();
for attr in attributes_to_retrieve {
if available_attributes.contains(attr) {
restricted_attributes.insert(attr);
search_builder.add_retrievable_field(attr.to_string());
} else {
warn!("The attributes {:?} present in attributesToCrop parameter doesn't exist", attr);
}
}
}
},
None => {
restricted_attributes = available_attributes.clone();
}
}
if let Some(ref facet_filters) = self.facet_filters {
let attrs = index.main.attributes_for_faceting(&reader)?.unwrap_or_default();
search_builder.add_facet_filters(FacetFilter::from_str(facet_filters, &schema, &attrs)?);
}
if let Some(facets) = &self.facets_distribution {
match index.main.attributes_for_faceting(&reader)? {
Some(ref attrs) => {
let field_ids = prepare_facet_list(&facets, &schema, attrs)?;
search_builder.add_facets(field_ids);
},
None => return Err(FacetCountError::NoFacetSet.into()),
}
}
if let Some(attributes_to_crop) = &self.attributes_to_crop {
let default_length = self.crop_length.unwrap_or(200);
let mut final_attributes: HashMap<String, usize> = HashMap::new();
for attribute in attributes_to_crop.split(',') {
let mut attribute = attribute.split(':');
let attr = attribute.next();
let length = attribute.next().and_then(|s| s.parse().ok()).unwrap_or(default_length);
match attr {
Some("*") => {
for attr in &restricted_attributes {
final_attributes.insert(attr.to_string(), length);
}
},
Some(attr) => {
if available_attributes.contains(attr) {
final_attributes.insert(attr.to_string(), length);
} else {
warn!("The attributes {:?} present in attributesToCrop parameter doesn't exist", attr);
}
},
None => (),
}
}
search_builder.attributes_to_crop(final_attributes);
}
if let Some(attributes_to_highlight) = &self.attributes_to_highlight {
let mut final_attributes: HashSet<String> = HashSet::new();
for attribute in attributes_to_highlight.split(',') {
if attribute == "*" {
for attr in &restricted_attributes {
final_attributes.insert(attr.to_string());
}
} else {
if available_attributes.contains(attribute) {
final_attributes.insert(attribute.to_string());
} else {
warn!("The attributes {:?} present in attributesToHighlight parameter doesn't exist", attribute);
}
}
}
search_builder.attributes_to_highlight(final_attributes);
}
if let Some(filters) = &self.filters {
search_builder.filters(filters.to_string());
}
if let Some(matches) = self.matches {
if matches {
search_builder.get_matches();
}
}
search_builder.search(&reader)
}
}
/// Parses the incoming string into an array of attributes for which to return a count. It returns /// Parses the incoming string into an array of attributes for which to return a count. It returns
/// a Vec of attribute names ascociated with their id. /// a Vec of attribute names ascociated with their id.
/// ///

View File

@ -252,11 +252,16 @@ impl Server {
self.delete_request(&url).await self.delete_request(&url).await
} }
pub async fn search(&mut self, query: &str) -> (Value, StatusCode) { pub async fn search_get(&mut self, query: &str) -> (Value, StatusCode) {
let url = format!("/indexes/{}/search?{}", self.uid, query); let url = format!("/indexes/{}/search?{}", self.uid, query);
self.get_request(&url).await self.get_request(&url).await
} }
/// Sends a POST search request for this server's index with the given
/// JSON body and returns the response body and status code.
pub async fn search_post(&mut self, body: Value) -> (Value, StatusCode) {
    self.post_request(&format!("/indexes/{}/search", self.uid), body)
        .await
}
pub async fn get_all_updates_status(&mut self) -> (Value, StatusCode) { pub async fn get_all_updates_status(&mut self) -> (Value, StatusCode) {
let url = format!("/indexes/{}/updates", self.uid); let url = format!("/indexes/{}/updates", self.uid);
self.get_request(&url).await self.get_request(&url).await

View File

@ -629,7 +629,7 @@ async fn create_index_without_primary_key_and_search() {
let query = "q=captain&limit=3"; let query = "q=captain&limit=3";
let (response, status_code) = server.search(&query).await; let (response, status_code) = server.search_get(&query).await;
assert_eq!(status_code, 200); assert_eq!(status_code, 200);
assert_eq!(response["hits"].as_array().unwrap().len(), 0); assert_eq!(response["hits"].as_array().unwrap().len(), 0);
} }

File diff suppressed because it is too large Load Diff

View File

@ -106,7 +106,7 @@ async fn search_with_settings_basic() {
} }
]); ]);
let (response, _status_code) = server.search(query).await; let (response, _status_code) = server.search_get(query).await;
assert_json_eq!(expect, response["hits"].clone(), ordered: false); assert_json_eq!(expect, response["hits"].clone(), ordered: false);
} }
@ -212,7 +212,7 @@ async fn search_with_settings_stop_words() {
} }
]); ]);
let (response, _status_code) = server.search(query).await; let (response, _status_code) = server.search_get(query).await;
assert_json_eq!(expect, response["hits"].clone(), ordered: false); assert_json_eq!(expect, response["hits"].clone(), ordered: false);
} }
@ -323,7 +323,7 @@ async fn search_with_settings_synonyms() {
} }
]); ]);
let (response, _status_code) = server.search(query).await; let (response, _status_code) = server.search_get(query).await;
assert_json_eq!(expect, response["hits"].clone(), ordered: false); assert_json_eq!(expect, response["hits"].clone(), ordered: false);
} }
@ -429,7 +429,7 @@ async fn search_with_settings_ranking_rules() {
} }
]); ]);
let (response, _status_code) = server.search(query).await; let (response, _status_code) = server.search_get(query).await;
assert_json_eq!(expect, response["hits"].clone(), ordered: false); assert_json_eq!(expect, response["hits"].clone(), ordered: false);
} }
@ -534,7 +534,7 @@ async fn search_with_settings_searchable_attributes() {
} }
]); ]);
let (response, _status_code) = server.search(query).await; let (response, _status_code) = server.search_get(query).await;
assert_json_eq!(expect, response["hits"].clone(), ordered: false); assert_json_eq!(expect, response["hits"].clone(), ordered: false);
} }
@ -604,7 +604,7 @@ async fn search_with_settings_displayed_attributes() {
} }
]); ]);
let (response, _status_code) = server.search(query).await; let (response, _status_code) = server.search_get(query).await;
assert_json_eq!(expect, response["hits"].clone(), ordered: false); assert_json_eq!(expect, response["hits"].clone(), ordered: false);
} }
@ -674,6 +674,6 @@ async fn search_with_settings_searchable_attributes_2() {
} }
]); ]);
let (response, _status_code) = server.search(query).await; let (response, _status_code) = server.search_get(query).await;
assert_json_eq!(expect, response["hits"].clone(), ordered: false); assert_json_eq!(expect, response["hits"].clone(), ordered: false);
} }

View File

@ -45,12 +45,12 @@ async fn add_documents_and_stop_words() {
// 3 - Search for a document with stop words // 3 - Search for a document with stop words
let (response, _status_code) = server.search("q=the%20mask").await; let (response, _status_code) = server.search_get("q=the%20mask").await;
assert!(!response["hits"].as_array().unwrap().is_empty()); assert!(!response["hits"].as_array().unwrap().is_empty());
// 4 - Search for documents with *only* stop words // 4 - Search for documents with *only* stop words
let (response, _status_code) = server.search("q=the%20of").await; let (response, _status_code) = server.search_get("q=the%20of").await;
assert!(response["hits"].as_array().unwrap().is_empty()); assert!(response["hits"].as_array().unwrap().is_empty());
// 5 - Delete all stop words // 5 - Delete all stop words