diff --git a/meilisearch-http/src/helpers/meilisearch.rs b/meilisearch-http/src/helpers/meilisearch.rs
index a925562be..d75fdcced 100644
--- a/meilisearch-http/src/helpers/meilisearch.rs
+++ b/meilisearch-http/src/helpers/meilisearch.rs
@@ -75,7 +75,7 @@ impl From for Error {
             };
             let message = format!("parsing error on line {} at column {}: {}", line, column, e.variant.message());
-            Error::FilterParsing(message)
+            Error::FilterParsing(message)
         },
         _ => Error::Internal(error.to_string()),
     }
@@ -257,7 +257,7 @@ impl<'a> SearchBuilder<'a> {
         for doc in docs {
             let mut document: IndexMap = self
                 .index
-                .document(reader, Some(&all_attributes), doc.id)
+                .document(reader, Some(all_attributes.clone()), doc.id)
                 .map_err(|e| Error::RetrieveDocument(doc.id.0, e.to_string()))?
                 .ok_or(Error::DocumentNotFound(doc.id.0))?;
 
diff --git a/meilisearch-http/src/helpers/mod.rs b/meilisearch-http/src/helpers/mod.rs
index f42cb57c3..eb40f27e6 100644
--- a/meilisearch-http/src/helpers/mod.rs
+++ b/meilisearch-http/src/helpers/mod.rs
@@ -1,2 +1,2 @@
-// pub mod meilisearch;
+pub mod meilisearch;
 // pub mod tide;
diff --git a/meilisearch-http/src/routes/document.rs b/meilisearch-http/src/routes/document.rs
index a28f9a73c..9c4b88142 100644
--- a/meilisearch-http/src/routes/document.rs
+++ b/meilisearch-http/src/routes/document.rs
@@ -45,7 +45,7 @@ pub async fn delete_document(
     documents_deletion.delete_document_by_id(document_id);
 
     let update_id = documents_deletion.finalize(&mut update_writer)
-        .map_err(|_| ResponseError::Internal(path.1.clone()))?;
+        .map_err(|err| ResponseError::Internal(err.to_string()))?;
 
     update_writer.commit()
         .map_err(|_| ResponseError::CommitTransaction)?;
diff --git a/meilisearch-http/src/routes/mod.rs b/meilisearch-http/src/routes/mod.rs
index 8851d26f3..adf4f0e63 100644
--- a/meilisearch-http/src/routes/mod.rs
+++ b/meilisearch-http/src/routes/mod.rs
@@ -6,7 +6,7 @@ pub mod document;
 pub mod health;
 // pub mod index;
 pub mod key;
-// pub mod search;
+pub mod search;
 // pub mod setting;
 // pub mod stats;
 // pub mod stop_words;
diff --git a/meilisearch-http/src/routes/search.rs b/meilisearch-http/src/routes/search.rs
index 4d547d268..2efd24580 100644
--- a/meilisearch-http/src/routes/search.rs
+++ b/meilisearch-http/src/routes/search.rs
@@ -6,12 +6,12 @@ use log::warn;
 use meilisearch_core::Index;
 use rayon::iter::{IntoParallelIterator, ParallelIterator};
 use serde::{Deserialize, Serialize};
-use tide::{Request, Response};
+use actix_web::*;
 
-use crate::error::{ResponseError, SResult};
-use crate::helpers::meilisearch::{Error, IndexSearchExt, SearchHit};
-use crate::helpers::tide::RequestExt;
-use crate::helpers::tide::ACL::*;
+use crate::error::ResponseError;
+use crate::helpers::meilisearch::{Error, IndexSearchExt, SearchHit, SearchResult};
+// use crate::helpers::tide::RequestExt;
+// use crate::helpers::tide::ACL::*;
 use crate::Data;
 
 #[derive(Deserialize)]
@@ -29,34 +29,37 @@ struct SearchQuery {
     matches: Option<bool>,
 }
 
-pub async fn search_with_url_query(ctx: Request) -> SResult {
-    ctx.is_allowed(Public)?;
+#[get("/indexes/{index_uid}/search")]
+pub async fn search_with_url_query(
+    data: web::Data<Data>,
+    path: web::Path<String>,
+    params: web::Query<SearchQuery>,
+) -> Result<web::Json<SearchResult>> {
 
-    let index = ctx.index()?;
-    let db = &ctx.state().db;
-    let reader = db.main_read_txn()?;
+    let index = data.db.open_index(path.clone())
+        .ok_or(ResponseError::IndexNotFound(path.clone()))?;
+
+    let reader = data.db.main_read_txn()
+        .map_err(|_| ResponseError::CreateTransaction)?;
 
     let schema = index
         .main
-        .schema(&reader)?
-        .ok_or(ResponseError::open_index("No Schema found"))?;
+        .schema(&reader)
+        .map_err(|_| ResponseError::Schema)?
+        .ok_or(ResponseError::Schema)?;
 
-    let query: SearchQuery = ctx
-        .query()
-        .map_err(|_| ResponseError::bad_request("invalid query parameter"))?;
+    let mut search_builder = index.new_search(params.q.clone());
 
-    let mut search_builder = index.new_search(query.q.clone());
-
-    if let Some(offset) = query.offset {
+    if let Some(offset) = params.offset {
         search_builder.offset(offset);
     }
-    if let Some(limit) = query.limit {
+    if let Some(limit) = params.limit {
         search_builder.limit(limit);
     }
 
     let available_attributes = schema.displayed_name();
     let mut restricted_attributes: HashSet<&str>;
-    match &query.attributes_to_retrieve {
+    match &params.attributes_to_retrieve {
         Some(attributes_to_retrieve) => {
             let attributes_to_retrieve: HashSet<&str> = attributes_to_retrieve.split(',').collect();
             if attributes_to_retrieve.contains("*") {
@@ -78,8 +81,8 @@ pub async fn search_with_url_query(ctx: Request) -> SResult {
         }
     }
 
-    if let Some(attributes_to_crop) = query.attributes_to_crop {
-        let default_length = query.crop_length.unwrap_or(200);
+    if let Some(attributes_to_crop) = &params.attributes_to_crop {
+        let default_length = params.crop_length.unwrap_or(200);
         let mut final_attributes: HashMap = HashMap::new();
 
         for attribute in attributes_to_crop.split(',') {
@@ -106,7 +109,7 @@ pub async fn search_with_url_query(ctx: Request) -> SResult {
         search_builder.attributes_to_crop(final_attributes);
     }
 
-    if let Some(attributes_to_highlight) = query.attributes_to_highlight {
+    if let Some(attributes_to_highlight) = &params.attributes_to_highlight {
         let mut final_attributes: HashSet = HashSet::new();
         for attribute in attributes_to_highlight.split(',') {
             if attribute == "*" {
@@ -125,15 +128,15 @@ pub async fn search_with_url_query(ctx: Request) -> SResult {
         search_builder.attributes_to_highlight(final_attributes);
     }
 
-    if let Some(filters) = query.filters {
-        search_builder.filters(filters);
+    if let Some(filters) = &params.filters {
+        search_builder.filters(filters.to_string());
     }
 
-    if let Some(timeout_ms) = query.timeout_ms {
+    if let Some(timeout_ms) = params.timeout_ms {
         search_builder.timeout(Duration::from_millis(timeout_ms));
     }
 
-    if let Some(matches) = query.matches {
+    if let Some(matches) = params.matches {
         if matches {
             search_builder.get_matches();
         }
@@ -141,11 +144,11 @@ pub async fn search_with_url_query(ctx: Request) -> SResult {
 
     let response = match search_builder.search(&reader) {
         Ok(response) => response,
-        Err(Error::Internal(message)) => return Err(ResponseError::Internal(message)),
-        Err(others) => return Err(ResponseError::bad_request(others)),
+        Err(Error::Internal(message)) => return Err(ResponseError::Internal(message))?,
+        Err(others) => return Err(ResponseError::BadRequest(others.to_string()))?,
     };
 
-    Ok(tide::Response::new(200).body_json(&response).unwrap())
+    Ok(web::Json(response))
 }
 
 #[derive(Clone, Deserialize)]
@@ -174,24 +177,24 @@ struct SearchMultiBodyResponse {
     query: String,
 }
 
-pub async fn search_multi_index(mut ctx: Request) -> SResult {
-    ctx.is_allowed(Public)?;
-    let body = ctx
-        .body_json::()
-        .await
-        .map_err(ResponseError::bad_request)?;
+#[post("/indexes/search")]
+pub async fn search_multi_index(
+    data: web::Data<Data>,
+    body: web::Json,
+) -> Result<web::Json<SearchMultiBodyResponse>> {
 
     let mut index_list = body.clone().indexes;
 
     for index in index_list.clone() {
         if index == "*" {
-            index_list = ctx.state().db.indexes_uids().into_iter().collect();
+            index_list = data.db.indexes_uids().into_iter().collect();
             break;
         }
     }
 
     let mut offset = 0;
     let mut count = 20;
+    let query = body.query.clone();
 
     if let Some(body_offset) = body.offset {
         if let Some(limit) = body.limit {
@@ -200,16 +203,12 @@ pub async fn search_multi_index(mut ctx: Request) -> SResult {
         }
     }
 
-    let offset = offset;
-    let count = count;
-    let db = &ctx.state().db;
+    let par_body = body.clone();
 
-    let responses_per_index: Vec> = index_list
+    let responses_per_index: Vec<(String, SearchResult)> = index_list
        .into_par_iter()
        .map(move |index_uid| {
-            let index: Index = db
-                .open_index(&index_uid)
-                .ok_or(ResponseError::index_not_found(&index_uid))?;
+            let index = data.db.open_index(&index_uid).unwrap();
 
            let mut search_builder = index.new_search(par_body.query.clone());
 
@@ -237,9 +236,10 @@ pub async fn search_multi_index(mut ctx: Request) -> SResult {
                 }
             }
 
-            let reader = db.main_read_txn()?;
-            let response = search_builder.search(&reader)?;
-            Ok((index_uid, response))
+            let reader = data.db.main_read_txn().unwrap();
+            let response = search_builder.search(&reader).unwrap();
+
+            (index_uid, response)
         })
         .collect();
 
@@ -247,13 +247,11 @@ pub async fn search_multi_index(mut ctx: Request) -> SResult {
 
     let mut max_query_time = 0;
 
-    for response in responses_per_index {
-        if let Ok((index_uid, response)) = response {
-            if response.processing_time_ms > max_query_time {
-                max_query_time = response.processing_time_ms;
-            }
-            hits_map.insert(index_uid, response.hits);
+    for (index_uid, response) in responses_per_index {
+        if response.processing_time_ms > max_query_time {
+            max_query_time = response.processing_time_ms;
         }
+        hits_map.insert(index_uid, response.hits);
     }
 
     let response = SearchMultiBodyResponse {
@@ -261,8 +259,8 @@ pub async fn search_multi_index(mut ctx: Request) -> SResult {
         offset,
         hits_per_page: count,
         processing_time_ms: max_query_time,
-        query: body.query,
+        query,
     };
 
-    Ok(tide::Response::new(200).body_json(&response).unwrap())
+    Ok(web::Json(response))
 }
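Note: with the handlers converted to actix-web attribute macros, they are registered as services on the `App` instead of being mounted on a tide router. The sketch below is one way the wiring could look; the `run_server` helper, module paths, and bind address are assumptions for illustration and are not part of this diff (depending on the actix-web version, `.data(...)` may be used instead of `.app_data(web::Data::new(...))`).

```rust
use actix_web::{web, App, HttpServer};

use crate::routes; // assumed module layout
use crate::Data;   // the state type the handlers extract via `web::Data<Data>`

// Hypothetical wiring, not part of the diff: register the #[get]/#[post]
// handlers from routes::search and share `Data` with their extractors.
pub async fn run_server(data: Data) -> std::io::Result<()> {
    let data = web::Data::new(data);
    HttpServer::new(move || {
        App::new()
            .app_data(data.clone())
            .service(routes::search::search_with_url_query) // GET /indexes/{index_uid}/search
            .service(routes::search::search_multi_index)    // POST /indexes/search
    })
    .bind("127.0.0.1:7700")? // assumed address/port
    .run()
    .await
}
```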