Fix the tests
commit 0578aff8c9
parent 1d217cef19
@@ -2,8 +2,9 @@ use actix_web::{web, HttpRequest, HttpResponse};
 use log::debug;
 use meilisearch_auth::IndexSearchRules;
 use meilisearch_lib::index::{
-    SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG,
-    DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_HIT_PER_PAGE, DEFAULT_PAGE, MatchingStrategy
+    MatchingStrategy, SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
+    DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT,
+    DEFAULT_SEARCH_OFFSET,
 };
 use meilisearch_lib::MeiliSearch;
 use meilisearch_types::error::ResponseError;
@@ -27,12 +28,12 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
 #[serde(rename_all = "camelCase", deny_unknown_fields)]
 pub struct SearchQueryGet {
     q: Option<String>,
-    offset: Option<usize>,
-    limit: Option<usize>,
-    #[serde(default = "DEFAULT_PAGE")]
-    page: usize,
-    #[serde(default = "DEFAULT_HIT_PER_PAGE")]
-    hits_per_page: usize,
+    #[serde(default = "DEFAULT_SEARCH_OFFSET")]
+    offset: usize,
+    #[serde(default = "DEFAULT_SEARCH_LIMIT")]
+    limit: usize,
+    page: Option<usize>,
+    hits_per_page: Option<usize>,
     attributes_to_retrieve: Option<CS<String>>,
     attributes_to_crop: Option<CS<String>>,
     #[serde(default = "DEFAULT_CROP_LENGTH")]
@@ -1,7 +1,7 @@
 pub use search::{
-    HitsInfo, SearchQuery, SearchResult, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
-    DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_HIT_PER_PAGE, DEFAULT_PAGE,
-    DEFAULT_SEARCH_LIMIT, MatchingStrategy
+    HitsInfo, MatchingStrategy, SearchQuery, SearchResult, DEFAULT_CROP_LENGTH,
+    DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG,
+    DEFAULT_SEARCH_LIMIT, DEFAULT_SEARCH_OFFSET,
 };
 pub use updates::{apply_settings_to_builder, Checked, Facets, Settings, Unchecked};
 
@@ -21,13 +21,12 @@ use super::index::Index;
 pub type Document = serde_json::Map<String, Value>;
 type MatchesPosition = BTreeMap<String, Vec<MatchBounds>>;
 
+pub const DEFAULT_SEARCH_OFFSET: fn() -> usize = || 0;
 pub const DEFAULT_SEARCH_LIMIT: fn() -> usize = || 20;
 pub const DEFAULT_CROP_LENGTH: fn() -> usize = || 10;
 pub const DEFAULT_CROP_MARKER: fn() -> String = || "…".to_string();
 pub const DEFAULT_HIGHLIGHT_PRE_TAG: fn() -> String = || "<em>".to_string();
 pub const DEFAULT_HIGHLIGHT_POST_TAG: fn() -> String = || "</em>".to_string();
-pub const DEFAULT_PAGE: fn() -> usize = || 1;
-pub const DEFAULT_HIT_PER_PAGE: fn() -> usize = || 20;
 
 /// The maximum number of results that the engine
 /// will be able to return in one search call.
@@ -37,12 +36,12 @@ pub const DEFAULT_PAGINATION_MAX_TOTAL_HITS: usize = 1000;
 #[serde(rename_all = "camelCase", deny_unknown_fields)]
 pub struct SearchQuery {
     pub q: Option<String>,
-    pub offset: Option<usize>,
-    pub limit: Option<usize>,
-    #[serde(default = "DEFAULT_PAGE")]
-    pub page: usize,
-    #[serde(default = "DEFAULT_HIT_PER_PAGE")]
-    pub hits_per_page: usize,
+    #[serde(default = "DEFAULT_SEARCH_OFFSET")]
+    pub offset: usize,
+    #[serde(default = "DEFAULT_SEARCH_LIMIT")]
+    pub limit: usize,
+    pub page: Option<usize>,
+    pub hits_per_page: Option<usize>,
     pub attributes_to_retrieve: Option<BTreeSet<String>>,
     pub attributes_to_crop: Option<Vec<String>>,
     #[serde(default = "DEFAULT_CROP_LENGTH")]
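Note (not part of the diff): the `DEFAULT_*` values are function-pointer constants because serde's `#[serde(default = "path")]` attribute calls the named path to fill in a missing field. A minimal, standalone sketch of the pattern the new fields rely on follows; `SearchQuerySketch` and the JSON payload are illustrative, not Meilisearch's actual types.

```rust
use serde::Deserialize;

// Function-pointer constants, mirroring the ones added in this commit.
const DEFAULT_SEARCH_OFFSET: fn() -> usize = || 0;
const DEFAULT_SEARCH_LIMIT: fn() -> usize = || 20;

// Illustrative stand-in for SearchQuery; only the pagination fields are kept.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
struct SearchQuerySketch {
    q: Option<String>,
    // A missing `offset`/`limit` calls the named function to produce the value.
    #[serde(default = "DEFAULT_SEARCH_OFFSET")]
    offset: usize,
    #[serde(default = "DEFAULT_SEARCH_LIMIT")]
    limit: usize,
    // No default: an absent `page`/`hitsPerPage` deserializes as `None`.
    page: Option<usize>,
    hits_per_page: Option<usize>,
}

fn main() {
    let q: SearchQuerySketch = serde_json::from_str(r#"{ "q": "hello" }"#).unwrap();
    assert_eq!((q.offset, q.limit), (0, 20));
    assert_eq!((q.page, q.hits_per_page), (None, None));
}
```

With this shape, omitting `offset`/`limit` yields the defaults, while omitting `page`/`hitsPerPage` leaves `None`, which is exactly what the new `is_finite_pagination` check keys on.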
@@ -145,33 +144,26 @@ impl Index {
             .pagination_max_total_hits(&rtxn)?
             .unwrap_or(DEFAULT_PAGINATION_MAX_TOTAL_HITS);
 
-        // Make sure that a user can't get more documents than the hard limit,
-        // we align that on the offset too.
-        let is_finite_pagination = query.offset.is_none() && query.limit.is_none();
+        let is_finite_pagination = query.page.or(query.hits_per_page).is_some();
 
         search.exhaustive_number_hits(is_finite_pagination);
 
+        // compute the offset on the limit depending on the pagination mode.
         let (offset, limit) = if is_finite_pagination {
-            match query.page.checked_sub(1) {
-                Some(page) => {
-                    let offset = min(query.hits_per_page * page, max_total_hits);
-                    let limit = min(query.hits_per_page, max_total_hits.saturating_sub(offset));
+            let limit = query.hits_per_page.unwrap_or_else(DEFAULT_SEARCH_LIMIT);
+            let page = query.page.unwrap_or(1);
 
-                    (offset, limit)
-                }
-                // page 0 returns 0 hits
-                None => (0, 0),
-            }
+            // page 0 gives a limit of 0 forcing Meilisearch to return no document.
+            page.checked_sub(1).map_or((0, 0), |p| (limit * p, limit))
         } else {
-            let offset = min(query.offset.unwrap_or(0), max_total_hits);
-            let limit = min(
-                query.limit.unwrap_or_else(DEFAULT_SEARCH_LIMIT),
-                max_total_hits.saturating_sub(offset),
-            );
-
-            (offset, limit)
+            (query.offset, query.limit)
         };
 
+        // Make sure that a user can't get more documents than the hard limit,
+        // we align that on the offset too.
+        let offset = min(offset, max_total_hits);
+        let limit = min(limit, max_total_hits.saturating_sub(offset));
+
         search.offset(offset);
         search.limit(limit);
 
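Note (not part of the diff): the rewritten block treats the request as finite pagination when `page` or `hits_per_page` is supplied, derives `offset`/`limit` from them in that case, otherwise takes the raw `offset`/`limit`, and finally clamps both against the pagination hard limit. A self-contained sketch of that computation, with the hypothetical helper `compute_offset_limit` standing in for the body of the search method and 20 standing in for `DEFAULT_SEARCH_LIMIT`:

```rust
use std::cmp::min;

// Illustrative helper; names mirror the diff but this is not the real API.
fn compute_offset_limit(
    page: Option<usize>,
    hits_per_page: Option<usize>,
    offset: usize,
    limit: usize,
    max_total_hits: usize,
) -> (usize, usize) {
    // Finite pagination is requested as soon as page or hitsPerPage is set.
    let is_finite_pagination = page.or(hits_per_page).is_some();

    let (offset, limit) = if is_finite_pagination {
        let limit = hits_per_page.unwrap_or(20); // DEFAULT_SEARCH_LIMIT
        let page = page.unwrap_or(1);
        // page 0 yields (0, 0): no document is returned.
        page.checked_sub(1).map_or((0, 0), |p| (limit * p, limit))
    } else {
        (offset, limit)
    };

    // Clamp to the pagination hard limit, aligning the limit on the offset.
    let offset = min(offset, max_total_hits);
    let limit = min(limit, max_total_hits.saturating_sub(offset));
    (offset, limit)
}

fn main() {
    // page=3, hitsPerPage=25 -> window starting at 50, 25 documents wide.
    assert_eq!(compute_offset_limit(Some(3), Some(25), 0, 20, 1000), (50, 25));
    // page=0 forces an empty window, as the comment in the diff describes.
    assert_eq!(compute_offset_limit(Some(0), Some(25), 0, 20, 1000), (0, 0));
    // offset/limit mode with an offset past the hard limit -> empty window.
    assert_eq!(compute_offset_limit(None, None, 1200, 20, 1000), (1000, 0));
}
```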
@@ -297,20 +289,21 @@ impl Index {
 
         let number_of_hits = min(candidates.len() as usize, max_total_hits);
         let hits_info = if is_finite_pagination {
+            let hits_per_page = query.hits_per_page.unwrap_or_else(DEFAULT_SEARCH_LIMIT);
             // If hit_per_page is 0, then pages can't be computed and so we respond 0.
-            let total_pages = (number_of_hits + query.hits_per_page.saturating_sub(1))
-                .checked_div(query.hits_per_page)
+            let total_pages = (number_of_hits + hits_per_page.saturating_sub(1))
+                .checked_div(hits_per_page)
                 .unwrap_or(0);
 
             HitsInfo::Pagination {
-                hits_per_page: query.hits_per_page,
-                page: query.page,
+                hits_per_page: hits_per_page,
+                page: query.page.unwrap_or(1),
                 total_pages,
                 total_hits: number_of_hits,
             }
         } else {
             HitsInfo::OffsetLimit {
-                limit: query.limit.unwrap_or_else(DEFAULT_SEARCH_LIMIT),
+                limit: query.limit,
                 offset,
                 estimated_total_hits: number_of_hits,
             }
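Note (not part of the diff): `total_pages` is a ceiling division written so that `hits_per_page == 0` falls back to 0 instead of panicking: `saturating_sub(1)` avoids underflow and `checked_div` returns `None` on division by zero. A small standalone sketch with a hypothetical `total_pages` helper:

```rust
// Ceiling division used for the Pagination response; 0 hits per page -> 0 pages.
fn total_pages(number_of_hits: usize, hits_per_page: usize) -> usize {
    (number_of_hits + hits_per_page.saturating_sub(1))
        .checked_div(hits_per_page)
        .unwrap_or(0)
}

fn main() {
    assert_eq!(total_pages(0, 20), 0);  // no hits -> no pages
    assert_eq!(total_pages(40, 20), 2); // exact multiple
    assert_eq!(total_pages(41, 20), 3); // partial last page still counts
    assert_eq!(total_pages(10, 0), 0);  // checked_div(0) returns None -> 0
}
```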
@@ -691,10 +691,10 @@ mod test {
         let index_uuid = Uuid::new_v4();
         let query = SearchQuery {
             q: Some(String::from("hello world")),
-            offset: Some(10),
-            limit: Some(0),
-            page: 1,
-            hits_per_page: 10,
+            offset: 10,
+            limit: 0,
+            page: Some(1),
+            hits_per_page: Some(10),
             attributes_to_retrieve: Some(vec!["string".to_owned()].into_iter().collect()),
             attributes_to_crop: None,
             crop_length: 18,