Mirror of https://github.com/meilisearch/meilisearch.git (synced 2024-11-23 18:45:06 +08:00)
Merge #318

318: Revert "Sort at query time" r=Kerollmops a=curquiza

Reverts meilisearch/milli#309

We revert this from `main`, not because it leads to a bug, but because we don't want to release it now and we have to merge and release a hotfix on `main`. Cf:

- https://github.com/meilisearch/milli/issues/316
- https://github.com/meilisearch/milli/pull/317

Once v0.21.0 is released, we should merge this awesome addition again 👌

Co-authored-by: Clémentine Urquizar <clementine@meilisearch.com>
This commit is contained in commit 0d1f83ba4b.
@@ -52,9 +52,9 @@ fn bench_songs(c: &mut criterion::Criterion) {
         milli::default_criteria().iter().map(|criteria| criteria.to_string()).collect();
     let default_criterion = default_criterion.iter().map(|s| s.as_str());
     let asc_default: Vec<&str> =
-        std::iter::once("released-timestamp:asc").chain(default_criterion.clone()).collect();
+        std::iter::once("asc(released-timestamp)").chain(default_criterion.clone()).collect();
     let desc_default: Vec<&str> =
-        std::iter::once("released-timestamp:desc").chain(default_criterion.clone()).collect();
+        std::iter::once("desc(released-timestamp)").chain(default_criterion.clone()).collect();

     let basic_with_quote: Vec<String> = BASE_CONF
         .queries
@@ -118,12 +118,12 @@ fn bench_songs(c: &mut criterion::Criterion) {
     },
     utils::Conf {
         group_name: "asc",
-        criterion: Some(&["released-timestamp:desc"]),
+        criterion: Some(&["asc(released-timestamp)"]),
         ..BASE_CONF
     },
     utils::Conf {
         group_name: "desc",
-        criterion: Some(&["released-timestamp:desc"]),
+        criterion: Some(&["desc(released-timestamp)"]),
         ..BASE_CONF
     },

@@ -1030,7 +1030,7 @@ mod tests {
            displayed_attributes: Setting::Set(vec!["name".to_string()]),
            searchable_attributes: Setting::Set(vec!["age".to_string()]),
            filterable_attributes: Setting::Set(hashset! { "age".to_string() }),
-           criteria: Setting::Set(vec!["age:asc".to_string()]),
+           criteria: Setting::Set(vec!["asc(age)".to_string()]),
            stop_words: Setting::Set(btreeset! { "and".to_string() }),
            synonyms: Setting::Set(hashmap! { "alex".to_string() => vec!["alexey".to_string()] }),
        };
@@ -1058,7 +1058,7 @@ mod tests {
                Token::Str("criteria"),
                Token::Some,
                Token::Seq { len: Some(1) },
-               Token::Str("age:asc"),
+               Token::Str("asc(age)"),
                Token::SeqEnd,
                Token::Str("stopWords"),
                Token::Some,

@@ -25,6 +25,7 @@ obkv = "0.2.0"
 once_cell = "1.5.2"
 ordered-float = "2.1.1"
 rayon = "1.5.0"
+regex = "1.4.3"
 roaring = "0.6.6"
 serde = { version = "1.0.123", features = ["derive"] }
 serde_json = { version = "1.0.62", features = ["preserve_order"] }

@@ -1,10 +1,15 @@
 use std::fmt;
 use std::str::FromStr;

+use once_cell::sync::Lazy;
+use regex::Regex;
 use serde::{Deserialize, Serialize};

 use crate::error::{Error, UserError};

+static ASC_DESC_REGEX: Lazy<Regex> =
+    Lazy::new(|| Regex::new(r#"(asc|desc)\(([\w_-]+)\)"#).unwrap());
+
 #[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
 pub enum Criterion {
     /// Sorted by decreasing number of matched query terms.
@@ -12,13 +17,10 @@ pub enum Criterion {
     Words,
     /// Sorted by increasing number of typos.
     Typo,
-    /// Dynamically sort at query time the documents. None, one or multiple Asc/Desc sortable
-    /// attributes can be used in place of this criterion at query time.
-    Sort,
     /// Sorted by increasing distance between matched query terms.
     Proximity,
     /// Documents with quey words contained in more important
-    /// attributes are considered better.
+    /// attributes are considred better.
     Attribute,
     /// Sorted by the similarity of the matched words with the query words.
     Exactness,
@@ -41,46 +43,29 @@ impl Criterion {
 impl FromStr for Criterion {
     type Err = Error;

-    fn from_str(text: &str) -> Result<Criterion, Self::Err> {
-        match text {
+    fn from_str(txt: &str) -> Result<Criterion, Self::Err> {
+        match txt {
             "words" => Ok(Criterion::Words),
             "typo" => Ok(Criterion::Typo),
-            "sort" => Ok(Criterion::Sort),
             "proximity" => Ok(Criterion::Proximity),
             "attribute" => Ok(Criterion::Attribute),
             "exactness" => Ok(Criterion::Exactness),
-            text => match AscDesc::from_str(text) {
-                Ok(AscDesc::Asc(field)) => Ok(Criterion::Asc(field)),
-                Ok(AscDesc::Desc(field)) => Ok(Criterion::Desc(field)),
-                Err(error) => Err(error.into()),
-            },
+            text => {
+                let caps = ASC_DESC_REGEX
+                    .captures(text)
+                    .ok_or_else(|| UserError::InvalidCriterionName { name: text.to_string() })?;
+                let order = caps.get(1).unwrap().as_str();
+                let field_name = caps.get(2).unwrap().as_str();
+                match order {
+                    "asc" => Ok(Criterion::Asc(field_name.to_string())),
+                    "desc" => Ok(Criterion::Desc(field_name.to_string())),
+                    text => {
+                        return Err(
+                            UserError::InvalidCriterionName { name: text.to_string() }.into()
+                        )
+                    }
+                }
+            }
         }
     }
 }
-
-#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
-pub enum AscDesc {
-    Asc(String),
-    Desc(String),
-}
-
-impl AscDesc {
-    pub fn field(&self) -> &str {
-        match self {
-            AscDesc::Asc(field) => field,
-            AscDesc::Desc(field) => field,
-        }
-    }
-}
-
-impl FromStr for AscDesc {
-    type Err = UserError;
-
-    fn from_str(text: &str) -> Result<AscDesc, Self::Err> {
-        match text.rsplit_once(':') {
-            Some((field_name, "asc")) => Ok(AscDesc::Asc(field_name.to_string())),
-            Some((field_name, "desc")) => Ok(AscDesc::Desc(field_name.to_string())),
-            _ => Err(UserError::InvalidCriterionName { name: text.to_string() }),
-        }
-    }
-}
@@ -89,7 +74,6 @@ pub fn default_criteria() -> Vec<Criterion> {
     vec![
         Criterion::Words,
         Criterion::Typo,
-        Criterion::Sort,
         Criterion::Proximity,
         Criterion::Attribute,
         Criterion::Exactness,
@@ -103,12 +87,11 @@ impl fmt::Display for Criterion {
         match self {
             Words => f.write_str("words"),
             Typo => f.write_str("typo"),
-            Sort => f.write_str("sort"),
             Proximity => f.write_str("proximity"),
             Attribute => f.write_str("attribute"),
             Exactness => f.write_str("exactness"),
-            Asc(attr) => write!(f, "{}:asc", attr),
-            Desc(attr) => write!(f, "{}:desc", attr),
+            Asc(attr) => write!(f, "asc({})", attr),
+            Desc(attr) => write!(f, "desc({})", attr),
         }
     }
 }

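Taken together, the criterion.rs hunks above switch the Asc/Desc ranking-rule syntax back from the colon form (`field:asc`) to the parenthesized form (`asc(field)`) and restore the regex-based parser, while deleting the `AscDesc` helper type. A minimal sketch of the restored round-trip, not part of the commit itself, assuming the `Criterion` re-export shown in the lib.rs hunk further down:

    use std::str::FromStr;
    use milli::Criterion;

    // The restored parser accepts the parenthesized form...
    let criterion = Criterion::from_str("asc(released-timestamp)").unwrap();
    assert_eq!(criterion, Criterion::Asc("released-timestamp".to_string()));
    // ...and Display renders it back the same way.
    assert_eq!(criterion.to_string(), "asc(released-timestamp)");

    // The colon form introduced by the reverted PR no longer parses:
    // the regex only matches `asc(...)` / `desc(...)`.
    assert!(Criterion::from_str("released-timestamp:asc").is_err());
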
@@ -58,7 +58,6 @@ pub enum UserError {
    InvalidFacetsDistribution { invalid_facets_name: HashSet<String> },
    InvalidFilter(pest::error::Error<ParserRule>),
    InvalidFilterAttribute(pest::error::Error<ParserRule>),
-   InvalidSortableAttribute { field: String, valid_fields: HashSet<String> },
    InvalidStoreFile,
    MaxDatabaseSizeReached,
    MissingDocumentId { document: Object },
@@ -227,15 +226,6 @@ only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and undersco
                )
            }
            Self::InvalidFilterAttribute(error) => error.fmt(f),
-           Self::InvalidSortableAttribute { field, valid_fields } => {
-               let valid_names =
-                   valid_fields.iter().map(AsRef::as_ref).collect::<Vec<_>>().join(", ");
-               write!(
-                   f,
-                   "Attribute {} is not sortable, available sortable attributes are: {}",
-                   field, valid_names
-               )
-           }
            Self::MissingDocumentId { document } => {
                let json = serde_json::to_string(document).unwrap();
                write!(f, "document doesn't have an identifier {}", json)

@@ -28,7 +28,6 @@ pub mod main_key {
    pub const DISTINCT_FIELD_KEY: &str = "distinct-field-key";
    pub const DOCUMENTS_IDS_KEY: &str = "documents-ids";
    pub const FILTERABLE_FIELDS_KEY: &str = "filterable-fields";
-   pub const SORTABLE_FIELDS_KEY: &str = "sortable-fields";
    pub const FIELD_DISTRIBUTION_KEY: &str = "fields-distribution";
    pub const FIELDS_IDS_MAP_KEY: &str = "fields-ids-map";
    pub const HARD_EXTERNAL_DOCUMENTS_IDS_KEY: &str = "hard-external-documents-ids";
@@ -447,45 +446,13 @@ impl Index {
        Ok(fields_ids)
    }

-   /* sortable fields */
-
-   /// Writes the sortable fields names in the database.
-   pub(crate) fn put_sortable_fields(
-       &self,
-       wtxn: &mut RwTxn,
-       fields: &HashSet<String>,
-   ) -> heed::Result<()> {
-       self.main.put::<_, Str, SerdeJson<_>>(wtxn, main_key::SORTABLE_FIELDS_KEY, fields)
-   }
-
-   /// Deletes the sortable fields ids in the database.
-   pub(crate) fn delete_sortable_fields(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
-       self.main.delete::<_, Str>(wtxn, main_key::SORTABLE_FIELDS_KEY)
-   }
-
-   /// Returns the sortable fields names.
-   pub fn sortable_fields(&self, rtxn: &RoTxn) -> heed::Result<HashSet<String>> {
-       Ok(self
-           .main
-           .get::<_, Str, SerdeJson<_>>(rtxn, main_key::SORTABLE_FIELDS_KEY)?
-           .unwrap_or_default())
-   }
-
-   /// Identical to `sortable_fields`, but returns ids instead.
-   pub fn sortable_fields_ids(&self, rtxn: &RoTxn) -> Result<HashSet<FieldId>> {
-       let fields = self.sortable_fields(rtxn)?;
-       let fields_ids_map = self.fields_ids_map(rtxn)?;
-       Ok(fields.into_iter().filter_map(|name| fields_ids_map.id(&name)).collect())
-   }
-
    /* faceted documents ids */

    /// Returns the faceted fields names.
    ///
-   /// Faceted fields are the union of all the filterable, sortable, distinct, and Asc/Desc fields.
+   /// Faceted fields are the union of all the filterable, distinct, and Asc/Desc fields.
    pub fn faceted_fields(&self, rtxn: &RoTxn) -> Result<HashSet<String>> {
        let filterable_fields = self.filterable_fields(rtxn)?;
-       let sortable_fields = self.sortable_fields(rtxn)?;
        let distinct_field = self.distinct_field(rtxn)?;
        let asc_desc_fields =
            self.criteria(rtxn)?.into_iter().filter_map(|criterion| match criterion {
@@ -494,7 +461,6 @@ impl Index {
            });

        let mut faceted_fields = filterable_fields;
-       faceted_fields.extend(sortable_fields);
        faceted_fields.extend(asc_desc_fields);
        if let Some(field) = distinct_field {
            faceted_fields.insert(field.to_owned());

@@ -22,7 +22,7 @@ use std::result::Result as StdResult;
 use fxhash::{FxHasher32, FxHasher64};
 use serde_json::{Map, Value};

-pub use self::criterion::{default_criteria, AscDesc, Criterion};
+pub use self::criterion::{default_criteria, Criterion};
 pub use self::error::{
     Error, FieldIdMapMissingEntry, InternalError, SerializationError, UserError,
 };

@@ -7,7 +7,7 @@ use roaring::RoaringBitmap;

 use super::{Criterion, CriterionParameters, CriterionResult};
 use crate::search::criteria::{resolve_query_tree, CriteriaBuilder};
-use crate::search::facet::{FacetNumberIter, FacetStringIter};
+use crate::search::facet::FacetNumberIter;
 use crate::search::query_tree::Operation;
 use crate::{FieldId, Index, Result};

@@ -20,7 +20,7 @@ pub struct AscDesc<'t> {
    rtxn: &'t heed::RoTxn<'t>,
    field_name: String,
    field_id: Option<FieldId>,
-   is_ascending: bool,
+   ascending: bool,
    query_tree: Option<Operation>,
    candidates: Box<dyn Iterator<Item = heed::Result<RoaringBitmap>> + 't>,
    allowed_candidates: RoaringBitmap,
@@ -53,16 +53,12 @@ impl<'t> AscDesc<'t> {
        rtxn: &'t heed::RoTxn,
        parent: Box<dyn Criterion + 't>,
        field_name: String,
-       is_ascending: bool,
+       ascending: bool,
    ) -> Result<Self> {
        let fields_ids_map = index.fields_ids_map(rtxn)?;
        let field_id = fields_ids_map.id(&field_name);
        let faceted_candidates = match field_id {
-           Some(field_id) => {
-               let number_faceted = index.number_faceted_documents_ids(rtxn, field_id)?;
-               let string_faceted = index.string_faceted_documents_ids(rtxn, field_id)?;
-               number_faceted | string_faceted
-           }
+           Some(field_id) => index.number_faceted_documents_ids(rtxn, field_id)?,
            None => RoaringBitmap::default(),
        };

@@ -71,7 +67,7 @@ impl<'t> AscDesc<'t> {
            rtxn,
            field_name,
            field_id,
-           is_ascending,
+           ascending,
            query_tree: None,
            candidates: Box::new(std::iter::empty()),
            allowed_candidates: RoaringBitmap::new(),
@@ -91,7 +87,7 @@ impl<'t> Criterion for AscDesc<'t> {
        loop {
            debug!(
                "Facet {}({}) iteration",
-               if self.is_ascending { "Asc" } else { "Desc" },
+               if self.ascending { "Asc" } else { "Desc" },
                self.field_name
            );

@@ -140,7 +136,7 @@ impl<'t> Criterion for AscDesc<'t> {
                        self.index,
                        self.rtxn,
                        field_id,
-                       self.is_ascending,
+                       self.ascending,
                        candidates & &self.faceted_candidates,
                    )?,
                    None => Box::new(std::iter::empty()),
@@ -171,49 +167,31 @@ fn facet_ordered<'t>(
    index: &'t Index,
    rtxn: &'t heed::RoTxn,
    field_id: FieldId,
-   is_ascending: bool,
+   ascending: bool,
    candidates: RoaringBitmap,
 ) -> Result<Box<dyn Iterator<Item = heed::Result<RoaringBitmap>> + 't>> {
    if candidates.len() <= CANDIDATES_THRESHOLD {
-       let number_iter = iterative_facet_number_ordered_iter(
-           index,
-           rtxn,
-           field_id,
-           is_ascending,
-           candidates.clone(),
-       )?;
-       let string_iter =
-           iterative_facet_string_ordered_iter(index, rtxn, field_id, is_ascending, candidates)?;
-       Ok(Box::new(number_iter.chain(string_iter).map(Ok)) as Box<dyn Iterator<Item = _>>)
+       let iter = iterative_facet_ordered_iter(index, rtxn, field_id, ascending, candidates)?;
+       Ok(Box::new(iter.map(Ok)) as Box<dyn Iterator<Item = _>>)
    } else {
-       let facet_number_fn = if is_ascending {
+       let facet_fn = if ascending {
            FacetNumberIter::new_reducing
        } else {
            FacetNumberIter::new_reverse_reducing
        };
-       let number_iter = facet_number_fn(rtxn, index, field_id, candidates.clone())?
-           .map(|res| res.map(|(_, docids)| docids));
-
-       let facet_string_fn = if is_ascending {
-           FacetStringIter::new_reducing
-       } else {
-           FacetStringIter::new_reverse_reducing
-       };
-       let string_iter = facet_string_fn(rtxn, index, field_id, candidates)?
-           .map(|res| res.map(|(_, _, docids)| docids));
-
-       Ok(Box::new(number_iter.chain(string_iter)))
+       let iter = facet_fn(rtxn, index, field_id, candidates)?;
+       Ok(Box::new(iter.map(|res| res.map(|(_, docids)| docids))))
    }
 }

-/// Fetch the whole list of candidates facet number values one by one and order them by it.
+/// Fetch the whole list of candidates facet values one by one and order them by it.
 ///
 /// This function is fast when the amount of candidates to rank is small.
-fn iterative_facet_number_ordered_iter<'t>(
+fn iterative_facet_ordered_iter<'t>(
    index: &'t Index,
    rtxn: &'t heed::RoTxn,
    field_id: FieldId,
-   is_ascending: bool,
+   ascending: bool,
    candidates: RoaringBitmap,
 ) -> Result<impl Iterator<Item = RoaringBitmap> + 't> {
    let mut docids_values = Vec::with_capacity(candidates.len() as usize);
@@ -221,14 +199,14 @@ fn iterative_facet_number_ordered_iter<'t>(
        let left = (field_id, docid, f64::MIN);
        let right = (field_id, docid, f64::MAX);
        let mut iter = index.field_id_docid_facet_f64s.range(rtxn, &(left..=right))?;
-       let entry = if is_ascending { iter.next() } else { iter.last() };
+       let entry = if ascending { iter.next() } else { iter.last() };
        if let Some(((_, _, value), ())) = entry.transpose()? {
            docids_values.push((docid, OrderedFloat(value)));
        }
    }
    docids_values.sort_unstable_by_key(|(_, v)| *v);
    let iter = docids_values.into_iter();
-   let iter = if is_ascending {
+   let iter = if ascending {
        Box::new(iter) as Box<dyn Iterator<Item = _>>
    } else {
        Box::new(iter.rev())
@@ -238,49 +216,7 @@ fn iterative_facet_number_ordered_iter<'t>(
    // required to collect the result into an owned collection (a Vec).
    // https://github.com/rust-itertools/itertools/issues/499
    let vec: Vec<_> = iter
-       .group_by(|(_, v)| *v)
-       .into_iter()
-       .map(|(_, ids)| ids.map(|(id, _)| id).collect())
-       .collect();
-
-   Ok(vec.into_iter())
-}
-
-/// Fetch the whole list of candidates facet string values one by one and order them by it.
-///
-/// This function is fast when the amount of candidates to rank is small.
-fn iterative_facet_string_ordered_iter<'t>(
-   index: &'t Index,
-   rtxn: &'t heed::RoTxn,
-   field_id: FieldId,
-   is_ascending: bool,
-   candidates: RoaringBitmap,
-) -> Result<impl Iterator<Item = RoaringBitmap> + 't> {
-   let mut docids_values = Vec::with_capacity(candidates.len() as usize);
-   for docid in candidates.iter() {
-       let left = (field_id, docid, "");
-       let right = (field_id, docid.saturating_add(1), "");
-       // FIXME Doing this means that it will never be possible to retrieve
-       // the document with id 2^32, not sure this is a real problem.
-       let mut iter = index.field_id_docid_facet_strings.range(rtxn, &(left..right))?;
-       let entry = if is_ascending { iter.next() } else { iter.last() };
-       if let Some(((_, _, value), _)) = entry.transpose()? {
-           docids_values.push((docid, value));
-       }
-   }
-   docids_values.sort_unstable_by_key(|(_, v)| *v);
-   let iter = docids_values.into_iter();
-   let iter = if is_ascending {
-       Box::new(iter) as Box<dyn Iterator<Item = _>>
-   } else {
-       Box::new(iter.rev())
-   };
-
-   // The itertools GroupBy iterator doesn't provide an owned version, we are therefore
-   // required to collect the result into an owned collection (a Vec).
-   // https://github.com/rust-itertools/itertools/issues/499
-   let vec: Vec<_> = iter
-       .group_by(|(_, v)| *v)
+       .group_by(|(_, v)| v.clone())
        .into_iter()
        .map(|(_, ids)| ids.map(|(id, _)| id).collect())
        .collect();

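The iterative path restored above sorts `(docid, value)` pairs and then buckets runs of equal values into one `RoaringBitmap` per rank position using itertools' `group_by`; the intermediate `Vec` exists because `GroupBy` only yields borrowing iterators, as the comment and linked itertools issue in the hunk explain. A self-contained sketch of that bucketing step, mirroring the code above:

    use itertools::Itertools;
    use roaring::RoaringBitmap;

    // (docid, facet value) pairs, already sorted by value.
    let docids_values = vec![(1u32, 10), (3, 10), (2, 25)];
    let buckets: Vec<RoaringBitmap> = docids_values
        .into_iter()
        .group_by(|(_, v)| *v)
        .into_iter()
        .map(|(_, ids)| ids.map(|(id, _)| id).collect())
        .collect();
    // buckets[0] contains {1, 3} (value 10); buckets[1] contains {2} (value 25).
    assert_eq!(buckets[0].len(), 2);
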
@@ -12,7 +12,6 @@ use self::r#final::Final;
 use self::typo::Typo;
 use self::words::Words;
 use super::query_tree::{Operation, PrimitiveQueryPart, Query, QueryKind};
-use crate::criterion::AscDesc as AscDescName;
 use crate::search::{word_derivations, WordDerivationsCache};
 use crate::{DocumentId, FieldId, Index, Result, TreeLevel};

@@ -274,7 +273,6 @@ impl<'t> CriteriaBuilder<'t> {
        query_tree: Option<Operation>,
        primitive_query: Option<Vec<PrimitiveQueryPart>>,
        filtered_candidates: Option<RoaringBitmap>,
-       sort_criteria: Option<Vec<AscDescName>>,
    ) -> Result<Final<'t>> {
        use crate::criterion::Criterion as Name;

@@ -284,30 +282,8 @@ impl<'t> CriteriaBuilder<'t> {
            Box::new(Initial::new(query_tree, filtered_candidates)) as Box<dyn Criterion>;
        for name in self.index.criteria(&self.rtxn)? {
            criterion = match name {
-               Name::Words => Box::new(Words::new(self, criterion)),
                Name::Typo => Box::new(Typo::new(self, criterion)),
-               Name::Sort => match sort_criteria {
-                   Some(ref sort_criteria) => {
-                       for asc_desc in sort_criteria {
-                           criterion = match asc_desc {
-                               AscDescName::Asc(field) => Box::new(AscDesc::asc(
-                                   &self.index,
-                                   &self.rtxn,
-                                   criterion,
-                                   field.to_string(),
-                               )?),
-                               AscDescName::Desc(field) => Box::new(AscDesc::desc(
-                                   &self.index,
-                                   &self.rtxn,
-                                   criterion,
-                                   field.to_string(),
-                               )?),
-                           };
-                       }
-                       criterion
-                   }
-                   None => criterion,
-               },
+               Name::Words => Box::new(Words::new(self, criterion)),
                Name::Proximity => Box::new(Proximity::new(self, criterion)),
                Name::Attribute => Box::new(Attribute::new(self, criterion)),
                Name::Exactness => Box::new(Exactness::new(self, criterion, &primitive_query)?),

@@ -131,7 +131,7 @@ use std::ops::Bound::{Excluded, Included, Unbounded};

 use either::{Either, Left, Right};
 use heed::types::{ByteSlice, DecodeIgnore};
-use heed::{Database, LazyDecode, RoRange, RoRevRange};
+use heed::{Database, LazyDecode, RoRange};
 use roaring::RoaringBitmap;

 use crate::heed_codec::facet::{
@@ -206,65 +206,6 @@ impl<'t> Iterator for FacetStringGroupRange<'t> {
    }
 }

-pub struct FacetStringGroupRevRange<'t> {
-   iter: RoRevRange<
-       't,
-       FacetLevelValueU32Codec,
-       LazyDecode<FacetStringZeroBoundsValueCodec<CboRoaringBitmapCodec>>,
-   >,
-   end: Bound<u32>,
-}
-
-impl<'t> FacetStringGroupRevRange<'t> {
-   pub fn new<X, Y>(
-       rtxn: &'t heed::RoTxn,
-       db: Database<X, Y>,
-       field_id: FieldId,
-       level: NonZeroU8,
-       left: Bound<u32>,
-       right: Bound<u32>,
-   ) -> heed::Result<FacetStringGroupRevRange<'t>> {
-       let db = db.remap_types::<
-           FacetLevelValueU32Codec,
-           FacetStringZeroBoundsValueCodec<CboRoaringBitmapCodec>,
-       >();
-       let left_bound = match left {
-           Included(left) => Included((field_id, level, left, u32::MIN)),
-           Excluded(left) => Excluded((field_id, level, left, u32::MIN)),
-           Unbounded => Included((field_id, level, u32::MIN, u32::MIN)),
-       };
-       let right_bound = Included((field_id, level, u32::MAX, u32::MAX));
-       let iter = db.lazily_decode_data().rev_range(rtxn, &(left_bound, right_bound))?;
-       Ok(FacetStringGroupRevRange { iter, end: right })
-   }
-}
-
-impl<'t> Iterator for FacetStringGroupRevRange<'t> {
-   type Item = heed::Result<((NonZeroU8, u32, u32), (Option<(&'t str, &'t str)>, RoaringBitmap))>;
-
-   fn next(&mut self) -> Option<Self::Item> {
-       match self.iter.next() {
-           Some(Ok(((_fid, level, left, right), docids))) => {
-               let must_be_returned = match self.end {
-                   Included(end) => right <= end,
-                   Excluded(end) => right < end,
-                   Unbounded => true,
-               };
-               if must_be_returned {
-                   match docids.decode() {
-                       Ok((bounds, docids)) => Some(Ok(((level, left, right), (bounds, docids)))),
-                       Err(e) => Some(Err(e)),
-                   }
-               } else {
-                   None
-               }
-           }
-           Some(Err(e)) => Some(Err(e)),
-           None => None,
-       }
-   }
-}
-
 /// An iterator that is used to explore the level 0 of the facets string database.
 ///
 /// It yields the facet string and the roaring bitmap associated with it.
@@ -339,81 +280,6 @@ impl<'t> Iterator for FacetStringLevelZeroRange<'t> {
    }
 }

-pub struct FacetStringLevelZeroRevRange<'t> {
-   iter: RoRevRange<
-       't,
-       FacetStringLevelZeroCodec,
-       FacetStringLevelZeroValueCodec<CboRoaringBitmapCodec>,
-   >,
-}
-
-impl<'t> FacetStringLevelZeroRevRange<'t> {
-   pub fn new<X, Y>(
-       rtxn: &'t heed::RoTxn,
-       db: Database<X, Y>,
-       field_id: FieldId,
-       left: Bound<&str>,
-       right: Bound<&str>,
-   ) -> heed::Result<FacetStringLevelZeroRevRange<'t>> {
-       fn encode_value<'a>(buffer: &'a mut Vec<u8>, field_id: FieldId, value: &str) -> &'a [u8] {
-           buffer.extend_from_slice(&field_id.to_be_bytes());
-           buffer.push(0);
-           buffer.extend_from_slice(value.as_bytes());
-           &buffer[..]
-       }
-
-       let mut left_buffer = Vec::new();
-       let left_bound = match left {
-           Included(value) => Included(encode_value(&mut left_buffer, field_id, value)),
-           Excluded(value) => Excluded(encode_value(&mut left_buffer, field_id, value)),
-           Unbounded => {
-               left_buffer.extend_from_slice(&field_id.to_be_bytes());
-               left_buffer.push(0);
-               Included(&left_buffer[..])
-           }
-       };
-
-       let mut right_buffer = Vec::new();
-       let right_bound = match right {
-           Included(value) => Included(encode_value(&mut right_buffer, field_id, value)),
-           Excluded(value) => Excluded(encode_value(&mut right_buffer, field_id, value)),
-           Unbounded => {
-               right_buffer.extend_from_slice(&field_id.to_be_bytes());
-               right_buffer.push(1); // we must only get the level 0
-               Excluded(&right_buffer[..])
-           }
-       };
-
-       let iter = db
-           .remap_key_type::<ByteSlice>()
-           .rev_range(rtxn, &(left_bound, right_bound))?
-           .remap_types::<
-               FacetStringLevelZeroCodec,
-               FacetStringLevelZeroValueCodec<CboRoaringBitmapCodec>
-           >();
-
-       Ok(FacetStringLevelZeroRevRange { iter })
-   }
-}
-
-impl<'t> Iterator for FacetStringLevelZeroRevRange<'t> {
-   type Item = heed::Result<(&'t str, &'t str, RoaringBitmap)>;
-
-   fn next(&mut self) -> Option<Self::Item> {
-       match self.iter.next() {
-           Some(Ok(((_fid, normalized), (original, docids)))) => {
-               Some(Ok((normalized, original, docids)))
-           }
-           Some(Err(e)) => Some(Err(e)),
-           None => None,
-       }
-   }
-}
-
-type EitherStringRange<'t> = Either<FacetStringGroupRange<'t>, FacetStringLevelZeroRange<'t>>;
-type EitherStringRevRange<'t> =
-   Either<FacetStringGroupRevRange<'t>, FacetStringLevelZeroRevRange<'t>>;
-
 /// An iterator that is used to explore the facet strings level by level,
 /// it will only return facets strings that are associated with the
 /// candidates documents ids given.
@@ -421,45 +287,12 @@ pub struct FacetStringIter<'t> {
    rtxn: &'t heed::RoTxn<'t>,
    db: Database<ByteSlice, ByteSlice>,
    field_id: FieldId,
-   level_iters: Vec<(RoaringBitmap, Either<EitherStringRange<'t>, EitherStringRevRange<'t>>)>,
+   level_iters:
+       Vec<(RoaringBitmap, Either<FacetStringGroupRange<'t>, FacetStringLevelZeroRange<'t>>)>,
    must_reduce: bool,
 }

 impl<'t> FacetStringIter<'t> {
-   pub fn new_reducing(
-       rtxn: &'t heed::RoTxn,
-       index: &'t Index,
-       field_id: FieldId,
-       documents_ids: RoaringBitmap,
-   ) -> heed::Result<FacetStringIter<'t>> {
-       let db = index.facet_id_string_docids.remap_types::<ByteSlice, ByteSlice>();
-       let highest_iter = Self::highest_iter(rtxn, index, db, field_id)?;
-       Ok(FacetStringIter {
-           rtxn,
-           db,
-           field_id,
-           level_iters: vec![(documents_ids, Left(highest_iter))],
-           must_reduce: true,
-       })
-   }
-
-   pub fn new_reverse_reducing(
-       rtxn: &'t heed::RoTxn,
-       index: &'t Index,
-       field_id: FieldId,
-       documents_ids: RoaringBitmap,
-   ) -> heed::Result<FacetStringIter<'t>> {
-       let db = index.facet_id_string_docids.remap_types::<ByteSlice, ByteSlice>();
-       let highest_reverse_iter = Self::highest_reverse_iter(rtxn, index, db, field_id)?;
-       Ok(FacetStringIter {
-           rtxn,
-           db,
-           field_id,
-           level_iters: vec![(documents_ids, Right(highest_reverse_iter))],
-           must_reduce: true,
-       })
-   }
-
    pub fn new_non_reducing(
        rtxn: &'t heed::RoTxn,
        index: &'t Index,
@@ -467,12 +300,30 @@ impl<'t> FacetStringIter<'t> {
        documents_ids: RoaringBitmap,
    ) -> heed::Result<FacetStringIter<'t>> {
        let db = index.facet_id_string_docids.remap_types::<ByteSlice, ByteSlice>();
-       let highest_iter = Self::highest_iter(rtxn, index, db, field_id)?;
+       let highest_level = Self::highest_level(rtxn, db, field_id)?.unwrap_or(0);
+       let highest_iter = match NonZeroU8::new(highest_level) {
+           Some(highest_level) => Left(FacetStringGroupRange::new(
+               rtxn,
+               index.facet_id_string_docids,
+               field_id,
+               highest_level,
+               Unbounded,
+               Unbounded,
+           )?),
+           None => Right(FacetStringLevelZeroRange::new(
+               rtxn,
+               index.facet_id_string_docids,
+               field_id,
+               Unbounded,
+               Unbounded,
+           )?),
+       };
+
        Ok(FacetStringIter {
            rtxn,
            db,
            field_id,
-           level_iters: vec![(documents_ids, Left(highest_iter))],
+           level_iters: vec![(documents_ids, highest_iter)],
            must_reduce: false,
        })
    }
@@ -489,62 +340,6 @@ impl<'t> FacetStringIter<'t> {
            .transpose()?
            .map(|(key_bytes, _)| key_bytes[2])) // the level is the third bit
    }
-
-   fn highest_iter<X, Y>(
-       rtxn: &'t heed::RoTxn,
-       index: &'t Index,
-       db: Database<X, Y>,
-       field_id: FieldId,
-   ) -> heed::Result<Either<FacetStringGroupRange<'t>, FacetStringLevelZeroRange<'t>>> {
-       let highest_level = Self::highest_level(rtxn, db, field_id)?.unwrap_or(0);
-       match NonZeroU8::new(highest_level) {
-           Some(highest_level) => FacetStringGroupRange::new(
-               rtxn,
-               index.facet_id_string_docids,
-               field_id,
-               highest_level,
-               Unbounded,
-               Unbounded,
-           )
-           .map(Left),
-           None => FacetStringLevelZeroRange::new(
-               rtxn,
-               index.facet_id_string_docids,
-               field_id,
-               Unbounded,
-               Unbounded,
-           )
-           .map(Right),
-       }
-   }
-
-   fn highest_reverse_iter<X, Y>(
-       rtxn: &'t heed::RoTxn,
-       index: &'t Index,
-       db: Database<X, Y>,
-       field_id: FieldId,
-   ) -> heed::Result<Either<FacetStringGroupRevRange<'t>, FacetStringLevelZeroRevRange<'t>>> {
-       let highest_level = Self::highest_level(rtxn, db, field_id)?.unwrap_or(0);
-       match NonZeroU8::new(highest_level) {
-           Some(highest_level) => FacetStringGroupRevRange::new(
-               rtxn,
-               index.facet_id_string_docids,
-               field_id,
-               highest_level,
-               Unbounded,
-               Unbounded,
-           )
-           .map(Left),
-           None => FacetStringLevelZeroRevRange::new(
-               rtxn,
-               index.facet_id_string_docids,
-               field_id,
-               Unbounded,
-               Unbounded,
-           )
-           .map(Right),
-       }
-   }
 }

 impl<'t> Iterator for FacetStringIter<'t> {
@@ -553,21 +348,6 @@ impl<'t> Iterator for FacetStringIter<'t> {
    fn next(&mut self) -> Option<Self::Item> {
        'outer: loop {
            let (documents_ids, last) = self.level_iters.last_mut()?;
-           let is_ascending = last.is_left();
-
-           // We remap the different iterator types to make
-           // the algorithm less complex to understand.
-           let last = match last {
-               Left(ascending) => match ascending {
-                   Left(last) => Left(Left(last)),
-                   Right(last) => Right(Left(last)),
-               },
-               Right(descending) => match descending {
-                   Left(last) => Left(Right(last)),
-                   Right(last) => Right(Right(last)),
-               },
-           };
-
            match last {
                Left(last) => {
                    for result in last {
@@ -579,50 +359,24 @@ impl<'t> Iterator for FacetStringIter<'t> {
                                *documents_ids -= &docids;
                            }

-                           let result = if is_ascending {
-                               match string_bounds {
-                                   Some((left, right)) => {
-                                       FacetStringLevelZeroRevRange::new(
-                                           self.rtxn,
-                                           self.db,
-                                           self.field_id,
-                                           Included(left),
-                                           Included(right),
-                                       )
-                                       .map(Right)
-                                   }
-                                   None => FacetStringGroupRevRange::new(
-                                       self.rtxn,
-                                       self.db,
-                                       self.field_id,
-                                       NonZeroU8::new(level.get() - 1).unwrap(),
-                                       Included(left),
-                                       Included(right),
-                                   )
-                                   .map(Left),
-                               }
-                               .map(Right)
-                           } else {
-                               match string_bounds {
-                                   Some((left, right)) => FacetStringLevelZeroRange::new(
-                                       self.rtxn,
-                                       self.db,
-                                       self.field_id,
-                                       Included(left),
-                                       Included(right),
-                                   )
-                                   .map(Right),
-                                   None => FacetStringGroupRange::new(
-                                       self.rtxn,
-                                       self.db,
-                                       self.field_id,
-                                       NonZeroU8::new(level.get() - 1).unwrap(),
-                                       Included(left),
-                                       Included(right),
-                                   )
-                                   .map(Left),
-                               }
-                               .map(Left)
-                           };
+                           let result = match string_bounds {
+                               Some((left, right)) => FacetStringLevelZeroRange::new(
+                                   self.rtxn,
+                                   self.db,
+                                   self.field_id,
+                                   Included(left),
+                                   Included(right),
+                               )
+                               .map(Right),
+                               None => FacetStringGroupRange::new(
+                                   self.rtxn,
+                                   self.db,
+                                   self.field_id,
+                                   NonZeroU8::new(level.get() - 1).unwrap(),
+                                   Included(left),
+                                   Included(right),
+                               )
+                               .map(Left),
+                           };

                            match result {

@@ -18,8 +18,6 @@ pub(crate) use self::facet::ParserRule;
 pub use self::facet::{FacetDistribution, FacetNumberIter, FilterCondition, Operator};
 pub use self::matching_words::MatchingWords;
 use self::query_tree::QueryTreeBuilder;
-use crate::criterion::AscDesc;
-use crate::error::UserError;
 use crate::search::criteria::r#final::{Final, FinalResult};
 use crate::{DocumentId, Index, Result};

@@ -39,7 +37,6 @@ pub struct Search<'a> {
    filter: Option<FilterCondition>,
    offset: usize,
    limit: usize,
-   sort_criteria: Option<Vec<AscDesc>>,
    optional_words: bool,
    authorize_typos: bool,
    words_limit: usize,
@@ -54,7 +51,6 @@ impl<'a> Search<'a> {
            filter: None,
            offset: 0,
            limit: 20,
-           sort_criteria: None,
            optional_words: true,
            authorize_typos: true,
            words_limit: 10,
@@ -78,11 +74,6 @@ impl<'a> Search<'a> {
        self
    }

-   pub fn sort_criteria(&mut self, criteria: Vec<AscDesc>) -> &mut Search<'a> {
-       self.sort_criteria = Some(criteria);
-       self
-   }
-
    pub fn optional_words(&mut self, value: bool) -> &mut Search<'a> {
        self.optional_words = value;
        self
@@ -143,29 +134,8 @@ impl<'a> Search<'a> {
            None => MatchingWords::default(),
        };

-       // We check that we are allowed to use the sort criteria, we check
-       // that they are declared in the sortable fields.
-       let sortable_fields = self.index.sortable_fields(self.rtxn)?;
-       if let Some(sort_criteria) = &self.sort_criteria {
-           for asc_desc in sort_criteria {
-               let field = asc_desc.field();
-               if !sortable_fields.contains(field) {
-                   return Err(UserError::InvalidSortableAttribute {
-                       field: field.to_string(),
-                       valid_fields: sortable_fields,
-                   }
-                   .into());
-               }
-           }
-       }
-
        let criteria_builder = criteria::CriteriaBuilder::new(self.rtxn, self.index)?;
-       let criteria = criteria_builder.build(
-           query_tree,
-           primitive_query,
-           filtered_candidates,
-           self.sort_criteria.clone(),
-       )?;
+       let criteria = criteria_builder.build(query_tree, primitive_query, filtered_candidates)?;

        match self.index.distinct_field(self.rtxn)? {
            None => self.perform_sort(NoopDistinct, matching_words, criteria),
@@ -229,7 +199,6 @@ impl fmt::Debug for Search<'_> {
            filter,
            offset,
            limit,
-           sort_criteria,
            optional_words,
            authorize_typos,
            words_limit,
@@ -241,7 +210,6 @@ impl fmt::Debug for Search<'_> {
            .field("filter", filter)
            .field("offset", offset)
            .field("limit", limit)
-           .field("sort_criteria", sort_criteria)
            .field("optional_words", optional_words)
            .field("authorize_typos", authorize_typos)
            .field("words_limit", words_limit)

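For reference, the removed `sort_criteria` plumbing gave `Search` a builder method whose fields were validated against the sortable fields at execution time. This is the usage shape the reverted feature exposed, as a hedged sketch reconstructed from the deleted code above (it is not the API after this commit; `rtxn` and `index` are assumed in scope, and `released-timestamp` is just an example field):

    // Pre-revert usage shape: attach Asc/Desc sort criteria to a search.
    // `AscDesc` was re-exported from the crate root before this revert.
    let mut search = Search::new(&rtxn, &index);
    search.query("hello");
    search.sort_criteria(vec![AscDesc::Asc("released-timestamp".to_string())]);
    // execute() returned UserError::InvalidSortableAttribute when the field
    // was not declared in the sortable fields settings.
    let result = search.execute()?;
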
@@ -75,7 +75,6 @@ pub struct Settings<'a, 't, 'u, 'i> {
    searchable_fields: Setting<Vec<String>>,
    displayed_fields: Setting<Vec<String>>,
    filterable_fields: Setting<HashSet<String>>,
-   sortable_fields: Setting<HashSet<String>>,
    criteria: Setting<Vec<String>>,
    stop_words: Setting<BTreeSet<String>>,
    distinct_field: Setting<String>,
@@ -103,7 +102,6 @@ impl<'a, 't, 'u, 'i> Settings<'a, 't, 'u, 'i> {
            searchable_fields: Setting::NotSet,
            displayed_fields: Setting::NotSet,
            filterable_fields: Setting::NotSet,
-           sortable_fields: Setting::NotSet,
            criteria: Setting::NotSet,
            stop_words: Setting::NotSet,
            distinct_field: Setting::NotSet,
@@ -137,10 +135,6 @@ impl<'a, 't, 'u, 'i> Settings<'a, 't, 'u, 'i> {
        self.filterable_fields = Setting::Set(names);
    }

-   pub fn set_sortable_fields(&mut self, names: HashSet<String>) {
-       self.sortable_fields = Setting::Set(names);
-   }
-
    pub fn reset_criteria(&mut self) {
        self.criteria = Setting::Reset;
    }
@@ -398,23 +392,6 @@ impl<'a, 't, 'u, 'i> Settings<'a, 't, 'u, 'i> {
        Ok(())
    }

-   fn update_sortable(&mut self) -> Result<()> {
-       match self.sortable_fields {
-           Setting::Set(ref fields) => {
-               let mut new_fields = HashSet::new();
-               for name in fields {
-                   new_fields.insert(name.clone());
-               }
-               self.index.put_sortable_fields(self.wtxn, &new_fields)?;
-           }
-           Setting::Reset => {
-               self.index.delete_sortable_fields(self.wtxn)?;
-           }
-           Setting::NotSet => (),
-       }
-       Ok(())
-   }
-
    fn update_criteria(&mut self) -> Result<()> {
        match self.criteria {
            Setting::Set(ref fields) => {
@@ -469,7 +446,6 @@ impl<'a, 't, 'u, 'i> Settings<'a, 't, 'u, 'i> {

        self.update_displayed()?;
        self.update_filterable()?;
-       self.update_sortable()?;
        self.update_distinct_field()?;
        self.update_criteria()?;
        self.update_primary_key()?;
@@ -743,7 +719,7 @@ mod tests {
        let mut builder = Settings::new(&mut wtxn, &index, 0);
        // Don't display the generated `id` field.
        builder.set_displayed_fields(vec![S("name")]);
-       builder.set_criteria(vec![S("age:asc")]);
+       builder.set_criteria(vec![S("asc(age)")]);
        builder.execute(|_, _| ()).unwrap();

        // Then index some documents.
@@ -977,7 +953,7 @@ mod tests {
        let mut builder = Settings::new(&mut wtxn, &index, 0);
        builder.set_displayed_fields(vec!["hello".to_string()]);
        builder.set_filterable_fields(hashset! { S("age"), S("toto") });
-       builder.set_criteria(vec!["toto:asc".to_string()]);
+       builder.set_criteria(vec!["asc(toto)".to_string()]);
        builder.execute(|_, _| ()).unwrap();
        wtxn.commit().unwrap();

@@ -1014,7 +990,7 @@ mod tests {
        let mut builder = Settings::new(&mut wtxn, &index, 0);
        builder.set_displayed_fields(vec!["hello".to_string()]);
        // It is only Asc(toto), there is a facet database but it is denied to filter with toto.
-       builder.set_criteria(vec!["toto:asc".to_string()]);
+       builder.set_criteria(vec!["asc(toto)".to_string()]);
        builder.execute(|_, _| ()).unwrap();
        wtxn.commit().unwrap();

@ -1,17 +1,17 @@
-{"id":"A","word_rank":0,"typo_rank":1,"proximity_rank":15,"attribute_rank":505,"exact_rank":5,"asc_desc_rank":0,"sort_by_rank":0,"title":"hell o","description":"hell o is the fourteenth episode of the american television series glee performing songs with this word","tag":"blue","":""}
+{"id":"A","word_rank":0,"typo_rank":1,"proximity_rank":15,"attribute_rank":505,"exact_rank":5,"asc_desc_rank":0,"title":"hell o","description":"hell o is the fourteenth episode of the american television series glee performing songs with this word","tag":"blue","":""}
-{"id":"B","word_rank":2,"typo_rank":0,"proximity_rank":0,"attribute_rank":0,"exact_rank":4,"asc_desc_rank":1,"sort_by_rank":2,"title":"hello","description":"hello is a song recorded by english singer songwriter adele","tag":"red","":""}
+{"id":"B","word_rank":2,"typo_rank":0,"proximity_rank":0,"attribute_rank":0,"exact_rank":4,"asc_desc_rank":1,"title":"hello","description":"hello is a song recorded by english singer songwriter adele","tag":"red","":""}
-{"id":"C","word_rank":0,"typo_rank":1,"proximity_rank":8,"attribute_rank":336,"exact_rank":4,"asc_desc_rank":2,"sort_by_rank":0,"title":"hell on earth","description":"hell on earth is the third studio album by american hip hop duo mobb deep","tag":"blue","":""}
+{"id":"C","word_rank":0,"typo_rank":1,"proximity_rank":8,"attribute_rank":336,"exact_rank":4,"asc_desc_rank":2,"title":"hell on earth","description":"hell on earth is the third studio album by american hip hop duo mobb deep","tag":"blue","":""}
-{"id":"D","word_rank":0,"typo_rank":1,"proximity_rank":10,"attribute_rank":757,"exact_rank":4,"asc_desc_rank":3,"sort_by_rank":2,"title":"hell on wheels tv series","description":"the construction of the first transcontinental railroad across the united states in the world","tag":"red","":""}
+{"id":"D","word_rank":0,"typo_rank":1,"proximity_rank":10,"attribute_rank":757,"exact_rank":4,"asc_desc_rank":3,"title":"hell on wheels tv series","description":"the construction of the first transcontinental railroad across the united states in the world","tag":"red","":""}
-{"id":"E","word_rank":2,"typo_rank":0,"proximity_rank":0,"attribute_rank":0,"exact_rank":4,"asc_desc_rank":4,"sort_by_rank":1,"title":"hello kitty","description":"also known by her full name kitty white is a fictional character produced by the japanese company sanrio","tag":"green","":""}
+{"id":"E","word_rank":2,"typo_rank":0,"proximity_rank":0,"attribute_rank":0,"exact_rank":4,"asc_desc_rank":4,"title":"hello kitty","description":"also known by her full name kitty white is a fictional character produced by the japanese company sanrio","tag":"green","":""}
-{"id":"F","word_rank":2,"typo_rank":1,"proximity_rank":0,"attribute_rank":1017,"exact_rank":5,"asc_desc_rank":5,"sort_by_rank":0,"title":"laptop orchestra","description":"a laptop orchestra lork or lo is a chamber music ensemble consisting primarily of laptops like helo huddersfield experimental laptop orchestra","tag":"blue","":""}
+{"id":"F","word_rank":2,"typo_rank":1,"proximity_rank":0,"attribute_rank":1017,"exact_rank":5,"asc_desc_rank":5,"title":"laptop orchestra","description":"a laptop orchestra lork or lo is a chamber music ensemble consisting primarily of laptops like helo huddersfield experimental laptop orchestra","tag":"blue","":""}
-{"id":"G","word_rank":1,"typo_rank":0,"proximity_rank":0,"attribute_rank":0,"exact_rank":3,"asc_desc_rank":5,"sort_by_rank":2,"title":"hello world film","description":"hello world is a 2019 japanese animated sci fi romantic drama film directed by tomohiko ito and produced by graphinica","tag":"red","":""}
+{"id":"G","word_rank":1,"typo_rank":0,"proximity_rank":0,"attribute_rank":0,"exact_rank":3,"asc_desc_rank":5,"title":"hello world film","description":"hello world is a 2019 japanese animated sci fi romantic drama film directed by tomohiko ito and produced by graphinica","tag":"red","":""}
-{"id":"H","word_rank":1,"typo_rank":0,"proximity_rank":1,"attribute_rank":0,"exact_rank":3,"asc_desc_rank":4,"sort_by_rank":1,"title":"world hello day","description":"holiday observed on november 21 to express that conflicts should be resolved through communication rather than the use of force","tag":"green","":""}
+{"id":"H","word_rank":1,"typo_rank":0,"proximity_rank":1,"attribute_rank":0,"exact_rank":3,"asc_desc_rank":4,"title":"world hello day","description":"holiday observed on november 21 to express that conflicts should be resolved through communication rather than the use of force","tag":"green","":""}
-{"id":"I","word_rank":0,"typo_rank":0,"proximity_rank":8,"attribute_rank":338,"exact_rank":3,"asc_desc_rank":3,"sort_by_rank":0,"title":"hello world song","description":"hello world is a song written by tom douglas tony lane and david lee and recorded by american country music group lady antebellum","tag":"blue","":""}
+{"id":"I","word_rank":0,"typo_rank":0,"proximity_rank":8,"attribute_rank":338,"exact_rank":3,"asc_desc_rank":3,"title":"hello world song","description":"hello world is a song written by tom douglas tony lane and david lee and recorded by american country music group lady antebellum","tag":"blue","":""}
-{"id":"J","word_rank":1,"typo_rank":0,"proximity_rank":1,"attribute_rank":1,"exact_rank":3,"asc_desc_rank":2,"sort_by_rank":1,"title":"hello cruel world","description":"hello cruel world is an album by new zealand band tall dwarfs","tag":"green","":""}
+{"id":"J","word_rank":1,"typo_rank":0,"proximity_rank":1,"attribute_rank":1,"exact_rank":3,"asc_desc_rank":2,"title":"hello cruel world","description":"hello cruel world is an album by new zealand band tall dwarfs","tag":"green","":""}
-{"id":"K","word_rank":0,"typo_rank":2,"proximity_rank":9,"attribute_rank":670,"exact_rank":5,"asc_desc_rank":1,"sort_by_rank":2,"title":"ello creation system","description":"in few word ello was a construction toy created by the american company mattel to engage girls in construction play","tag":"red","":""}
+{"id":"K","word_rank":0,"typo_rank":2,"proximity_rank":9,"attribute_rank":670,"exact_rank":5,"asc_desc_rank":1,"title":"ello creation system","description":"in few word ello was a construction toy created by the american company mattel to engage girls in construction play","tag":"red","":""}
-{"id":"L","word_rank":0,"typo_rank":0,"proximity_rank":2,"attribute_rank":250,"exact_rank":4,"asc_desc_rank":0,"sort_by_rank":0,"title":"good morning world","description":"good morning world is an american sitcom broadcast on cbs tv during the 1967 1968 season","tag":"blue","":""}
+{"id":"L","word_rank":0,"typo_rank":0,"proximity_rank":2,"attribute_rank":250,"exact_rank":4,"asc_desc_rank":0,"title":"good morning world","description":"good morning world is an american sitcom broadcast on cbs tv during the 1967 1968 season","tag":"blue","":""}
-{"id":"M","word_rank":0,"typo_rank":0,"proximity_rank":0,"attribute_rank":0,"exact_rank":0,"asc_desc_rank":0,"sort_by_rank":2,"title":"hello world america","description":"a perfect match for a perfect engine using the query hello world america","tag":"red","":""}
+{"id":"M","word_rank":0,"typo_rank":0,"proximity_rank":0,"attribute_rank":0,"exact_rank":0,"asc_desc_rank":0,"title":"hello world america","description":"a perfect match for a perfect engine using the query hello world america","tag":"red","":""}
-{"id":"N","word_rank":0,"typo_rank":0,"proximity_rank":0,"attribute_rank":0,"exact_rank":1,"asc_desc_rank":4,"sort_by_rank":1,"title":"hello world america unleashed","description":"a very good match for a very good engine using the query hello world america","tag":"green","":""}
+{"id":"N","word_rank":0,"typo_rank":0,"proximity_rank":0,"attribute_rank":0,"exact_rank":1,"asc_desc_rank":4,"title":"hello world america unleashed","description":"a very good match for a very good engine using the query hello world america","tag":"green","":""}
-{"id":"O","word_rank":0,"typo_rank":0,"proximity_rank":0,"attribute_rank":10,"exact_rank":0,"asc_desc_rank":6,"sort_by_rank":0,"title":"a perfect match for a perfect engine using the query hello world america","description":"hello world america","tag":"blue","":""}
+{"id":"O","word_rank":0,"typo_rank":0,"proximity_rank":0,"attribute_rank":10,"exact_rank":0,"asc_desc_rank":6,"title":"a perfect match for a perfect engine using the query hello world america","description":"hello world america","tag":"blue","":""}
-{"id":"P","word_rank":0,"typo_rank":0,"proximity_rank":0,"attribute_rank":12,"exact_rank":1,"asc_desc_rank":3,"sort_by_rank":2,"title":"a very good match for a very good engine using the query hello world america","description":"hello world america unleashed","tag":"red","":""}
+{"id":"P","word_rank":0,"typo_rank":0,"proximity_rank":0,"attribute_rank":12,"exact_rank":1,"asc_desc_rank":3,"title":"a very good match for a very good engine using the query hello world america","description":"hello world america unleashed","tag":"red","":""}
-{"id":"Q","word_rank":1,"typo_rank":0,"proximity_rank":0,"attribute_rank":0,"exact_rank":3,"asc_desc_rank":2,"sort_by_rank":1,"title":"hello world","description":"a hello world program generally is a computer program that outputs or displays the message hello world","tag":"green","":""}
+{"id":"Q","word_rank":1,"typo_rank":0,"proximity_rank":0,"attribute_rank":0,"exact_rank":3,"asc_desc_rank":2,"title":"hello world","description":"a hello world program generally is a computer program that outputs or displays the message hello world","tag":"green","":""}
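The only change to the fixture above is that every document loses its "sort_by_rank" field. As a minimal, self-contained sketch (assuming the serde and serde_json crates; field subset chosen for brevity), one of the reverted lines still deserializes into a struct that never mentions the dropped field, since serde skips unknown keys by default:

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct TestDocument {
    id: String,
    word_rank: u32,
    typo_rank: u32,
    asc_desc_rank: u32,
    title: String,
    tag: String,
}

fn main() {
    // Document "B" from the reverted fixture; keys not present in the
    // struct, such as "description", are simply ignored.
    let line = r#"{"id":"B","word_rank":2,"typo_rank":0,"proximity_rank":0,"attribute_rank":0,"exact_rank":4,"asc_desc_rank":1,"title":"hello","description":"hello is a song recorded by english singer songwriter adele","tag":"red","":""}"#;
    let doc: TestDocument = serde_json::from_str(line).unwrap();
    assert_eq!((doc.id.as_str(), doc.asc_desc_rank), ("B", 1));
}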
@ -32,7 +32,7 @@ macro_rules! test_distinct {
         let SearchResult { documents_ids, .. } = search.execute().unwrap();

         let mut distinct_values = HashSet::new();
-        let expected_external_ids: Vec<_> = search::expected_order(&criteria, true, true, &[])
+        let expected_external_ids: Vec<_> = search::expected_order(&criteria, true, true)
             .into_iter()
             .filter_map(|d| {
                 if distinct_values.contains(&d.$distinct) {
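The test_distinct! hunk above keeps the same HashSet-based deduplication and only drops the trailing &[] sort argument. A self-contained sketch of that distinct-filtering pattern on toy data (illustrative values, not milli's):

use std::collections::HashSet;

fn main() {
    // Keep only the first document seen for each distinct value,
    // preserving the incoming (expected) order.
    let docs = vec![("A", "blue"), ("B", "red"), ("C", "blue"), ("D", "green")];
    let mut distinct_values = HashSet::new();
    let kept: Vec<&str> = docs
        .into_iter()
        .filter_map(|(id, tag)| {
            if distinct_values.contains(tag) {
                None // a document with this value was already kept
            } else {
                distinct_values.insert(tag);
                Some(id)
            }
        })
        .collect();
    assert_eq!(kept, vec!["A", "B", "D"]);
}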
@ -29,7 +29,7 @@ macro_rules! test_filter {
         let SearchResult { documents_ids, .. } = search.execute().unwrap();

         let filtered_ids = search::expected_filtered_ids($filter);
-        let expected_external_ids: Vec<_> = search::expected_order(&criteria, true, true, &[])
+        let expected_external_ids: Vec<_> = search::expected_order(&criteria, true, true)
             .into_iter()
             .filter_map(|d| if filtered_ids.contains(&d.id) { Some(d.id) } else { None })
             .collect();
@ -1,4 +1,3 @@
-use std::cmp::Reverse;
 use std::collections::HashSet;

 use big_s::S;
@ -6,7 +5,7 @@ use either::{Either, Left, Right};
 use heed::EnvOpenOptions;
 use maplit::{hashmap, hashset};
 use milli::update::{IndexDocuments, Settings, UpdateFormat};
-use milli::{AscDesc, Criterion, DocumentId, Index};
+use milli::{Criterion, DocumentId, Index};
 use serde::Deserialize;
 use slice_group_by::GroupBy;

@ -37,10 +36,6 @@ pub fn setup_search_index_with_criteria(criteria: &[Criterion]) -> Index {
         S("tag"),
         S("asc_desc_rank"),
     });
-    builder.set_sortable_fields(hashset! {
-        S("tag"),
-        S("asc_desc_rank"),
-    });
     builder.set_synonyms(hashmap! {
         S("hello") => vec![S("good morning")],
         S("world") => vec![S("earth")],
@ -72,7 +67,6 @@ pub fn expected_order(
     criteria: &[Criterion],
     authorize_typo: bool,
     optional_words: bool,
-    sort_by: &[AscDesc],
 ) -> Vec<TestDocument> {
     let dataset =
         serde_json::Deserializer::from_str(CONTENT).into_iter().map(|r| r.unwrap()).collect();
@ -96,14 +90,6 @@ pub fn expected_order(
                     new_groups
                         .extend(group.linear_group_by_key(|d| d.proximity_rank).map(Vec::from));
                 }
-                Criterion::Sort if sort_by == [AscDesc::Asc(S("tag"))] => {
-                    group.sort_by_key(|d| d.sort_by_rank);
-                    new_groups.extend(group.linear_group_by_key(|d| d.sort_by_rank).map(Vec::from));
-                }
-                Criterion::Sort if sort_by == [AscDesc::Desc(S("tag"))] => {
-                    group.sort_by_key(|d| Reverse(d.sort_by_rank));
-                    new_groups.extend(group.linear_group_by_key(|d| d.sort_by_rank).map(Vec::from));
-                }
                 Criterion::Typo => {
                     group.sort_by_key(|d| d.typo_rank);
                     new_groups.extend(group.linear_group_by_key(|d| d.typo_rank).map(Vec::from));
@ -118,13 +104,11 @@ pub fn expected_order(
                         .extend(group.linear_group_by_key(|d| d.asc_desc_rank).map(Vec::from));
                 }
                 Criterion::Desc(field_name) if field_name == "asc_desc_rank" => {
-                    group.sort_by_key(|d| Reverse(d.asc_desc_rank));
+                    group.sort_by_key(|d| std::cmp::Reverse(d.asc_desc_rank));
                     new_groups
                         .extend(group.linear_group_by_key(|d| d.asc_desc_rank).map(Vec::from));
                 }
-                Criterion::Asc(_) | Criterion::Desc(_) | Criterion::Sort => {
-                    new_groups.push(group.clone())
-                }
+                Criterion::Asc(_) | Criterion::Desc(_) => new_groups.push(group.clone()),
             }
         }
         groups = std::mem::take(&mut new_groups);
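Two details are worth noting in the hunk above: with "use std::cmp::Reverse;" dropped from the imports, the surviving Desc arm now spells out the full std::cmp::Reverse path, and every arm follows the same sort-then-split shape. A self-contained sketch of that shape on toy ranks (assuming the slice_group_by crate, as imported in this file):

use slice_group_by::GroupBy;

fn main() {
    // One refinement step of expected_order: stable-sort the current
    // group by a rank, then split it into runs of equal rank so the
    // next criterion can refine each run independently.
    let mut group = vec![3u32, 1, 2, 1, 3];
    group.sort();
    let new_groups: Vec<Vec<u32>> =
        group.linear_group_by_key(|&rank| rank).map(Vec::from).collect();
    assert_eq!(new_groups, vec![vec![1, 1], vec![2], vec![3, 3]]);
}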
@ -201,7 +185,6 @@ pub struct TestDocument {
     pub attribute_rank: u32,
     pub exact_rank: u32,
     pub asc_desc_rank: u32,
-    pub sort_by_rank: u32,
     pub title: String,
     pub description: String,
     pub tag: String,
@ -1,6 +1,6 @@
 use big_s::S;
 use milli::update::Settings;
-use milli::{AscDesc, Criterion, Search, SearchResult};
+use milli::{Criterion, Search, SearchResult};
 use Criterion::*;

 use crate::search::{self, EXTERNAL_DOCUMENTS_IDS};
@ -11,7 +11,7 @@ const ALLOW_OPTIONAL_WORDS: bool = true;
 const DISALLOW_OPTIONAL_WORDS: bool = false;

 macro_rules! test_criterion {
-    ($func:ident, $optional_word:ident, $authorize_typos:ident, $criteria:expr, $sort_criteria:expr) => {
+    ($func:ident, $optional_word:ident, $authorize_typos:ident, $criteria:expr) => {
         #[test]
         fn $func() {
             let criteria = $criteria;
@ -23,168 +23,82 @@ macro_rules! test_criterion {
             search.limit(EXTERNAL_DOCUMENTS_IDS.len());
             search.authorize_typos($authorize_typos);
             search.optional_words($optional_word);
-            search.sort_criteria($sort_criteria);

             let SearchResult { documents_ids, .. } = search.execute().unwrap();

-            let expected_external_ids: Vec<_> = search::expected_order(
-                &criteria,
-                $authorize_typos,
-                $optional_word,
-                &$sort_criteria[..],
-            )
-            .into_iter()
-            .map(|d| d.id)
-            .collect();
+            let expected_external_ids: Vec<_> =
+                search::expected_order(&criteria, $authorize_typos, $optional_word)
+                    .into_iter()
+                    .map(|d| d.id)
+                    .collect();
             let documents_ids = search::internal_to_external_ids(&index, &documents_ids);
             assert_eq!(documents_ids, expected_external_ids);
         }
     };
 }

-test_criterion!(none_allow_typo, ALLOW_OPTIONAL_WORDS, ALLOW_TYPOS, vec![], vec![]);
+test_criterion!(none_allow_typo, ALLOW_OPTIONAL_WORDS, ALLOW_TYPOS, vec![]);
-test_criterion!(none_disallow_typo, DISALLOW_OPTIONAL_WORDS, DISALLOW_TYPOS, vec![], vec![]);
+test_criterion!(none_disallow_typo, DISALLOW_OPTIONAL_WORDS, DISALLOW_TYPOS, vec![]);
-test_criterion!(words_allow_typo, ALLOW_OPTIONAL_WORDS, ALLOW_TYPOS, vec![Words], vec![]);
+test_criterion!(words_allow_typo, ALLOW_OPTIONAL_WORDS, ALLOW_TYPOS, vec![Words]);
-test_criterion!(
-    attribute_allow_typo,
-    DISALLOW_OPTIONAL_WORDS,
-    ALLOW_TYPOS,
-    vec![Attribute],
-    vec![]
-);
-test_criterion!(
-    attribute_disallow_typo,
-    DISALLOW_OPTIONAL_WORDS,
-    DISALLOW_TYPOS,
-    vec![Attribute],
-    vec![]
-);
-test_criterion!(
-    exactness_allow_typo,
-    DISALLOW_OPTIONAL_WORDS,
-    ALLOW_TYPOS,
-    vec![Exactness],
-    vec![]
-);
-test_criterion!(
-    exactness_disallow_typo,
-    DISALLOW_OPTIONAL_WORDS,
-    DISALLOW_TYPOS,
-    vec![Exactness],
-    vec![]
-);
-test_criterion!(
-    proximity_allow_typo,
-    DISALLOW_OPTIONAL_WORDS,
-    ALLOW_TYPOS,
-    vec![Proximity],
-    vec![]
-);
-test_criterion!(
-    proximity_disallow_typo,
-    DISALLOW_OPTIONAL_WORDS,
-    DISALLOW_TYPOS,
-    vec![Proximity],
-    vec![]
-);
+test_criterion!(attribute_allow_typo, DISALLOW_OPTIONAL_WORDS, ALLOW_TYPOS, vec![Attribute]);
+test_criterion!(attribute_disallow_typo, DISALLOW_OPTIONAL_WORDS, DISALLOW_TYPOS, vec![Attribute]);
+test_criterion!(exactness_allow_typo, DISALLOW_OPTIONAL_WORDS, ALLOW_TYPOS, vec![Exactness]);
+test_criterion!(exactness_disallow_typo, DISALLOW_OPTIONAL_WORDS, DISALLOW_TYPOS, vec![Exactness]);
+test_criterion!(proximity_allow_typo, DISALLOW_OPTIONAL_WORDS, ALLOW_TYPOS, vec![Proximity]);
+test_criterion!(proximity_disallow_typo, DISALLOW_OPTIONAL_WORDS, DISALLOW_TYPOS, vec![Proximity]);
 test_criterion!(
     asc_allow_typo,
     DISALLOW_OPTIONAL_WORDS,
     ALLOW_TYPOS,
-    vec![Asc(S("asc_desc_rank"))],
-    vec![]
+    vec![Asc(S("asc_desc_rank"))]
 );
 test_criterion!(
     asc_disallow_typo,
     DISALLOW_OPTIONAL_WORDS,
     DISALLOW_TYPOS,
-    vec![Asc(S("asc_desc_rank"))],
-    vec![]
+    vec![Asc(S("asc_desc_rank"))]
 );
 test_criterion!(
     desc_allow_typo,
     DISALLOW_OPTIONAL_WORDS,
     ALLOW_TYPOS,
-    vec![Desc(S("asc_desc_rank"))],
-    vec![]
+    vec![Desc(S("asc_desc_rank"))]
 );
 test_criterion!(
     desc_disallow_typo,
     DISALLOW_OPTIONAL_WORDS,
     DISALLOW_TYPOS,
-    vec![Desc(S("asc_desc_rank"))],
-    vec![]
+    vec![Desc(S("asc_desc_rank"))]
 );
 test_criterion!(
     asc_unexisting_field_allow_typo,
     DISALLOW_OPTIONAL_WORDS,
     ALLOW_TYPOS,
-    vec![Asc(S("unexisting_field"))],
-    vec![]
+    vec![Asc(S("unexisting_field"))]
 );
 test_criterion!(
     asc_unexisting_field_disallow_typo,
     DISALLOW_OPTIONAL_WORDS,
     DISALLOW_TYPOS,
-    vec![Asc(S("unexisting_field"))],
-    vec![]
+    vec![Asc(S("unexisting_field"))]
 );
 test_criterion!(
     desc_unexisting_field_allow_typo,
     DISALLOW_OPTIONAL_WORDS,
     ALLOW_TYPOS,
-    vec![Desc(S("unexisting_field"))],
-    vec![]
+    vec![Desc(S("unexisting_field"))]
 );
 test_criterion!(
     desc_unexisting_field_disallow_typo,
     DISALLOW_OPTIONAL_WORDS,
     DISALLOW_TYPOS,
-    vec![Desc(S("unexisting_field"))],
-    vec![]
+    vec![Desc(S("unexisting_field"))]
 );
-test_criterion!(empty_sort_by_allow_typo, DISALLOW_OPTIONAL_WORDS, ALLOW_TYPOS, vec![Sort], vec![]);
-test_criterion!(
-    empty_sort_by_disallow_typo,
-    DISALLOW_OPTIONAL_WORDS,
-    DISALLOW_TYPOS,
-    vec![Sort],
-    vec![]
-);
-test_criterion!(
-    sort_by_asc_allow_typo,
-    DISALLOW_OPTIONAL_WORDS,
-    ALLOW_TYPOS,
-    vec![Sort],
-    vec![AscDesc::Asc(S("tag"))]
-);
-test_criterion!(
-    sort_by_asc_disallow_typo,
-    DISALLOW_OPTIONAL_WORDS,
-    DISALLOW_TYPOS,
-    vec![Sort],
-    vec![AscDesc::Asc(S("tag"))]
-);
-test_criterion!(
-    sort_by_desc_allow_typo,
-    DISALLOW_OPTIONAL_WORDS,
-    ALLOW_TYPOS,
-    vec![Sort],
-    vec![AscDesc::Desc(S("tag"))]
-);
-test_criterion!(
-    sort_by_desc_disallow_typo,
-    DISALLOW_OPTIONAL_WORDS,
-    DISALLOW_TYPOS,
-    vec![Sort],
-    vec![AscDesc::Desc(S("tag"))]
-);
 test_criterion!(
     default_criteria_order,
     ALLOW_OPTIONAL_WORDS,
     ALLOW_TYPOS,
-    vec![Words, Typo, Proximity, Attribute, Exactness],
-    vec![]
+    vec![Words, Typo, Proximity, Attribute, Exactness]
 );

 #[test]
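After the revert, test_criterion! is back to four parameters and most invocations fit on a single line again. A self-contained toy (illustrative names and body, not milli's) showing how a macro_rules! generator of that four-argument shape expands into a named #[test] function:

const ALLOW_TYPOS: bool = true;
const DISALLOW_OPTIONAL_WORDS: bool = false;

macro_rules! test_criterion {
    ($func:ident, $optional_word:ident, $authorize_typos:ident, $criteria:expr) => {
        #[test]
        fn $func() {
            let criteria: Vec<&str> = $criteria;
            // A real body would configure a search with $optional_word and
            // $authorize_typos and compare against an expected ordering.
            let _ = ($optional_word, $authorize_typos);
            assert!(criteria.len() <= 5);
        }
    };
}

test_criterion!(words_allow_typo, DISALLOW_OPTIONAL_WORDS, ALLOW_TYPOS, vec!["words"]);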
@ -348,7 +262,7 @@ fn criteria_mixup() {
         let SearchResult { documents_ids, .. } = search.execute().unwrap();

         let expected_external_ids: Vec<_> =
-            search::expected_order(&criteria, ALLOW_OPTIONAL_WORDS, ALLOW_TYPOS, &[])
+            search::expected_order(&criteria, ALLOW_OPTIONAL_WORDS, ALLOW_TYPOS)
                 .into_iter()
                 .map(|d| d.id)
                 .collect();