Mirror of https://github.com/meilisearch/meilisearch.git (synced 2024-11-29 08:35:15 +08:00)
Commit 71578a5462
@@ -1,5 +1,6 @@
## v0.10.2
- Add support for facet count (#676)
- Add support for faceted search (#631)
- Add support for configuring the lmdb map size (#646, #647)
- Add exposed port for Dockerfile (#654)
Cargo.lock (generated), 94 changed lines
@@ -522,9 +522,9 @@ dependencies = [

[[package]]
name = "bstr"
version = "0.2.12"
version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2889e6d50f394968c8bf4240dc3f2a7eb4680844d27308f798229ac9d4725f41"
checksum = "31accafdb70df7871592c058eca3985b71104e15ac32f64706022c58867da931"
dependencies = [
"lazy_static",
"memchr",

@@ -616,9 +616,9 @@ checksum = "5b89647f09b9f4c838cb622799b2843e4e13bff64661dab9a0362bb92985addd"

[[package]]
name = "clap"
version = "2.33.0"
version = "2.33.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5067f5bb2d80ef5d68b4c87db81601f0b75bca627bc2ef76b141d7b846a3c6d9"
checksum = "bdfa80d47f954d53a35a64987ca1422f495b8d6483c0fe9f7117b36c2a792129"
dependencies = [
"ansi_term",
"atty",

@@ -1008,9 +1008,9 @@ checksum = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"

[[package]]
name = "futures"
version = "0.3.4"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c329ae8753502fb44ae4fc2b622fa2a94652c41e795143765ba0927f92ab780"
checksum = "1e05b85ec287aac0dc34db7d4a569323df697f9c55b99b15d6b4ef8cde49f613"
dependencies = [
"futures-channel",
"futures-core",

@@ -1023,9 +1023,9 @@ dependencies = [

[[package]]
name = "futures-channel"
version = "0.3.4"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0c77d04ce8edd9cb903932b608268b3fffec4163dc053b3b402bf47eac1f1a8"
checksum = "f366ad74c28cca6ba456d95e6422883cfb4b252a83bed929c83abfdbbf2967d5"
dependencies = [
"futures-core",
"futures-sink",

@@ -1033,15 +1033,15 @@ dependencies = [

[[package]]
name = "futures-core"
version = "0.3.4"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f25592f769825e89b92358db00d26f965761e094951ac44d3663ef25b7ac464a"
checksum = "59f5fff90fd5d971f936ad674802482ba441b6f09ba5e15fd8b39145582ca399"

[[package]]
name = "futures-executor"
version = "0.3.4"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f674f3e1bcb15b37284a90cedf55afdba482ab061c407a9c0ebbd0f3109741ba"
checksum = "10d6bb888be1153d3abeb9006b11b02cf5e9b209fda28693c31ae1e4e012e314"
dependencies = [
"futures-core",
"futures-task",

@@ -1050,15 +1050,15 @@ dependencies = [

[[package]]
name = "futures-io"
version = "0.3.4"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a638959aa96152c7a4cddf50fcb1e3fede0583b27157c26e67d6f99904090dc6"
checksum = "de27142b013a8e869c14957e6d2edeef89e97c289e69d042ee3a49acd8b51789"

[[package]]
name = "futures-macro"
version = "0.3.4"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a5081aa3de1f7542a794a397cde100ed903b0630152d0973479018fd85423a7"
checksum = "d0b5a30a4328ab5473878237c447333c093297bded83a4983d10f4deea240d39"
dependencies = [
"proc-macro-hack",
"proc-macro2",

@@ -1068,21 +1068,24 @@ dependencies = [

[[package]]
name = "futures-sink"
version = "0.3.4"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3466821b4bc114d95b087b850a724c6f83115e929bc88f1fa98a3304a944c8a6"
checksum = "3f2032893cb734c7a05d85ce0cc8b8c4075278e93b24b66f9de99d6eb0fa8acc"

[[package]]
name = "futures-task"
version = "0.3.4"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b0a34e53cf6cdcd0178aa573aed466b646eb3db769570841fda0c7ede375a27"
checksum = "bdb66b5f09e22019b1ab0830f7785bcea8e7a42148683f99214f73f8ec21a626"
dependencies = [
"once_cell",
]

[[package]]
name = "futures-util"
version = "0.3.4"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22766cf25d64306bedf0384da004d05c9974ab104fcc4528f1236181c18004c5"
checksum = "8764574ff08b701a084482c3c7031349104b07ac897393010494beaa18ce32c6"
dependencies = [
"futures-channel",
"futures-core",

@@ -1091,6 +1094,7 @@ dependencies = [
"futures-sink",
"futures-task",
"memchr",
"pin-project",
"pin-utils",
"proc-macro-hack",
"proc-macro-nested",

@@ -1398,9 +1402,9 @@ dependencies = [

[[package]]
name = "intervaltree"
version = "0.2.5"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8254add2ea664734c9d001f8151cc3d7696b135f7e40e5a2efa814a662cb3a44"
checksum = "566d5aa3b5cc5c5809cc1a9c9588d917a634248bfc58f7ea14e354e71595a32c"
dependencies = [
"smallvec",
]

@@ -1660,8 +1664,8 @@ dependencies = [
"mime",
"pretty-bytes",
"rand 0.7.3",
"sentry",
"regex",
"sentry",
"serde",
"serde_json",
"serde_qs",

@@ -1842,9 +1846,9 @@ dependencies = [

[[package]]
name = "ntapi"
version = "0.3.3"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f26e041cd983acbc087e30fcba770380cfa352d0e392e175b2344ebaf7ea0602"
checksum = "7a31937dea023539c72ddae0e3571deadc1414b300483fa7aaec176168cfa9d2"
dependencies = [
"winapi 0.3.8",
]

@@ -1918,9 +1922,9 @@ checksum = "77af24da69f9d9341038eba93a073b1fdaaa1b788221b00a69bce9e762cb32de"

[[package]]
name = "openssl-sys"
version = "0.9.55"
version = "0.9.56"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7717097d810a0f2e2323f9e5d11e71608355e24828410b55b9d4f18aa5f9a5d8"
checksum = "f02309a7f127000ed50594f0b50ecc69e7c654e16d41b4e8156d1b3df8e0b52e"
dependencies = [
"autocfg",
"cc",

@@ -2038,18 +2042,18 @@ dependencies = [

[[package]]
name = "pin-project"
version = "0.4.13"
version = "0.4.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "82c3bfbfb5bb42f99498c7234bbd768c220eb0cea6818259d0d18a1aa3d2595d"
checksum = "81d480cb4e89522ccda96d0eed9af94180b7a5f93fb28f66e1fd7d68431663d1"
dependencies = [
"pin-project-internal",
]

[[package]]
name = "pin-project-internal"
version = "0.4.13"
version = "0.4.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccbf6449dcfb18562c015526b085b8df1aa3cdab180af8ec2ebd300a3bd28f63"
checksum = "a82996f11efccb19b685b14b5df818de31c1edcee3daa256ab5775dd98e72feb"
dependencies = [
"proc-macro2",
"quote",

@@ -2466,9 +2470,9 @@ dependencies = [

[[package]]
name = "schannel"
version = "0.1.18"
version = "0.1.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "039c25b130bd8c1321ee2d7de7fde2659fa9c2744e4bb29711cfc852ea53cd19"
checksum = "8f05ba609c234e60bee0d547fe94a4c7e9da733d1c962cf6e59efa4cd9c8bc75"
dependencies = [
"lazy_static",
"winapi 0.3.8",

@@ -2498,9 +2502,9 @@ checksum = "cbb21fe0588557792176c89bc7b943027b14f346d03c6be6a199c2860277d93a"

[[package]]
name = "security-framework"
version = "0.4.3"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f331b9025654145cd425b9ded0caf8f5ae0df80d418b326e2dc1c3dc5eb0620"
checksum = "64808902d7d99f78eaddd2b4e2509713babc3dc3c85ad6f4c447680f3c01e535"
dependencies = [
"bitflags",
"core-foundation",

@@ -2575,18 +2579,18 @@ dependencies = [

[[package]]
name = "serde"
version = "1.0.106"
version = "1.0.110"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "36df6ac6412072f67cf767ebbde4133a5b2e88e76dc6187fa7104cd16f783399"
checksum = "99e7b308464d16b56eba9964e4972a3eee817760ab60d88c3f86e1fecb08204c"
dependencies = [
"serde_derive",
]

[[package]]
name = "serde_derive"
version = "1.0.106"
version = "1.0.110"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e549e3abf4fb8621bd1609f11dfc9f5e50320802273b12f3811a67e6716ea6c"
checksum = "818fbf6bfa9a42d3bfcaca148547aa00c7b915bec71d1757aa2d44ca68771984"
dependencies = [
"proc-macro2",
"quote",

@@ -2595,9 +2599,9 @@ dependencies = [

[[package]]
name = "serde_json"
version = "1.0.52"
version = "1.0.53"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7894c8ed05b7a3a279aeb79025fdec1d3158080b75b98a08faf2806bb799edd"
checksum = "993948e75b189211a9b31a7528f950c6adc21f9720b6438ff80a7fa2f864cea2"
dependencies = [
"indexmap",
"itoa",

@@ -2853,9 +2857,9 @@ dependencies = [

[[package]]
name = "threadpool"
version = "1.8.0"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8dae184447c15d5a6916d973c642aec485105a13cd238192a6927ae3e077d66"
checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa"
dependencies = [
"num_cpus",
]
@@ -371,12 +371,12 @@ fn search_command(command: SearchCommand, database: Database) -> Result<(), Box<
});
}

let (documents, _nb_hits) = builder.query(ref_reader, &query, 0..command.number_results)?;
let result = builder.query(ref_reader, &query, 0..command.number_results)?;

let mut retrieve_duration = Duration::default();

let number_of_documents = documents.len();
for mut doc in documents {
let number_of_documents = result.documents.len();
for mut doc in result.documents {
doc.highlights
.sort_unstable_by_key(|m| (m.char_index, m.char_length));
@@ -11,7 +11,7 @@ use std::fmt;
use compact_arena::{SmallArena, Idx32, mk_arena};
use log::debug;
use meilisearch_types::DocIndex;
use sdset::{Set, SetBuf, exponential_search, SetOperation};
use sdset::{Set, SetBuf, exponential_search, SetOperation, Counter, duo::OpBuilder};
use slice_group_by::{GroupBy, GroupByMut};

use crate::error::Error;

@@ -24,11 +24,21 @@ use crate::query_tree::{create_query_tree, traverse_query_tree};
use crate::query_tree::{Operation, QueryResult, QueryKind, QueryId, PostingsKey};
use crate::query_tree::Context as QTContext;

#[derive(Debug, Default)]
pub struct SortResult {
pub documents: Vec<Document>,
pub nb_hits: usize,
pub exhaustive_nb_hit: bool,
pub facets: Option<HashMap<String, HashMap<String, usize>>>,
pub exhaustive_facet_count: Option<bool>,
}

pub fn bucket_sort<'c, FI>(
reader: &heed::RoTxn<MainT>,
query: &str,
range: Range<usize>,
facets_docids: Option<SetBuf<DocumentId>>,
facet_count_docids: Option<HashMap<String, HashMap<String, Cow<Set<DocumentId>>>>>,
filter: Option<FI>,
criteria: Criteria<'c>,
searchable_attrs: Option<ReorderedAttrs>,

@@ -38,7 +48,7 @@ pub fn bucket_sort<'c, FI>(
synonyms_store: store::Synonyms,
prefix_documents_cache_store: store::PrefixDocumentsCache,
prefix_postings_lists_cache_store: store::PrefixPostingsListsCache,
) -> MResult<(Vec<Document>, usize)>
) -> MResult<SortResult>
where
FI: Fn(DocumentId) -> bool,
{

@@ -52,6 +62,7 @@ where
query,
range,
facets_docids,
facet_count_docids,
filter,
distinct,
distinct_size,

@@ -66,9 +77,11 @@ where
);
}

let mut result = SortResult::default();

let words_set = match unsafe { main_store.static_words_fst(reader)? } {
Some(words) => words,
None => return Ok((Vec::new(), 0)),
None => return Ok(SortResult::default()),
};

let stop_words = main_store.stop_words_fst(reader)?.unwrap_or_default();

@@ -107,6 +120,12 @@ where
docids = Cow::Owned(intersection);
}

if let Some(f) = facet_count_docids {
// hardcoded value, until approximation optimization
result.exhaustive_facet_count = Some(true);
result.facets = Some(facet_count(f, &docids));
}

let before = Instant::now();
mk_arena!(arena);
let mut bare_matches = cleanup_bare_matches(&mut arena, &docids, queries);

@@ -181,7 +200,10 @@ where
debug!("bucket sort took {:.02?}", before_bucket_sort.elapsed());

Ok((documents, docids.len()))
result.documents = documents;
result.nb_hits = docids.len();

Ok(result)
}

pub fn bucket_sort_with_distinct<'c, FI, FD>(

@@ -189,6 +211,7 @@ pub fn bucket_sort_with_distinct<'c, FI, FD>(
query: &str,
range: Range<usize>,
facets_docids: Option<SetBuf<DocumentId>>,
facet_count_docids: Option<HashMap<String, HashMap<String, Cow<Set<DocumentId>>>>>,
filter: Option<FI>,
distinct: FD,
distinct_size: usize,

@@ -200,14 +223,16 @@ pub fn bucket_sort_with_distinct<'c, FI, FD>(
synonyms_store: store::Synonyms,
_prefix_documents_cache_store: store::PrefixDocumentsCache,
prefix_postings_lists_cache_store: store::PrefixPostingsListsCache,
) -> MResult<(Vec<Document>, usize)>
) -> MResult<SortResult>
where
FI: Fn(DocumentId) -> bool,
FD: Fn(DocumentId) -> Option<u64>,
{
let mut result = SortResult::default();

let words_set = match unsafe { main_store.static_words_fst(reader)? } {
Some(words) => words,
None => return Ok((Vec::new(), 0)),
None => return Ok(SortResult::default()),
};

let stop_words = main_store.stop_words_fst(reader)?.unwrap_or_default();

@@ -240,12 +265,18 @@ where
debug!("number of postings {:?}", queries.len());

if let Some(facets_docids) = facets_docids {
let intersection = sdset::duo::OpBuilder::new(docids.as_ref(), facets_docids.as_set())
let intersection = OpBuilder::new(docids.as_ref(), facets_docids.as_set())
.intersection()
.into_set_buf();
docids = Cow::Owned(intersection);
}

if let Some(f) = facet_count_docids {
// hardcoded value, until approximation optimization
result.exhaustive_facet_count = Some(true);
result.facets = Some(facet_count(f, &docids));
}

let before = Instant::now();
mk_arena!(arena);
let mut bare_matches = cleanup_bare_matches(&mut arena, &docids, queries);

@@ -379,8 +410,10 @@ where
}
}
}
result.documents = documents;
result.nb_hits = docids.len();

Ok((documents, docids.len()))
Ok(result)
}

fn cleanup_bare_matches<'tag, 'txn>(

@@ -575,3 +608,22 @@ impl Deref for PostingsListView<'_> {
}
}
}

/// For each entry in facet_docids, calculates the number of documents in the intersection with candidate_docids.
fn facet_count(
facet_docids: HashMap<String, HashMap<String, Cow<Set<DocumentId>>>>,
candidate_docids: &Set<DocumentId>,
) -> HashMap<String, HashMap<String, usize>> {
let mut facets_counts = HashMap::with_capacity(facet_docids.len());
for (key, doc_map) in facet_docids {
let mut count_map = HashMap::with_capacity(doc_map.len());
for (value, docids) in doc_map {
let mut counter = Counter::new();
let op = OpBuilder::new(docids.as_ref(), candidate_docids).intersection();
SetOperation::<DocumentId>::extend_collection(op, &mut counter);
count_map.insert(value, counter.0);
}
facets_counts.insert(key, count_map);
}
facets_counts
}
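The new `facet_count` helper above leans on sdset's `Counter` to get the size of an intersection without materialising the intersected set. A minimal, self-contained sketch of that trick, using plain `u32` ids instead of `DocumentId` and assuming `Counter` behaves for `u32` exactly as it does for `DocumentId` in the code above:

```rust
use sdset::{Set, SetOperation, Counter, duo::OpBuilder};

// Count how many elements two sorted, deduplicated sets have in common.
fn intersection_len(left: &Set<u32>, right: &Set<u32>) -> usize {
    // Counter only increments an integer for every element fed to it,
    // so no intermediate SetBuf is allocated.
    let mut counter = Counter::new();
    let op = OpBuilder::new(left, right).intersection();
    SetOperation::<u32>::extend_collection(op, &mut counter);
    counter.0
}

fn main() {
    let candidate_docids = Set::new(&[1, 2, 3, 5, 8]).unwrap();
    let facet_docids = Set::new(&[2, 3, 8, 13]).unwrap();
    assert_eq!(intersection_len(candidate_docids, facet_docids), 3);
}
```

The same pattern is applied once per facet value, which is why `facet_count` simply loops over the nested maps.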
@@ -371,6 +371,7 @@ impl Database {
mod tests {
use super::*;

use crate::bucket_sort::SortResult;
use crate::criterion::{self, CriteriaBuilder};
use crate::update::{ProcessedUpdateResult, UpdateStatus};
use crate::settings::Settings;

@@ -675,8 +676,8 @@ mod tests {
// even try to search for a document
let reader = db.main_read_txn().unwrap();
let (results, _nb_hits) = index.query_builder().query(&reader, "21 ", 0..20).unwrap();
assert_matches!(results.len(), 1);
let SortResult {documents, .. } = index.query_builder().query(&reader, "21 ", 0..20).unwrap();
assert_matches!(documents.len(), 1);

reader.abort();

@@ -1073,8 +1074,8 @@ mod tests {
let builder = index.query_builder_with_criteria(criteria);

let (results, _nb_hits) = builder.query(&reader, "Kevin", 0..20).unwrap();
let mut iter = results.into_iter();
let SortResult {documents, .. } = builder.query(&reader, "Kevin", 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(
iter.next(),
@@ -1,27 +1,31 @@
use std::borrow::Cow;
use std::collections::HashMap;
use std::ops::{Range, Deref};
use std::time::Duration;

use crate::database::MainT;
use crate::bucket_sort::{bucket_sort, bucket_sort_with_distinct};
use crate::{criterion::Criteria, Document, DocumentId};
use crate::{reordered_attrs::ReorderedAttrs, store, MResult};
use crate::facets::FacetFilter;

use either::Either;
use sdset::SetOperation;

pub struct QueryBuilder<'c, 'f, 'd, 'fa, 'i> {
use meilisearch_schema::FieldId;

use crate::database::MainT;
use crate::bucket_sort::{bucket_sort, bucket_sort_with_distinct, SortResult};
use crate::{criterion::Criteria, DocumentId};
use crate::{reordered_attrs::ReorderedAttrs, store, MResult};
use crate::facets::FacetFilter;

pub struct QueryBuilder<'c, 'f, 'd, 'i> {
criteria: Criteria<'c>,
searchable_attrs: Option<ReorderedAttrs>,
filter: Option<Box<dyn Fn(DocumentId) -> bool + 'f>>,
distinct: Option<(Box<dyn Fn(DocumentId) -> Option<u64> + 'd>, usize)>,
timeout: Option<Duration>,
index: &'i store::Index,
facets: Option<&'fa FacetFilter>,
facet_filter: Option<FacetFilter>,
facets: Option<Vec<(FieldId, String)>>,
}

impl<'c, 'f, 'd, 'fa, 'i> QueryBuilder<'c, 'f, 'd, 'fa, 'i> {
impl<'c, 'f, 'd, 'i> QueryBuilder<'c, 'f, 'd, 'i> {
pub fn new(index: &'i store::Index) -> Self {
QueryBuilder::with_criteria(
index,

@@ -29,7 +33,13 @@ impl<'c, 'f, 'd, 'fa, 'i> QueryBuilder<'c, 'f, 'd, 'fa, 'i> {
)
}

pub fn set_facets(&mut self, facets: Option<&'fa FacetFilter>) {
/// sets facet attributes to filter on
pub fn set_facet_filter(&mut self, facets: Option<FacetFilter>) {
self.facet_filter = facets;
}

/// sets facet attributes for which to return the count
pub fn set_facets(&mut self, facets: Option<Vec<(FieldId, String)>>) {
self.facets = facets;
}

@@ -44,6 +54,7 @@ impl<'c, 'f, 'd, 'fa, 'i> QueryBuilder<'c, 'f, 'd, 'fa, 'i> {
distinct: None,
timeout: None,
index,
facet_filter: None,
facets: None,
}
}

@@ -76,8 +87,8 @@ impl<'c, 'f, 'd, 'fa, 'i> QueryBuilder<'c, 'f, 'd, 'fa, 'i> {
reader: &heed::RoTxn<MainT>,
query: &str,
range: Range<usize>,
) -> MResult<(Vec<Document>, usize)> {
let facets_docids = match self.facets {
) -> MResult<SortResult> {
let facets_docids = match self.facet_filter {
Some(facets) => {
let mut ands = Vec::with_capacity(facets.len());
let mut ors = Vec::new();

@@ -98,7 +109,7 @@ impl<'c, 'f, 'd, 'fa, 'i> QueryBuilder<'c, 'f, 'd, 'fa, 'i> {
match self.index.facets.facet_document_ids(reader, &key)? {
Some(docids) => ands.push(docids),
// no candidates for search, early return.
None => return Ok((vec![], 0)),
None => return Ok(SortResult::default()),
}
}
};

@@ -109,12 +120,33 @@ impl<'c, 'f, 'd, 'fa, 'i> QueryBuilder<'c, 'f, 'd, 'fa, 'i> {
None => None
};
// for each field to retrieve the count for, create a HashMap associating the attribute
// value to a set of matching documents. The HashMaps are then collected in another
// HashMap, associating each HashMap to its field.
let facet_count_docids = match self.facets {
Some(field_ids) => {
let mut facet_count_map = HashMap::new();
for (field_id, field_name) in field_ids {
let mut key_map = HashMap::new();
for pair in self.index.facets.field_document_ids(reader, field_id)? {
let (facet_key, document_ids) = pair?;
let value = facet_key.value();
key_map.insert(value.to_string(), document_ids);
}
facet_count_map.insert(field_name, key_map);
}
Some(facet_count_map)
}
None => None,
};

match self.distinct {
Some((distinct, distinct_size)) => bucket_sort_with_distinct(
reader,
query,
range,
facets_docids,
facet_count_docids,
self.filter,
distinct,
distinct_size,

@@ -132,6 +164,7 @@ impl<'c, 'f, 'd, 'fa, 'i> QueryBuilder<'c, 'f, 'd, 'fa, 'i> {
query,
range,
facets_docids,
facet_count_docids,
self.filter,
self.criteria,
self.searchable_attrs,
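For orientation, here is a hedged sketch of how a caller inside meilisearch-core could drive the two new setters together with `query()`. The function name `search_with_counts`, the `color_field` id and the query string are invented for the example; the builder calls mirror the signatures introduced above:

```rust
use meilisearch_schema::FieldId;

use crate::database::MainT;
use crate::{store, MResult};

// Hypothetical helper: ask for a count of every value of one faceted field.
fn search_with_counts(
    index: &store::Index,
    reader: &heed::RoTxn<MainT>,
    color_field: FieldId,
) -> MResult<()> {
    let mut builder = index.query_builder();
    // (field id, field name) pairs, as produced by prepare_facet_list on the HTTP side
    builder.set_facets(Some(vec![(color_field, "color".to_string())]));

    let result = builder.query(reader, "shoes", 0..20)?;
    println!("{} hits", result.nb_hits);
    if let Some(facets) = result.facets {
        for (attr, value_counts) in facets {
            for (value, count) in value_counts {
                println!("{}.{} -> {}", attr, value, count);
            }
        }
    }
    Ok(())
}
```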
@@ -159,6 +192,7 @@ mod tests {
use tempfile::TempDir;

use crate::DocIndex;
use crate::Document;
use crate::automaton::normalize_str;
use crate::bucket_sort::SimpleMatch;
use crate::database::{Database,DatabaseOptions};

@@ -349,8 +383,8 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let (results, _nb_hits) = builder.query(&reader, "iphone from apple", 0..20).unwrap();
let mut iter = results.into_iter();
let SortResult { documents, .. } = builder.query(&reader, "iphone from apple", 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
let mut matches = matches.into_iter();

@@ -372,8 +406,8 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let (results, _nb_hits) = builder.query(&reader, "hello", 0..20).unwrap();
let mut iter = results.into_iter();
let SortResult { documents, .. } = builder.query(&reader, "hello", 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
let mut matches = matches.into_iter();

@@ -383,8 +417,8 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
let (results, _nb_hits) = builder.query(&reader, "bonjour", 0..20).unwrap();
let mut iter = results.into_iter();
let SortResult { documents, .. } = builder.query(&reader, "bonjour", 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
let mut matches = matches.into_iter();

@@ -406,7 +440,7 @@ mod tests {
// let builder = store.query_builder();
// let results = builder.query(&reader, "sal", 0..20).unwrap();
// let mut iter = results.into_iter();
// let mut iter = documents.into_iter();

// assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
// let mut matches = matches.into_iter();

@@ -417,7 +451,7 @@ mod tests {
// let builder = store.query_builder();
// let results = builder.query(&reader, "bonj", 0..20).unwrap();
// let mut iter = results.into_iter();
// let mut iter = documents.into_iter();

// assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
// let mut matches = matches.into_iter();

@@ -428,13 +462,13 @@ mod tests {
// let builder = store.query_builder();
// let results = builder.query(&reader, "sal blabla", 0..20).unwrap();
// let mut iter = results.into_iter();
// let mut iter = documents.into_iter();

// assert_matches!(iter.next(), None);

// let builder = store.query_builder();
// let results = builder.query(&reader, "bonj blabla", 0..20).unwrap();
// let mut iter = results.into_iter();
// let mut iter = documents.into_iter();

// assert_matches!(iter.next(), None);
// }

@@ -450,7 +484,7 @@ mod tests {
// let builder = store.query_builder();
// let results = builder.query(&reader, "salutution", 0..20).unwrap();
// let mut iter = results.into_iter();
// let mut iter = documents.into_iter();

// assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
// let mut matches = matches.into_iter();

@@ -461,7 +495,7 @@ mod tests {
// let builder = store.query_builder();
// let results = builder.query(&reader, "saluttion", 0..20).unwrap();
// let mut iter = results.into_iter();
// let mut iter = documents.into_iter();

// assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
// let mut matches = matches.into_iter();

@@ -487,8 +521,8 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let (results, _nb_hits) = builder.query(&reader, "hello", 0..20).unwrap();
let mut iter = results.into_iter();
let SortResult { documents, .. } = builder.query(&reader, "hello", 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
let mut matches = matches.into_iter();

@@ -508,8 +542,8 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
let (results, _nb_hits) = builder.query(&reader, "bonjour", 0..20).unwrap();
let mut iter = results.into_iter();
let SortResult { documents, .. } = builder.query(&reader, "bonjour", 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
let mut matches = matches.into_iter();

@@ -529,8 +563,8 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
let (results, _nb_hits) = builder.query(&reader, "salut", 0..20).unwrap();
let mut iter = results.into_iter();
let SortResult { documents, .. } = builder.query(&reader, "salut", 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
let mut matches = matches.into_iter();

@@ -575,8 +609,8 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let (results, _nb_hits) = builder.query(&reader, "NY subway", 0..20).unwrap();
let mut iter = results.into_iter();
let SortResult { documents, .. } = builder.query(&reader, "NY subway", 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
let mut iter = matches.into_iter();

@@ -597,8 +631,8 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
let (results, _nb_hits) = builder.query(&reader, "NYC subway", 0..20).unwrap();
let mut iter = results.into_iter();
let SortResult { documents, .. } = builder.query(&reader, "NYC subway", 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
let mut iter = matches.into_iter();

@@ -639,8 +673,8 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let (results, _nb_hits) = builder.query(&reader, "NY", 0..20).unwrap();
let mut iter = results.into_iter();
let SortResult {documents, .. } = builder.query(&reader, "NY", 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(2), matches, .. }) => {
let mut matches = matches.into_iter();

@@ -663,8 +697,8 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
let (results, _nb_hits) = builder.query(&reader, "new york", 0..20).unwrap();
let mut iter = results.into_iter();
let SortResult {documents, .. } = builder.query(&reader, "new york", 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
let mut matches = matches.into_iter();

@@ -697,8 +731,8 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let (results, _nb_hits) = builder.query(&reader, "NY subway", 0..20).unwrap();
let mut iter = results.into_iter();
let SortResult {documents, .. } = builder.query(&reader, "NY subway", 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
let mut matches = matches.into_iter();

@@ -714,8 +748,8 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
let (results, _nb_hits) = builder.query(&reader, "new york subway", 0..20).unwrap();
let mut iter = results.into_iter();
let SortResult {documents, .. } = builder.query(&reader, "new york subway", 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
let mut matches = matches.into_iter();

@@ -762,8 +796,8 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let (results, _nb_hits) = builder.query(&reader, "NY subway", 0..20).unwrap();
let mut iter = results.into_iter();
let SortResult {documents, .. } = builder.query(&reader, "NY subway", 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
let mut iter = matches.into_iter();

@@ -784,8 +818,8 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
let (results, _nb_hits) = builder.query(&reader, "NYC subway", 0..20).unwrap();
let mut iter = results.into_iter();
let SortResult {documents, .. } = builder.query(&reader, "NYC subway", 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
let mut iter = matches.into_iter();

@@ -837,8 +871,8 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let (results, _nb_hits) = builder.query(&reader, "NY subway broken", 0..20).unwrap();
let mut iter = results.into_iter();
let SortResult {documents, .. } = builder.query(&reader, "NY subway broken", 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
let mut iter = matches.into_iter();

@@ -853,8 +887,8 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
let (results, _nb_hits) = builder.query(&reader, "NYC subway", 0..20).unwrap();
let mut iter = results.into_iter();
let SortResult {documents, .. } = builder.query(&reader, "NYC subway", 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
let mut iter = matches.into_iter();

@@ -909,10 +943,10 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let (results, _nb_hits) = builder
let SortResult {documents, .. } = builder
.query(&reader, "new york underground train broken", 0..20)
.unwrap();
let mut iter = results.into_iter();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(2), matches, .. }) => {
let mut matches = matches.into_iter();

@@ -939,10 +973,10 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
let (results, _nb_hits) = builder
let SortResult {documents, .. } = builder
.query(&reader, "new york city underground train broken", 0..20)
.unwrap();
let mut iter = results.into_iter();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(2), matches, .. }) => {
let mut matches = matches.into_iter();

@@ -983,8 +1017,8 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let (results, _nb_hits) = builder.query(&reader, "new york big ", 0..20).unwrap();
let mut iter = results.into_iter();
let SortResult {documents, .. } = builder.query(&reader, "new york big ", 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
let mut matches = matches.into_iter();

@@ -1017,8 +1051,8 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let (results, _nb_hits) = builder.query(&reader, "NY subway ", 0..20).unwrap();
let mut iter = results.into_iter();
let SortResult {documents, .. } = builder.query(&reader, "NY subway ", 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
let mut matches = matches.into_iter();

@@ -1067,10 +1101,10 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let (results, _nb_hits) = builder
let SortResult {documents, .. } = builder
.query(&reader, "new york city long subway cool ", 0..20)
.unwrap();
let mut iter = results.into_iter();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
let mut matches = matches.into_iter();

@@ -1100,8 +1134,8 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let (results, _nb_hits) = builder.query(&reader, "telephone", 0..20).unwrap();
let mut iter = results.into_iter();
let SortResult {documents, .. } = builder.query(&reader, "telephone", 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
let mut iter = matches.into_iter();

@@ -1117,8 +1151,8 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
let (results, _nb_hits) = builder.query(&reader, "téléphone", 0..20).unwrap();
let mut iter = results.into_iter();
let SortResult {documents, .. } = builder.query(&reader, "téléphone", 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
let mut iter = matches.into_iter();

@@ -1134,8 +1168,8 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
let (results, _nb_hits) = builder.query(&reader, "télephone", 0..20).unwrap();
let mut iter = results.into_iter();
let SortResult {documents, .. } = builder.query(&reader, "télephone", 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
let mut iter = matches.into_iter();

@@ -1161,8 +1195,8 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let (results, _nb_hits) = builder.query(&reader, "i phone case", 0..20).unwrap();
let mut iter = results.into_iter();
let SortResult {documents, .. } = builder.query(&reader, "i phone case", 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
let mut iter = matches.into_iter();

@@ -1190,8 +1224,8 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let (results, _nb_hits) = builder.query(&reader, "searchengine", 0..20).unwrap();
let mut iter = results.into_iter();
let SortResult {documents, .. } = builder.query(&reader, "searchengine", 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
let mut iter = matches.into_iter();

@@ -1230,8 +1264,8 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let (results, _nb_hits) = builder.query(&reader, "searchengine", 0..20).unwrap();
let mut iter = results.into_iter();
let SortResult {documents, .. } = builder.query(&reader, "searchengine", 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
let mut iter = matches.into_iter();

@@ -1262,8 +1296,8 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let (results, _nb_hits) = builder.query(&reader, "searchengine", 0..20).unwrap();
let mut iter = results.into_iter();
let SortResult {documents, .. } = builder.query(&reader, "searchengine", 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
let mut iter = matches.into_iter();
@@ -1,10 +1,11 @@
use std::borrow::Cow;
use std::collections::HashMap;

use heed::{RwTxn, RoTxn, Result as ZResult};
use heed::{RwTxn, RoTxn, Result as ZResult, RoRange};
use sdset::{SetBuf, Set, SetOperation};

use meilisearch_types::DocumentId;
use meilisearch_schema::FieldId;

use crate::database::MainT;
use crate::facets::FacetKey;

@@ -22,6 +23,10 @@ impl Facets {
self.facets.put(writer, &facet_key, doc_ids)
}

pub fn field_document_ids<'txn>(&self, reader: &'txn RoTxn<MainT>, field_id: FieldId) -> ZResult<RoRange<'txn, FacetKey, CowSet<DocumentId>>> {
self.facets.prefix_iter(reader, &FacetKey::new(field_id, String::new()))
}

pub fn facet_document_ids<'txn>(&self, reader: &'txn RoTxn<MainT>, facet_key: &FacetKey) -> ZResult<Option<Cow<'txn, Set<DocumentId>>>> {
self.facets.get(reader, &facet_key)
}
@@ -363,10 +363,10 @@ impl Index {
QueryBuilder::new(self)
}

pub fn query_builder_with_criteria<'c, 'f, 'd, 'fa, 'i>(
pub fn query_builder_with_criteria<'c, 'f, 'd, 'i>(
&'i self,
criteria: Criteria<'c>,
) -> QueryBuilder<'c, 'f, 'd, 'fa, 'i> {
) -> QueryBuilder<'c, 'f, 'd, 'i> {
QueryBuilder::with_criteria(self, criteria)
}
}
@@ -27,6 +27,40 @@ pub enum ResponseError {
PayloadTooLarge,
UnsupportedMediaType,
FacetExpression(String),
FacetCount(String),
}

pub enum FacetCountError {
AttributeNotSet(String),
SyntaxError(String),
UnexpectedToken { found: String, expected: &'static [&'static str] },
NoFacetSet,
}

impl FacetCountError {
pub fn unexpected_token(found: impl ToString, expected: &'static [&'static str]) -> FacetCountError {
let found = found.to_string();
FacetCountError::UnexpectedToken { expected, found }
}
}

impl From<serde_json::error::Error> for FacetCountError {
fn from(other: serde_json::error::Error) -> FacetCountError {
FacetCountError::SyntaxError(other.to_string())
}
}

impl fmt::Display for FacetCountError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
use FacetCountError::*;

match self {
AttributeNotSet(attr) => write!(f, "attribute {} is not set as facet", attr),
SyntaxError(msg) => write!(f, "syntax error: {}", msg),
UnexpectedToken { expected, found } => write!(f, "unexpected {} found, expected {:?}", found, expected),
NoFacetSet => write!(f, "can't perform facet count, as no facet is set"),
}
}
}

impl ResponseError {

@@ -112,7 +146,8 @@ impl fmt::Display for ResponseError {
Self::SearchDocuments(err) => write!(f, "impossible to search documents; {}", err),
Self::FacetExpression(e) => write!(f, "error parsing facet filter expression: {}", e),
Self::PayloadTooLarge => f.write_str("Payload to large"),
Self::UnsupportedMediaType => f.write_str("Unsupported media type")
Self::UnsupportedMediaType => f.write_str("Unsupported media type"),
Self::FacetCount(e) => write!(f, "error with facet count: {}", e),
}
}
}

@@ -134,6 +169,7 @@ impl aweb::error::ResponseError for ResponseError {
| Self::RetrieveDocument(_, _)
| Self::FacetExpression(_)
| Self::SearchDocuments(_)
| Self::FacetCount(_)
| Self::FilterParsing(_) => StatusCode::BAD_REQUEST,
Self::DocumentNotFound(_)
| Self::IndexNotFound(_)

@@ -198,18 +234,23 @@ impl From<actix_http::Error> for ResponseError {
}
}

impl From<JsonPayloadError> for ResponseError {
fn from(err: JsonPayloadError) -> ResponseError {
match err {
JsonPayloadError::Deserialize(err) => ResponseError::BadRequest(format!("Invalid JSON: {}", err)),
JsonPayloadError::Overflow => ResponseError::PayloadTooLarge,
JsonPayloadError::ContentType => ResponseError::UnsupportedMediaType,
JsonPayloadError::Payload(err) => ResponseError::BadRequest(format!("Problem while decoding the request: {}", err)),
}
impl From<FacetCountError> for ResponseError {
fn from(other: FacetCountError) -> ResponseError {
ResponseError::FacetCount(other.to_string())
}
}

impl From<JsonPayloadError> for ResponseError {
fn from(err: JsonPayloadError) -> ResponseError {
match err {
JsonPayloadError::Deserialize(err) => ResponseError::BadRequest(format!("Invalid JSON: {}", err)),
JsonPayloadError::Overflow => ResponseError::PayloadTooLarge,
JsonPayloadError::ContentType => ResponseError::UnsupportedMediaType,
JsonPayloadError::Payload(err) => ResponseError::BadRequest(format!("Problem while decoding the request: {}", err)),
}
}
}

pub fn json_error_handler(err: JsonPayloadError) -> ResponseError {
err.into()
err.into()
}
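A small sketch of how the new error type is expected to surface to clients. The variants, Display strings, and From impl are the ones defined above; the sample value passed to `unexpected_token` is invented:

```rust
use crate::error::{FacetCountError, ResponseError};

// Sketch: a facet-count failure converted into the HTTP error type with .into(),
// which the BAD_REQUEST mapping above then turns into a 400 response.
fn demo() {
    let err = FacetCountError::unexpected_token(true, &["String"]);
    let response: ResponseError = err.into();
    // Rendered through the Display impls above:
    assert_eq!(
        response.to_string(),
        "error with facet count: unexpected true found, expected [\"String\"]"
    );
}
```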
@@ -36,6 +36,7 @@ impl IndexSearchExt for Index {
filters: None,
matches: false,
facet_filters: None,
facets: None,
}
}
}

@@ -51,6 +52,7 @@ pub struct SearchBuilder<'a> {
filters: Option<String>,
matches: bool,
facet_filters: Option<FacetFilter>,
facets: Option<Vec<(FieldId, String)>>
}

impl<'a> SearchBuilder<'a> {

@@ -100,7 +102,12 @@ impl<'a> SearchBuilder<'a> {
self
}

pub fn search(&self, reader: &heed::RoTxn<MainT>) -> Result<SearchResult, ResponseError> {
pub fn add_facets(&mut self, facets: Vec<(FieldId, String)>) -> &SearchBuilder {
self.facets = Some(facets);
self
}

pub fn search(self, reader: &heed::RoTxn<MainT>) -> Result<SearchResult, ResponseError> {
let schema = self
.index
.main

@@ -117,8 +124,8 @@ impl<'a> SearchBuilder<'a> {
if let Some(filter_expression) = &self.filters {
let filter = Filter::parse(filter_expression, &schema)?;
let index = &self.index;
query_builder.with_filter(move |id| {
let index = &self.index;
let reader = &reader;
let filter = &filter;
match filter.test(reader, index, id) {

@@ -133,8 +140,9 @@ impl<'a> SearchBuilder<'a> {
if let Some(field) = self.index.main.distinct_attribute(reader)? {
if let Some(field_id) = schema.id(&field) {
let index = &self.index;
query_builder.with_distinct(1, move |id| {
match self.index.document_attribute_bytes(reader, id, field_id) {
match index.document_attribute_bytes(reader, id, field_id) {
Ok(Some(bytes)) => {
let mut s = SipHasher::new();
bytes.hash(&mut s);

@@ -146,11 +154,12 @@ impl<'a> SearchBuilder<'a> {
}
}

query_builder.set_facets(self.facet_filters.as_ref());
query_builder.set_facet_filter(self.facet_filters);
query_builder.set_facets(self.facets);

let start = Instant::now();
let result = query_builder.query(reader, &self.query, self.offset..(self.offset + self.limit));
let (docs, nb_hits) = result.map_err(ResponseError::search_documents)?;
let search_result = result.map_err(ResponseError::search_documents)?;
let time_ms = start.elapsed().as_millis() as usize;

let mut all_attributes: HashSet<&str> = HashSet::new();

@@ -181,7 +190,7 @@ impl<'a> SearchBuilder<'a> {
}

let mut hits = Vec::with_capacity(self.limit);
for doc in docs {
for doc in search_result.documents {
let mut document: IndexMap<String, Value> = self
.index
.document(reader, Some(&all_attributes), doc.id)

@@ -235,10 +244,11 @@ impl<'a> SearchBuilder<'a> {
hits,
offset: self.offset,
limit: self.limit,
nb_hits,
exhaustive_nb_hits: false,
nb_hits: search_result.nb_hits,
exhaustive_nb_hits: search_result.exhaustive_nb_hit,
processing_time_ms: time_ms,
query: self.query.to_string(),
facets: search_result.facets,
};

Ok(results)

@@ -323,6 +333,7 @@ pub struct SearchResult {
pub exhaustive_nb_hits: bool,
pub processing_time_ms: usize,
pub query: String,
pub facets: Option<HashMap<String, HashMap<String, usize>>>,
}

/// returns the start index and the length on the crop.
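Since SearchResult now carries the counts, the JSON body of a search response gains a facets object. A hedged illustration of its nesting, derived from the `HashMap<String, HashMap<String, usize>>` type above; the attribute names and numbers are invented, and the exact field casing in the real response depends on serde attributes not shown in this hunk:

```rust
fn main() {
    // Built with serde_json just to picture the shape; this is not meilisearch code.
    let example = serde_json::json!({
        "facets": {
            "color": { "blue": 7, "red": 12 },
            "tags": { "new": 3 }
        }
    });
    println!("{}", example);
}
```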
@@ -5,14 +5,16 @@ use actix_web::web;
use actix_web::HttpResponse;
use actix_web_macros::get;
use serde::Deserialize;
use serde_json::Value;

use crate::error::ResponseError;
use crate::error::{ResponseError, FacetCountError};
use crate::helpers::meilisearch::IndexSearchExt;
use crate::helpers::Authentication;
use crate::routes::IndexParam;
use crate::Data;

use meilisearch_core::facets::FacetFilter;
use meilisearch_schema::{Schema, FieldId};

pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(search_with_url_query);

@@ -31,6 +33,7 @@ struct SearchQuery {
filters: Option<String>,
matches: Option<bool>,
facet_filters: Option<String>,
facets: Option<String>,
}

#[get("/indexes/{index_uid}/search", wrap = "Authentication::Public")]

@@ -91,6 +94,16 @@ async fn search_with_url_query(
}
}

if let Some(facets) = &params.facets {
match index.main.attributes_for_faceting(&reader)? {
Some(ref attrs) => {
let field_ids = prepare_facet_list(&facets, &schema, attrs)?;
search_builder.add_facets(field_ids);
},
None => return Err(FacetCountError::NoFacetSet.into())
}
}

if let Some(attributes_to_crop) = &params.attributes_to_crop {
let default_length = params.crop_length.unwrap_or(200);
let mut final_attributes: HashMap<String, usize> = HashMap::new();

@@ -150,3 +163,40 @@ async fn search_with_url_query(
Ok(HttpResponse::Ok().json(search_builder.search(&reader)?))
}
/// Parses the incoming string into an array of attributes for which to return a count. It returns
/// a Vec of attribute names associated with their id.
///
/// An error is returned if the array is malformed, or if it contains attributes that do not exist
/// or are not set as facets.
fn prepare_facet_list(facets: &str, schema: &Schema, facet_attrs: &[FieldId]) -> Result<Vec<(FieldId, String)>, FacetCountError> {
let json_array = serde_json::from_str(facets)?;
match json_array {
Value::Array(vals) => {
let wildcard = Value::String("*".to_string());
if vals.iter().any(|f| f == &wildcard) {
let attrs = facet_attrs
.iter()
.filter_map(|&id| schema.name(id).map(|n| (id, n.to_string())))
.collect();
return Ok(attrs);
}
let mut field_ids = Vec::with_capacity(facet_attrs.len());
for facet in vals {
match facet {
Value::String(facet) => {
if let Some(id) = schema.id(&facet) {
if !facet_attrs.contains(&id) {
return Err(FacetCountError::AttributeNotSet(facet));
}
field_ids.push((id, facet));
}
}
bad_val => return Err(FacetCountError::unexpected_token(bad_val, &["String"])),
}
}
Ok(field_ids)
}
bad_val => return Err(FacetCountError::unexpected_token(bad_val, &["[String]"]))
}
}
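The integration tests below exercise prepare_facet_list through URL-encoded query strings. For reference, here are the hand-decoded `facets` values they send, written as Rust string literals (the constant names are only labels for this note):

```rust
// Hand-decoded forms of the URL-encoded `facets` parameter used by the tests below.
const COUNT_ONE_ATTRIBUTE: &str = r#"["color"]"#;          // %5B%22color%22%5D
const COUNT_TWO_ATTRIBUTES: &str = r#"["color", "tags"]"#; // %5B%22color%22,%20%22tags%22%5D
const COUNT_EVERYTHING: &str = r#"["*"]"#;                 // %5B%22*%22%5D, wildcard: count every faceted attribute
const NOT_AN_ARRAY: &str = r#""gender""#;                  // %22gender%22, rejected with 400
const BAD_ENTRY: &str = r#"["color", true]"#;              // %5B%22color%22,%20true%5D, rejected with 400
```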
@@ -1292,3 +1292,54 @@ async fn test_faceted_search_invalid() {
let (_response, status_code) = server.search(query).await;
assert_ne!(status_code, 202);
}

#[actix_rt::test]
async fn test_facet_count() {
let mut server = common::Server::test_server().await;

// test no facets set, search on color
let query = "q=a&facets=%5B%22color%22%5D";
let (_response, status_code) = server.search(query).await;
assert_eq!(status_code, 400);

let body = json!({
"attributesForFaceting": ["color", "tags"]
});
server.update_all_settings(body).await;
// same as before, but now facets are set:
let (response, _status_code) = server.search(query).await;
assert_eq!(response.get("facets").unwrap().as_object().unwrap().values().count(), 1);
// searching on color and tags
let query = "q=a&facets=%5B%22color%22,%20%22tags%22%5D";
let (response, _status_code) = server.search(query).await;
let facets = response.get("facets").unwrap().as_object().unwrap();
eprintln!("response: {:#?}", response);
assert_eq!(facets.values().count(), 2);
assert_ne!(facets.get("color").unwrap().as_object().unwrap().values().count(), 0);
assert_ne!(facets.get("tags").unwrap().as_object().unwrap().values().count(), 0);
// wildcard
let query = "q=a&facets=%5B%22*%22%5D";
let (response, _status_code) = server.search(query).await;
assert_eq!(response.get("facets").unwrap().as_object().unwrap().values().count(), 2);
// wildcard with other attributes:
let query = "q=a&facets=%5B%22color%22,%20%22*%22%5D";
let (response, _status_code) = server.search(query).await;
assert_eq!(response.get("facets").unwrap().as_object().unwrap().values().count(), 2);
// empty facet list
let query = "q=a&facets=%5B%5D";
let (response, _status_code) = server.search(query).await;
assert_eq!(response.get("facets").unwrap().as_object().unwrap().values().count(), 0);

// attr not set as facet passed:
let query = "q=a&facets=%5B%22gender%22%5D";
let (_response, status_code) = server.search(query).await;
assert_eq!(status_code, 400);
// string instead of array:
let query = "q=a&facets=%22gender%22";
let (_response, status_code) = server.search(query).await;
assert_eq!(status_code, 400);
// invalid value in array:
let query = "q=a&facets=%5B%22color%22,%20true%5D";
let (_response, status_code) = server.search(query).await;
assert_eq!(status_code, 400);
}