#![allow(clippy::too_many_arguments)]

use std::collections::VecDeque;

use fxhash::FxHashMap;
use heed::{BytesDecode, RoTxn};
use roaring::RoaringBitmap;

use super::db_cache::DatabaseCache;
use super::interner::{DedupInterner, Interned};
use super::query_graph::QueryNodeData;
use super::query_term::{Phrase, QueryTerm};
use super::small_bitmap::SmallBitmap;
use super::{QueryGraph, SearchContext};
use crate::{CboRoaringBitmapCodec, Index, Result, RoaringBitmapCodec};
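
/// A cache storing the document ids associated with each interned phrase and query term,
/// so that they are only computed once even if they are requested several times.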
#[derive(Default)]
pub struct QueryTermDocIdsCache {
    pub phrases: FxHashMap<Interned<Phrase>, RoaringBitmap>,
    pub terms: FxHashMap<Interned<QueryTerm>, RoaringBitmap>,
}

impl QueryTermDocIdsCache {
    /// Get the document ids associated with the given phrase
    pub fn get_phrase_docids<'s, 'ctx>(
        &'s mut self,
        index: &Index,
        txn: &'ctx RoTxn,
        db_cache: &mut DatabaseCache<'ctx>,
        word_interner: &DedupInterner<String>,
        phrase_interner: &DedupInterner<Phrase>,
        phrase: Interned<Phrase>,
    ) -> Result<&'s RoaringBitmap> {
        if self.phrases.contains_key(&phrase) {
            return Ok(&self.phrases[&phrase]);
        };
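        // Resolve the phrase, cache the resulting bitmap, and return a reference to it.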
        let docids = resolve_phrase(index, txn, db_cache, word_interner, phrase_interner, phrase)?;
        let _ = self.phrases.insert(phrase, docids);
        let docids = &self.phrases[&phrase];
        Ok(docids)
    }

    /// Get the document ids associated with the given term
    pub fn get_query_term_docids<'s, 'ctx>(
        &'s mut self,
        index: &Index,
        txn: &'ctx RoTxn,
        db_cache: &mut DatabaseCache<'ctx>,
        word_interner: &DedupInterner<String>,
        term_interner: &DedupInterner<QueryTerm>,
        phrase_interner: &DedupInterner<Phrase>,
        term_interned: Interned<QueryTerm>,
    ) -> Result<&'s RoaringBitmap> {
        if self.terms.contains_key(&term_interned) {
            return Ok(&self.terms[&term_interned]);
        };
        let mut docids = RoaringBitmap::new();
        // TODO: use a MultiOps?
        let term = term_interner.get(term_interned);
        for word in term.all_single_words_except_prefix_db() {
            if let Some(word_docids) = db_cache.get_word_docids(index, txn, word_interner, word)? {
                docids |=
                    RoaringBitmapCodec::bytes_decode(word_docids).ok_or(heed::Error::Decoding)?;
            }
        }
        for phrase in term.all_phrases() {
            docids |= self.get_phrase_docids(
                index,
                txn,
                db_cache,
                word_interner,
                phrase_interner,
                phrase,
            )?;
        }
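
        // If the term also has a prefix stored in the word prefix database, add the
        // docids of that prefix as well.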
        if let Some(prefix) = term.use_prefix_db {
            if let Some(prefix_docids) =
                db_cache.get_word_prefix_docids(index, txn, word_interner, prefix)?
            {
                docids |=
                    RoaringBitmapCodec::bytes_decode(prefix_docids).ok_or(heed::Error::Decoding)?;
            }
        }

        let _ = self.terms.insert(term_interned, docids);
        let docids = &self.terms[&term_interned];
        Ok(docids)
    }
}
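
/// Resolve the given query graph, returning the set of document ids matching it within
/// the given universe. Nodes are resolved in an order where every node is visited after
/// all of its predecessors.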
pub fn resolve_query_graph(
    ctx: &mut SearchContext,
    q: &QueryGraph,
    universe: &RoaringBitmap,
) -> Result<RoaringBitmap> {
    let SearchContext {
        index,
        txn,
        db_cache,
        word_interner,
        phrase_interner,
        term_interner,
        term_docids: query_term_docids,
        ..
    } = ctx;
    // TODO: there is a faster way to compute this big
    // roaring bitmap expression

    let mut nodes_resolved = SmallBitmap::for_interned_values_in(&q.nodes);
    let mut path_nodes_docids = q.nodes.map(|_| RoaringBitmap::new());

    let mut next_nodes_to_visit = VecDeque::new();
    next_nodes_to_visit.push_back(q.root_node);
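
    // Visit the nodes in breadth-first order, resolving a node only once all of its
    // predecessors have been resolved. A term node's docids are the union of its
    // predecessors' docids intersected with the docids of the term itself.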
    while let Some(node_id) = next_nodes_to_visit.pop_front() {
        let node = q.nodes.get(node_id);
        let predecessors = &node.predecessors;
        if !predecessors.is_subset(&nodes_resolved) {
            next_nodes_to_visit.push_back(node_id);
            continue;
        }
        // Take union of all predecessors
        let mut predecessors_docids = RoaringBitmap::new();
        for p in predecessors.iter() {
            predecessors_docids |= path_nodes_docids.get(p);
        }

        let node_docids = match &node.data {
            QueryNodeData::Term(located_term) => {
                let term_docids = query_term_docids.get_query_term_docids(
                    index,
                    txn,
                    db_cache,
                    word_interner,
                    term_interner,
                    phrase_interner,
                    located_term.value,
                )?;
                predecessors_docids & term_docids
            }
            QueryNodeData::Deleted => {
                panic!()
            }
            QueryNodeData::Start => universe.clone(),
            QueryNodeData::End => {
                return Ok(predecessors_docids);
            }
        };
        nodes_resolved.insert(node_id);
        *path_nodes_docids.get_mut(node_id) = node_docids;

        for succ in node.successors.iter() {
            if !next_nodes_to_visit.contains(&succ) && !nodes_resolved.contains(succ) {
                next_nodes_to_visit.push_back(succ);
            }
        }
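
        // Clear the docids of the predecessors whose successors have all been resolved,
        // as they are not needed anymore.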
        for prec in node.predecessors.iter() {
            if q.nodes.get(prec).successors.is_subset(&nodes_resolved) {
                path_nodes_docids.get_mut(prec).clear();
            }
        }
    }
    panic!()
}
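
/// Compute the document ids associated with the given phrase by intersecting, for each
/// pair of words within a sliding window of the phrase, the documents that contain the
/// pair at a proximity compatible with the distance between the two words.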
pub fn resolve_phrase<'ctx>(
    index: &Index,
    txn: &'ctx RoTxn,
    db_cache: &mut DatabaseCache<'ctx>,
    word_interner: &DedupInterner<String>,
    phrase_interner: &DedupInterner<Phrase>,
    phrase: Interned<Phrase>,
) -> Result<RoaringBitmap> {
    let Phrase { words } = phrase_interner.get(phrase).clone();
    let mut candidates = RoaringBitmap::new();
    let mut first_iter = true;
    let winsize = words.len().min(3);

    if words.is_empty() {
        return Ok(candidates);
    }
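
    // Iterate over the phrase with a sliding window of up to three words, gathering the
    // documents containing each pair of words in the window at a compatible proximity.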
    for win in words.windows(winsize) {
        // Get all the documents with the matching distance for each word pair.
        let mut bitmaps = Vec::with_capacity(winsize.pow(2));
        for (offset, &s1) in win
            .iter()
            .enumerate()
            .filter_map(|(index, word)| word.as_ref().map(|word| (index, word)))
        {
            for (dist, &s2) in win
                .iter()
                .skip(offset + 1)
                .enumerate()
                .filter_map(|(index, word)| word.as_ref().map(|word| (index, word)))
            {
                if dist == 0 {
                    match db_cache.get_word_pair_proximity_docids(
                        index,
                        txn,
                        word_interner,
                        s1,
                        s2,
                        1,
                    )? {
                        Some(m) => bitmaps.push(CboRoaringBitmapCodec::deserialize_from(m)?),
                        // If there are no documents for this pair, there will be no
                        // results for the phrase query.
                        None => return Ok(RoaringBitmap::new()),
                    }
                } else {
                    let mut bitmap = RoaringBitmap::new();
                    for dist in 0..=dist {
                        if let Some(m) = db_cache.get_word_pair_proximity_docids(
                            index,
                            txn,
                            word_interner,
                            s1,
                            s2,
                            dist as u8 + 1,
                        )? {
                            bitmap |= CboRoaringBitmapCodec::deserialize_from(m)?;
                        }
                    }
                    if bitmap.is_empty() {
                        return Ok(bitmap);
                    } else {
                        bitmaps.push(bitmap);
                    }
                }
            }
        }

        // We sort the bitmaps so that we perform the small intersections first, which is faster.
        bitmaps.sort_unstable_by_key(|a| a.len());

        for bitmap in bitmaps {
            if first_iter {
                candidates = bitmap;
                first_iter = false;
            } else {
                candidates &= bitmap;
            }
            // There will be no match, return early
            if candidates.is_empty() {
                break;
            }
        }
    }
    Ok(candidates)
}