Mirror of https://github.com/meilisearch/meilisearch.git (synced 2024-11-26 12:05:05 +08:00)
Make milli use edition 2021 (#4770)

* Make milli use edition 2021
* Add lifetime annotations to milli.
* Run cargo fmt

parent aac15f6719
commit 0a40a98bb6
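Most of the diff is mechanical: Cargo.toml bumps the crate from edition 2018 to 2021, and the remaining hunks add the anonymous lifetime `'_` wherever a lifetime-carrying type was named without its parameter (`RoTxn` becomes `RoTxn<'_>`, `fmt::Formatter` becomes `fmt::Formatter<'_>`, and so on), so the borrow is visible in the signature. Below is a minimal sketch of the pattern; the `Wrapper` type is made up for illustration, and as far as the edition itself goes these annotations are what the `elided_lifetimes_in_paths` idiom lint asks for, not something edition 2021 strictly requires.

```rust
use std::fmt;

// A type that borrows data, much like heed's RoTxn<'e> or obkv's KvReader.
struct Wrapper<'a>(&'a str);

impl fmt::Display for Wrapper<'_> {
    // Edition-2018 style would write `f: &mut fmt::Formatter`; that compiles,
    // but hides the borrow. The `<'_>` below is the anonymous lifetime this
    // commit adds throughout milli.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

fn main() {
    println!("{}", Wrapper("milli"));
}
```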
@@ -1,6 +1,6 @@
 [package]
 name = "milli"
-edition = "2018"
+edition = "2021"
 publish = false

 version.workspace = true
@@ -95,7 +95,7 @@ impl<R: io::Read + io::Seek> EnrichedDocumentsBatchCursor<R> {
     /// `next_document` advance the document reader until all the documents have been read.
     pub fn next_enriched_document(
         &mut self,
-    ) -> Result<Option<EnrichedDocument>, DocumentsBatchCursorError> {
+    ) -> Result<Option<EnrichedDocument<'_>>, DocumentsBatchCursorError> {
         let document = self.documents.next_document()?;
         let document_id = match self.external_ids.move_on_next()? {
             Some((_, bytes)) => serde_json::from_slice(bytes).map(Some)?,
@@ -27,7 +27,7 @@ use crate::{FieldId, Object, Result};
 const DOCUMENTS_BATCH_INDEX_KEY: [u8; 8] = u64::MAX.to_be_bytes();

 /// Helper function to convert an obkv reader into a JSON object.
-pub fn obkv_to_object(obkv: &KvReader<FieldId>, index: &DocumentsBatchIndex) -> Result<Object> {
+pub fn obkv_to_object(obkv: &KvReader<'_, FieldId>, index: &DocumentsBatchIndex) -> Result<Object> {
     obkv.iter()
         .map(|(field_id, value)| {
             let field_name = index
@@ -64,7 +64,7 @@ impl DocumentsBatchIndex {
         self.0.len()
     }

-    pub fn iter(&self) -> bimap::hash::Iter<FieldId, String> {
+    pub fn iter(&self) -> bimap::hash::Iter<'_, FieldId, String> {
         self.0.iter()
     }

@@ -76,7 +76,7 @@ impl DocumentsBatchIndex {
         self.0.get_by_right(name).cloned()
     }

-    pub fn recreate_json(&self, document: &obkv::KvReaderU16) -> Result<Object> {
+    pub fn recreate_json(&self, document: &obkv::KvReaderU16<'_>) -> Result<Object> {
         let mut map = Object::new();

         for (k, v) in document.iter() {
@@ -52,7 +52,7 @@ impl<'a> PrimaryKey<'a> {

     pub fn document_id(
         &self,
-        document: &obkv::KvReader<FieldId>,
+        document: &obkv::KvReader<'_, FieldId>,
         fields: &impl FieldIdMapper,
     ) -> Result<StdResult<String, DocumentIdExtractionError>> {
         match self {
@@ -76,7 +76,7 @@ impl<R: io::Read + io::Seek> DocumentsBatchCursor<R> {
     /// `next_document` advance the document reader until all the documents have been read.
     pub fn next_document(
         &mut self,
-    ) -> Result<Option<KvReader<FieldId>>, DocumentsBatchCursorError> {
+    ) -> Result<Option<KvReader<'_, FieldId>>, DocumentsBatchCursorError> {
         match self.cursor.move_on_next()? {
             Some((key, value)) if key != DOCUMENTS_BATCH_INDEX_KEY => {
                 Ok(Some(KvReader::new(value)))
@@ -108,7 +108,7 @@ impl From<serde_json::Error> for DocumentsBatchCursorError {
 impl error::Error for DocumentsBatchCursorError {}

 impl fmt::Display for DocumentsBatchCursorError {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             DocumentsBatchCursorError::Grenad(e) => e.fmt(f),
             DocumentsBatchCursorError::SerdeJson(e) => e.fmt(f),
@@ -56,7 +56,7 @@ impl<'a, 'de, W: Write> Visitor<'de> for &mut DocumentVisitor<'a, W> {
         Ok(Ok(()))
     }

-    fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "a documents, or a sequence of documents.")
     }
 }
@@ -24,17 +24,21 @@ impl ExternalDocumentsIds {
     }

     /// Returns `true` if hard and soft external documents lists are empty.
-    pub fn is_empty(&self, rtxn: &RoTxn) -> heed::Result<bool> {
+    pub fn is_empty(&self, rtxn: &RoTxn<'_>) -> heed::Result<bool> {
         self.0.is_empty(rtxn).map_err(Into::into)
     }

-    pub fn get<A: AsRef<str>>(&self, rtxn: &RoTxn, external_id: A) -> heed::Result<Option<u32>> {
+    pub fn get<A: AsRef<str>>(
+        &self,
+        rtxn: &RoTxn<'_>,
+        external_id: A,
+    ) -> heed::Result<Option<u32>> {
         self.0.get(rtxn, external_id.as_ref())
     }

     /// An helper function to debug this type, returns an `HashMap` of both,
     /// soft and hard fst maps, combined.
-    pub fn to_hash_map(&self, rtxn: &RoTxn) -> heed::Result<HashMap<String, u32>> {
+    pub fn to_hash_map(&self, rtxn: &RoTxn<'_>) -> heed::Result<HashMap<String, u32>> {
         let mut map = HashMap::default();
         for result in self.0.iter(rtxn)? {
             let (external, internal) = result?;
@@ -51,7 +55,11 @@ impl ExternalDocumentsIds {
     ///
     /// - If attempting to delete a document that doesn't exist
     /// - If attempting to create a document that already exists
-    pub fn apply(&self, wtxn: &mut RwTxn, operations: Vec<DocumentOperation>) -> heed::Result<()> {
+    pub fn apply(
+        &self,
+        wtxn: &mut RwTxn<'_>,
+        operations: Vec<DocumentOperation>,
+    ) -> heed::Result<()> {
         for DocumentOperation { external_id, internal_id, kind } in operations {
             match kind {
                 DocumentOperationKind::Create => {
@@ -69,7 +77,7 @@ impl ExternalDocumentsIds {
     }

     /// Returns an iterator over all the external ids.
-    pub fn iter<'t>(&self, rtxn: &'t RoTxn) -> heed::Result<RoIter<'t, Str, BEU32>> {
+    pub fn iter<'t>(&self, rtxn: &'t RoTxn<'_>) -> heed::Result<RoIter<'t, Str, BEU32>> {
         self.0.iter(rtxn)
     }
 }
@@ -11,7 +11,7 @@ pub enum FacetType {
 }

 impl fmt::Display for FacetType {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             FacetType::String => f.write_str("string"),
             FacetType::Number => f.write_str("number"),
@@ -37,7 +37,7 @@ impl FromStr for FacetType {
 pub struct InvalidFacetType;

 impl fmt::Display for InvalidFacetType {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         f.write_str(r#"Invalid facet type, must be "string" or "number""#)
     }
 }
@@ -20,7 +20,7 @@ impl<'a> heed::BytesDecode<'a> for BEU16StrCodec {
 impl<'a> heed::BytesEncode<'a> for BEU16StrCodec {
     type EItem = (u16, &'a str);

-    fn bytes_encode((n, s): &Self::EItem) -> Result<Cow<[u8]>, BoxedError> {
+    fn bytes_encode((n, s): &Self::EItem) -> Result<Cow<'a, [u8]>, BoxedError> {
         let mut bytes = Vec::with_capacity(s.len() + 2);
         bytes.extend_from_slice(&n.to_be_bytes());
         bytes.extend_from_slice(s.as_bytes());
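The codec hunks above and below all follow one sub-pattern: in heed's `BytesEncode<'a>` trait the returned `Cow` may borrow from the encoded item, so the return type is spelled `Cow<'a, [u8]>` (or `Cow<'_, [u8]>` where the impl elides the lifetime) instead of `Cow<[u8]>`. A self-contained sketch with a simplified stand-in trait, not heed's actual definition:

```rust
use std::borrow::Cow;

// Simplified stand-in for heed's BytesEncode trait (illustration only).
trait BytesEncode<'a> {
    type EItem: ?Sized + 'a;

    // `Cow<'a, [u8]>` names the borrow from the item; writing `Cow<[u8]>`
    // would silently elide the same lifetime.
    fn bytes_encode(item: &'a Self::EItem) -> Cow<'a, [u8]>;
}

struct StrCodec;

impl<'a> BytesEncode<'a> for StrCodec {
    type EItem = str;

    fn bytes_encode(item: &'a Self::EItem) -> Cow<'a, [u8]> {
        // Borrows the input; no allocation needed.
        Cow::Borrowed(item.as_bytes())
    }
}

fn main() {
    assert_eq!(StrCodec::bytes_encode("milli").as_ref(), b"milli" as &[u8]);
}
```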
@@ -20,7 +20,7 @@ impl<'a> heed::BytesDecode<'a> for BEU32StrCodec {
 impl<'a> heed::BytesEncode<'a> for BEU32StrCodec {
     type EItem = (u32, &'a str);

-    fn bytes_encode((n, s): &Self::EItem) -> Result<Cow<[u8]>, BoxedError> {
+    fn bytes_encode((n, s): &Self::EItem) -> Result<Cow<'a, [u8]>, BoxedError> {
         let mut bytes = Vec::with_capacity(s.len() + 4);
         bytes.extend_from_slice(&n.to_be_bytes());
         bytes.extend_from_slice(s.as_bytes());
@@ -35,7 +35,7 @@ where

     fn bytes_encode(
         (field_id, document_id, value): &'a Self::EItem,
-    ) -> Result<Cow<[u8]>, BoxedError> {
+    ) -> Result<Cow<'a, [u8]>, BoxedError> {
         let mut bytes = Vec::with_capacity(32);
         bytes.extend_from_slice(&field_id.to_be_bytes()); // 2 bytes
         bytes.extend_from_slice(&document_id.to_be_bytes()); // 4 bytes
@@ -24,7 +24,7 @@ impl<'a> BytesDecode<'a> for OrderedF64Codec {
 impl heed::BytesEncode<'_> for OrderedF64Codec {
     type EItem = f64;

-    fn bytes_encode(f: &Self::EItem) -> Result<Cow<[u8]>, BoxedError> {
+    fn bytes_encode(f: &Self::EItem) -> Result<Cow<'_, [u8]>, BoxedError> {
         let mut buffer = [0u8; 16];

         // write the globally ordered float
@@ -21,7 +21,7 @@ impl<'a> heed::BytesDecode<'a> for FieldIdWordCountCodec {
 impl<'a> heed::BytesEncode<'a> for FieldIdWordCountCodec {
     type EItem = (FieldId, u8);

-    fn bytes_encode((field_id, word_count): &Self::EItem) -> Result<Cow<[u8]>, BoxedError> {
+    fn bytes_encode((field_id, word_count): &Self::EItem) -> Result<Cow<'a, [u8]>, BoxedError> {
         let mut bytes = Vec::with_capacity(2 + 1);
         bytes.extend_from_slice(&field_id.to_be_bytes());
         bytes.push(*word_count);
@@ -16,7 +16,7 @@ impl<'a> heed::BytesDecode<'a> for ObkvCodec {
 impl heed::BytesEncode<'_> for ObkvCodec {
     type EItem = KvWriterU16<Vec<u8>>;

-    fn bytes_encode(item: &Self::EItem) -> Result<Cow<[u8]>, BoxedError> {
+    fn bytes_encode(item: &Self::EItem) -> Result<Cow<'_, [u8]>, BoxedError> {
         item.clone().into_inner().map(Cow::Owned).map_err(Into::into)
     }
 }
@@ -42,7 +42,7 @@ impl BytesDecodeOwned for BoRoaringBitmapCodec {
 impl heed::BytesEncode<'_> for BoRoaringBitmapCodec {
     type EItem = RoaringBitmap;

-    fn bytes_encode(item: &Self::EItem) -> Result<Cow<[u8]>, BoxedError> {
+    fn bytes_encode(item: &Self::EItem) -> Result<Cow<'_, [u8]>, BoxedError> {
         let mut out = Vec::new();
         BoRoaringBitmapCodec::serialize_into(item, &mut out);
         Ok(Cow::Owned(out))
@@ -167,7 +167,7 @@ impl BytesDecodeOwned for CboRoaringBitmapCodec {
 impl heed::BytesEncode<'_> for CboRoaringBitmapCodec {
     type EItem = RoaringBitmap;

-    fn bytes_encode(item: &Self::EItem) -> Result<Cow<[u8]>, BoxedError> {
+    fn bytes_encode(item: &Self::EItem) -> Result<Cow<'_, [u8]>, BoxedError> {
         let mut vec = Vec::with_capacity(Self::serialized_size(item));
         Self::serialize_into(item, &mut vec);
         Ok(Cow::Owned(vec))
@@ -26,7 +26,7 @@ impl BytesDecodeOwned for RoaringBitmapCodec {
 impl heed::BytesEncode<'_> for RoaringBitmapCodec {
     type EItem = RoaringBitmap;

-    fn bytes_encode(item: &Self::EItem) -> Result<Cow<[u8]>, BoxedError> {
+    fn bytes_encode(item: &Self::EItem) -> Result<Cow<'_, [u8]>, BoxedError> {
         let mut bytes = Vec::with_capacity(item.serialized_size());
         item.serialize_into(&mut bytes)?;
         Ok(Cow::Owned(bytes))
@@ -25,7 +25,7 @@ impl<'a> heed::BytesDecode<'a> for ScriptLanguageCodec {
 impl<'a> heed::BytesEncode<'a> for ScriptLanguageCodec {
     type EItem = (Script, Language);

-    fn bytes_encode((script, lan): &Self::EItem) -> Result<Cow<[u8]>, BoxedError> {
+    fn bytes_encode((script, lan): &Self::EItem) -> Result<Cow<'a, [u8]>, BoxedError> {
         let script_name = script.name().as_bytes();
         let lan_name = lan.name().as_bytes();

@@ -30,7 +30,7 @@ impl<'a> heed::BytesDecode<'a> for StrBEU32Codec {
 impl<'a> heed::BytesEncode<'a> for StrBEU32Codec {
     type EItem = (&'a str, u32);

-    fn bytes_encode((word, pos): &Self::EItem) -> Result<Cow<[u8]>, BoxedError> {
+    fn bytes_encode((word, pos): &Self::EItem) -> Result<Cow<'a, [u8]>, BoxedError> {
         let pos = pos.to_be_bytes();

         let mut bytes = Vec::with_capacity(word.len() + pos.len());
@@ -66,7 +66,7 @@ impl<'a> heed::BytesDecode<'a> for StrBEU16Codec {
 impl<'a> heed::BytesEncode<'a> for StrBEU16Codec {
     type EItem = (&'a str, u16);

-    fn bytes_encode((word, pos): &Self::EItem) -> Result<Cow<[u8]>, BoxedError> {
+    fn bytes_encode((word, pos): &Self::EItem) -> Result<Cow<'a, [u8]>, BoxedError> {
         let pos = pos.to_be_bytes();

         let mut bytes = Vec::with_capacity(word.len() + 1 + pos.len());
@@ -24,7 +24,7 @@ impl<'a> heed::BytesDecode<'a> for U8StrStrCodec {
 impl<'a> heed::BytesEncode<'a> for U8StrStrCodec {
     type EItem = (u8, &'a str, &'a str);

-    fn bytes_encode((n, s1, s2): &Self::EItem) -> Result<Cow<[u8]>, BoxedError> {
+    fn bytes_encode((n, s1, s2): &Self::EItem) -> Result<Cow<'a, [u8]>, BoxedError> {
         let mut bytes = Vec::with_capacity(s1.len() + s2.len() + 1);
         bytes.push(*n);
         bytes.extend_from_slice(s1.as_bytes());
@@ -51,7 +51,7 @@ impl<'a> heed::BytesDecode<'a> for UncheckedU8StrStrCodec {
 impl<'a> heed::BytesEncode<'a> for UncheckedU8StrStrCodec {
     type EItem = (u8, &'a [u8], &'a [u8]);

-    fn bytes_encode((n, s1, s2): &Self::EItem) -> Result<Cow<[u8]>, BoxedError> {
+    fn bytes_encode((n, s1, s2): &Self::EItem) -> Result<Cow<'a, [u8]>, BoxedError> {
         let mut bytes = Vec::with_capacity(s1.len() + s2.len() + 1);
         bytes.push(*n);
         bytes.extend_from_slice(s1);
@@ -287,12 +287,12 @@ impl Index {
     }

     /// Create a write transaction to be able to write into the index.
-    pub fn write_txn(&self) -> heed::Result<RwTxn> {
+    pub fn write_txn(&self) -> heed::Result<RwTxn<'_>> {
         self.env.write_txn()
     }

     /// Create a read transaction to be able to read the index.
-    pub fn read_txn(&self) -> heed::Result<RoTxn> {
+    pub fn read_txn(&self) -> heed::Result<RoTxn<'_>> {
         self.env.read_txn()
     }

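The transaction getters above now advertise that the returned transaction borrows the LMDB environment. A hypothetical caller, assuming a `milli::Index` is already open; `write_txn` is the method from the hunk above and `commit` is heed's usual transaction finalizer:

```rust
use milli::Index;

// Sketch only: open a write transaction, mutate, then commit. The
// `RwTxn<'_>` returned by `write_txn` borrows the index's environment,
// which the new signature makes explicit.
fn touch_index(index: &Index) -> heed::Result<()> {
    let wtxn = index.write_txn()?;
    // ... writes would go through `&mut wtxn` here ...
    wtxn.commit()?;
    Ok(())
}
```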
@@ -344,7 +344,7 @@ impl Index {
     /// Writes the documents ids that corresponds to the user-ids-documents-ids FST.
     pub(crate) fn put_documents_ids(
         &self,
-        wtxn: &mut RwTxn,
+        wtxn: &mut RwTxn<'_>,
         docids: &RoaringBitmap,
     ) -> heed::Result<()> {
         self.main.remap_types::<Str, RoaringBitmapCodec>().put(
@@ -355,7 +355,7 @@ impl Index {
     }

     /// Returns the internal documents ids.
-    pub fn documents_ids(&self, rtxn: &RoTxn) -> heed::Result<RoaringBitmap> {
+    pub fn documents_ids(&self, rtxn: &RoTxn<'_>) -> heed::Result<RoaringBitmap> {
         Ok(self
             .main
             .remap_types::<Str, RoaringBitmapCodec>()
@@ -364,7 +364,7 @@ impl Index {
     }

     /// Returns the number of documents indexed in the database.
-    pub fn number_of_documents(&self, rtxn: &RoTxn) -> Result<u64> {
+    pub fn number_of_documents(&self, rtxn: &RoTxn<'_>) -> Result<u64> {
         let count = self
             .main
             .remap_types::<Str, RoaringBitmapLenCodec>()
@@ -375,18 +375,22 @@ impl Index {
     /* primary key */

     /// Writes the documents primary key, this is the field name that is used to store the id.
-    pub(crate) fn put_primary_key(&self, wtxn: &mut RwTxn, primary_key: &str) -> heed::Result<()> {
+    pub(crate) fn put_primary_key(
+        &self,
+        wtxn: &mut RwTxn<'_>,
+        primary_key: &str,
+    ) -> heed::Result<()> {
         self.set_updated_at(wtxn, &OffsetDateTime::now_utc())?;
         self.main.remap_types::<Str, Str>().put(wtxn, main_key::PRIMARY_KEY_KEY, primary_key)
     }

     /// Deletes the primary key of the documents, this can be done to reset indexes settings.
-    pub(crate) fn delete_primary_key(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
+    pub(crate) fn delete_primary_key(&self, wtxn: &mut RwTxn<'_>) -> heed::Result<bool> {
         self.main.remap_key_type::<Str>().delete(wtxn, main_key::PRIMARY_KEY_KEY)
     }

     /// Returns the documents primary key, `None` if it hasn't been defined.
-    pub fn primary_key<'t>(&self, rtxn: &'t RoTxn) -> heed::Result<Option<&'t str>> {
+    pub fn primary_key<'t>(&self, rtxn: &'t RoTxn<'_>) -> heed::Result<Option<&'t str>> {
         self.main.remap_types::<Str, Str>().get(rtxn, main_key::PRIMARY_KEY_KEY)
     }

@@ -404,7 +408,7 @@ impl Index {
     /// (i.e. `u8`), this field id is used to identify fields in the obkv documents.
     pub(crate) fn put_fields_ids_map(
         &self,
-        wtxn: &mut RwTxn,
+        wtxn: &mut RwTxn<'_>,
         map: &FieldsIdsMap,
     ) -> heed::Result<()> {
         self.main.remap_types::<Str, SerdeJson<FieldsIdsMap>>().put(
@@ -416,7 +420,7 @@ impl Index {

     /// Returns the fields ids map which associate the documents keys with an internal field id
     /// (i.e. `u8`), this field id is used to identify fields in the obkv documents.
-    pub fn fields_ids_map(&self, rtxn: &RoTxn) -> heed::Result<FieldsIdsMap> {
+    pub fn fields_ids_map(&self, rtxn: &RoTxn<'_>) -> heed::Result<FieldsIdsMap> {
         Ok(self
             .main
             .remap_types::<Str, SerdeJson<FieldsIdsMap>>()
@@ -431,7 +435,7 @@ impl Index {
     /// Writes the fieldids weights map which associates the field ids to their weights
     pub(crate) fn put_fieldids_weights_map(
         &self,
-        wtxn: &mut RwTxn,
+        wtxn: &mut RwTxn<'_>,
         map: &FieldidsWeightsMap,
     ) -> heed::Result<()> {
         self.main.remap_types::<Str, SerdeJson<_>>().put(
@@ -442,7 +446,7 @@ impl Index {
     }

     /// Get the fieldids weights map which associates the field ids to their weights
-    pub fn fieldids_weights_map(&self, rtxn: &RoTxn) -> heed::Result<FieldidsWeightsMap> {
+    pub fn fieldids_weights_map(&self, rtxn: &RoTxn<'_>) -> heed::Result<FieldidsWeightsMap> {
         self.main
             .remap_types::<Str, SerdeJson<_>>()
             .get(rtxn, main_key::FIELDIDS_WEIGHTS_MAP_KEY)?
@@ -455,13 +459,13 @@ impl Index {
     }

     /// Delete the fieldsids weights map
-    pub fn delete_fieldids_weights_map(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
+    pub fn delete_fieldids_weights_map(&self, wtxn: &mut RwTxn<'_>) -> heed::Result<bool> {
         self.main.remap_key_type::<Str>().delete(wtxn, main_key::FIELDIDS_WEIGHTS_MAP_KEY)
     }

     pub fn searchable_fields_and_weights<'a>(
         &self,
-        rtxn: &'a RoTxn,
+        rtxn: &'a RoTxn<'a>,
     ) -> Result<Vec<(Cow<'a, str>, FieldId, Weight)>> {
         let fid_map = self.fields_ids_map(rtxn)?;
         let weight_map = self.fieldids_weights_map(rtxn)?;
@@ -488,7 +492,7 @@ impl Index {
     /// Writes the provided `rtree` which associates coordinates to documents ids.
     pub(crate) fn put_geo_rtree(
         &self,
-        wtxn: &mut RwTxn,
+        wtxn: &mut RwTxn<'_>,
         rtree: &RTree<GeoPoint>,
     ) -> heed::Result<()> {
         self.main.remap_types::<Str, SerdeBincode<RTree<GeoPoint>>>().put(
@@ -499,12 +503,12 @@ impl Index {
     }

     /// Delete the `rtree` which associates coordinates to documents ids.
-    pub(crate) fn delete_geo_rtree(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
+    pub(crate) fn delete_geo_rtree(&self, wtxn: &mut RwTxn<'_>) -> heed::Result<bool> {
         self.main.remap_key_type::<Str>().delete(wtxn, main_key::GEO_RTREE_KEY)
     }

     /// Returns the `rtree` which associates coordinates to documents ids.
-    pub fn geo_rtree(&self, rtxn: &RoTxn) -> Result<Option<RTree<GeoPoint>>> {
+    pub fn geo_rtree(&self, rtxn: &RoTxn<'_>) -> Result<Option<RTree<GeoPoint>>> {
         match self
             .main
             .remap_types::<Str, SerdeBincode<RTree<GeoPoint>>>()
@@ -520,7 +524,7 @@ impl Index {
     /// Writes the documents ids that are faceted with a _geo field.
     pub(crate) fn put_geo_faceted_documents_ids(
         &self,
-        wtxn: &mut RwTxn,
+        wtxn: &mut RwTxn<'_>,
         docids: &RoaringBitmap,
     ) -> heed::Result<()> {
         self.main.remap_types::<Str, RoaringBitmapCodec>().put(
@@ -531,12 +535,15 @@ impl Index {
     }

     /// Delete the documents ids that are faceted with a _geo field.
-    pub(crate) fn delete_geo_faceted_documents_ids(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
+    pub(crate) fn delete_geo_faceted_documents_ids(
+        &self,
+        wtxn: &mut RwTxn<'_>,
+    ) -> heed::Result<bool> {
         self.main.remap_key_type::<Str>().delete(wtxn, main_key::GEO_FACETED_DOCUMENTS_IDS_KEY)
     }

     /// Retrieve all the documents ids that are faceted with a _geo field.
-    pub fn geo_faceted_documents_ids(&self, rtxn: &RoTxn) -> heed::Result<RoaringBitmap> {
+    pub fn geo_faceted_documents_ids(&self, rtxn: &RoTxn<'_>) -> heed::Result<RoaringBitmap> {
         match self
             .main
             .remap_types::<Str, RoaringBitmapCodec>()
@@ -552,7 +559,7 @@ impl Index {
     /// the number of times it occurs in the documents.
     pub(crate) fn put_field_distribution(
         &self,
-        wtxn: &mut RwTxn,
+        wtxn: &mut RwTxn<'_>,
         distribution: &FieldDistribution,
     ) -> heed::Result<()> {
         self.main.remap_types::<Str, SerdeJson<FieldDistribution>>().put(
@@ -564,7 +571,7 @@ impl Index {

     /// Returns the field distribution which associates every field name with
     /// the number of times it occurs in the documents.
-    pub fn field_distribution(&self, rtxn: &RoTxn) -> heed::Result<FieldDistribution> {
+    pub fn field_distribution(&self, rtxn: &RoTxn<'_>) -> heed::Result<FieldDistribution> {
         Ok(self
             .main
             .remap_types::<Str, SerdeJson<FieldDistribution>>()
@@ -578,7 +585,7 @@ impl Index {
     /// There must be not be any duplicate field id.
     pub(crate) fn put_displayed_fields(
         &self,
-        wtxn: &mut RwTxn,
+        wtxn: &mut RwTxn<'_>,
         fields: &[&str],
     ) -> heed::Result<()> {
         self.main.remap_types::<Str, SerdeBincode<&[&str]>>().put(
@@ -590,20 +597,20 @@ impl Index {

     /// Deletes the displayed fields ids, this will make the engine to display
     /// all the documents attributes in the order of the `FieldsIdsMap`.
-    pub(crate) fn delete_displayed_fields(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
+    pub(crate) fn delete_displayed_fields(&self, wtxn: &mut RwTxn<'_>) -> heed::Result<bool> {
         self.main.remap_key_type::<Str>().delete(wtxn, main_key::DISPLAYED_FIELDS_KEY)
     }

     /// Returns the displayed fields in the order they were set by the user. If it returns
     /// `None` it means that all the attributes are set as displayed in the order of the `FieldsIdsMap`.
-    pub fn displayed_fields<'t>(&self, rtxn: &'t RoTxn) -> heed::Result<Option<Vec<&'t str>>> {
+    pub fn displayed_fields<'t>(&self, rtxn: &'t RoTxn<'_>) -> heed::Result<Option<Vec<&'t str>>> {
         self.main
             .remap_types::<Str, SerdeBincode<Vec<&'t str>>>()
             .get(rtxn, main_key::DISPLAYED_FIELDS_KEY)
     }

     /// Identical to `displayed_fields`, but returns the ids instead.
-    pub fn displayed_fields_ids(&self, rtxn: &RoTxn) -> Result<Option<Vec<FieldId>>> {
+    pub fn displayed_fields_ids(&self, rtxn: &RoTxn<'_>) -> Result<Option<Vec<FieldId>>> {
         match self.displayed_fields(rtxn)? {
             Some(fields) => {
                 let fields_ids_map = self.fields_ids_map(rtxn)?;
@@ -622,7 +629,7 @@ impl Index {
     /* remove hidden fields */
     pub fn remove_hidden_fields(
         &self,
-        rtxn: &RoTxn,
+        rtxn: &RoTxn<'_>,
         fields: impl IntoIterator<Item = impl AsRef<str>>,
     ) -> Result<(BTreeSet<String>, bool)> {
         let mut valid_fields =
@@ -644,7 +651,7 @@ impl Index {
     /// Write the user defined searchable fields and generate the real searchable fields from the specified fields ids map.
     pub(crate) fn put_all_searchable_fields_from_fields_ids_map(
         &self,
-        wtxn: &mut RwTxn,
+        wtxn: &mut RwTxn<'_>,
         user_fields: &[&str],
         non_searchable_fields_ids: &[FieldId],
         fields_ids_map: &FieldsIdsMap,
@@ -681,7 +688,7 @@ impl Index {
         Ok(())
     }

-    pub(crate) fn delete_all_searchable_fields(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
+    pub(crate) fn delete_all_searchable_fields(&self, wtxn: &mut RwTxn<'_>) -> heed::Result<bool> {
         let did_delete_searchable = self.delete_searchable_fields(wtxn)?;
         let did_delete_user_defined = self.delete_user_defined_searchable_fields(wtxn)?;
         self.delete_fieldids_weights_map(wtxn)?;
@@ -689,7 +696,7 @@ impl Index {
     }

     /// Writes the searchable fields, when this list is specified, only these are indexed.
-    fn put_searchable_fields(&self, wtxn: &mut RwTxn, fields: &[&str]) -> heed::Result<()> {
+    fn put_searchable_fields(&self, wtxn: &mut RwTxn<'_>, fields: &[&str]) -> heed::Result<()> {
         self.main.remap_types::<Str, SerdeBincode<&[&str]>>().put(
             wtxn,
             main_key::SEARCHABLE_FIELDS_KEY,
@@ -698,12 +705,12 @@ impl Index {
     }

     /// Deletes the searchable fields, when no fields are specified, all fields are indexed.
-    fn delete_searchable_fields(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
+    fn delete_searchable_fields(&self, wtxn: &mut RwTxn<'_>) -> heed::Result<bool> {
         self.main.remap_key_type::<Str>().delete(wtxn, main_key::SEARCHABLE_FIELDS_KEY)
     }

     /// Returns the searchable fields, those are the fields that are indexed,
-    pub fn searchable_fields<'t>(&self, rtxn: &'t RoTxn) -> heed::Result<Vec<Cow<'t, str>>> {
+    pub fn searchable_fields<'t>(&self, rtxn: &'t RoTxn<'_>) -> heed::Result<Vec<Cow<'t, str>>> {
         self.main
             .remap_types::<Str, SerdeBincode<Vec<&'t str>>>()
             .get(rtxn, main_key::SEARCHABLE_FIELDS_KEY)?
@@ -719,7 +726,7 @@ impl Index {
     }

     /// Identical to `searchable_fields`, but returns the ids instead.
-    pub fn searchable_fields_ids(&self, rtxn: &RoTxn) -> Result<Vec<FieldId>> {
+    pub fn searchable_fields_ids(&self, rtxn: &RoTxn<'_>) -> Result<Vec<FieldId>> {
         let fields = self.searchable_fields(rtxn)?;
         let fields_ids_map = self.fields_ids_map(rtxn)?;
         let mut fields_ids = Vec::new();
@@ -734,7 +741,7 @@ impl Index {
     /// Writes the searchable fields, when this list is specified, only these are indexed.
     pub(crate) fn put_user_defined_searchable_fields(
         &self,
-        wtxn: &mut RwTxn,
+        wtxn: &mut RwTxn<'_>,
         fields: &[&str],
     ) -> heed::Result<()> {
         self.main.remap_types::<Str, SerdeBincode<_>>().put(
@@ -747,7 +754,7 @@ impl Index {
     /// Deletes the searchable fields, when no fields are specified, all fields are indexed.
     pub(crate) fn delete_user_defined_searchable_fields(
         &self,
-        wtxn: &mut RwTxn,
+        wtxn: &mut RwTxn<'_>,
     ) -> heed::Result<bool> {
         self.main.remap_key_type::<Str>().delete(wtxn, main_key::USER_DEFINED_SEARCHABLE_FIELDS_KEY)
     }
@@ -755,7 +762,7 @@ impl Index {
     /// Returns the user defined searchable fields.
     pub fn user_defined_searchable_fields<'t>(
         &self,
-        rtxn: &'t RoTxn,
+        rtxn: &'t RoTxn<'t>,
     ) -> heed::Result<Option<Vec<&'t str>>> {
         self.main
             .remap_types::<Str, SerdeBincode<Vec<_>>>()
@@ -763,7 +770,10 @@ impl Index {
     }

     /// Identical to `user_defined_searchable_fields`, but returns ids instead.
-    pub fn user_defined_searchable_fields_ids(&self, rtxn: &RoTxn) -> Result<Option<Vec<FieldId>>> {
+    pub fn user_defined_searchable_fields_ids(
+        &self,
+        rtxn: &RoTxn<'_>,
+    ) -> Result<Option<Vec<FieldId>>> {
         match self.user_defined_searchable_fields(rtxn)? {
             Some(fields) => {
                 let fields_ids_map = self.fields_ids_map(rtxn)?;
@@ -784,7 +794,7 @@ impl Index {
     /// Writes the filterable fields names in the database.
     pub(crate) fn put_filterable_fields(
         &self,
-        wtxn: &mut RwTxn,
+        wtxn: &mut RwTxn<'_>,
         fields: &HashSet<String>,
     ) -> heed::Result<()> {
         self.main.remap_types::<Str, SerdeJson<_>>().put(
@@ -795,12 +805,12 @@ impl Index {
     }

     /// Deletes the filterable fields ids in the database.
-    pub(crate) fn delete_filterable_fields(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
+    pub(crate) fn delete_filterable_fields(&self, wtxn: &mut RwTxn<'_>) -> heed::Result<bool> {
         self.main.remap_key_type::<Str>().delete(wtxn, main_key::FILTERABLE_FIELDS_KEY)
     }

     /// Returns the filterable fields names.
-    pub fn filterable_fields(&self, rtxn: &RoTxn) -> heed::Result<HashSet<String>> {
+    pub fn filterable_fields(&self, rtxn: &RoTxn<'_>) -> heed::Result<HashSet<String>> {
         Ok(self
             .main
             .remap_types::<Str, SerdeJson<_>>()
@@ -809,7 +819,7 @@ impl Index {
     }

     /// Identical to `filterable_fields`, but returns ids instead.
-    pub fn filterable_fields_ids(&self, rtxn: &RoTxn) -> Result<HashSet<FieldId>> {
+    pub fn filterable_fields_ids(&self, rtxn: &RoTxn<'_>) -> Result<HashSet<FieldId>> {
         let fields = self.filterable_fields(rtxn)?;
         let fields_ids_map = self.fields_ids_map(rtxn)?;

@@ -828,7 +838,7 @@ impl Index {
     /// Writes the sortable fields names in the database.
     pub(crate) fn put_sortable_fields(
         &self,
-        wtxn: &mut RwTxn,
+        wtxn: &mut RwTxn<'_>,
         fields: &HashSet<String>,
     ) -> heed::Result<()> {
         self.main.remap_types::<Str, SerdeJson<_>>().put(
@@ -839,12 +849,12 @@ impl Index {
     }

     /// Deletes the sortable fields ids in the database.
-    pub(crate) fn delete_sortable_fields(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
+    pub(crate) fn delete_sortable_fields(&self, wtxn: &mut RwTxn<'_>) -> heed::Result<bool> {
         self.main.remap_key_type::<Str>().delete(wtxn, main_key::SORTABLE_FIELDS_KEY)
     }

     /// Returns the sortable fields names.
-    pub fn sortable_fields(&self, rtxn: &RoTxn) -> heed::Result<HashSet<String>> {
+    pub fn sortable_fields(&self, rtxn: &RoTxn<'_>) -> heed::Result<HashSet<String>> {
         Ok(self
             .main
             .remap_types::<Str, SerdeJson<_>>()
@@ -853,7 +863,7 @@ impl Index {
     }

     /// Identical to `sortable_fields`, but returns ids instead.
-    pub fn sortable_fields_ids(&self, rtxn: &RoTxn) -> Result<HashSet<FieldId>> {
+    pub fn sortable_fields_ids(&self, rtxn: &RoTxn<'_>) -> Result<HashSet<FieldId>> {
         let fields = self.sortable_fields(rtxn)?;
         let fields_ids_map = self.fields_ids_map(rtxn)?;
         Ok(fields.into_iter().filter_map(|name| fields_ids_map.id(&name)).collect())
@@ -864,7 +874,7 @@ impl Index {
     /// Writes the faceted fields in the database.
     pub(crate) fn put_faceted_fields(
         &self,
-        wtxn: &mut RwTxn,
+        wtxn: &mut RwTxn<'_>,
         fields: &HashSet<String>,
     ) -> heed::Result<()> {
         self.main.remap_types::<Str, SerdeJson<_>>().put(
@@ -875,7 +885,7 @@ impl Index {
     }

     /// Returns the faceted fields names.
-    pub fn faceted_fields(&self, rtxn: &RoTxn) -> heed::Result<HashSet<String>> {
+    pub fn faceted_fields(&self, rtxn: &RoTxn<'_>) -> heed::Result<HashSet<String>> {
         Ok(self
             .main
             .remap_types::<Str, SerdeJson<_>>()
@@ -884,7 +894,7 @@ impl Index {
     }

     /// Identical to `faceted_fields`, but returns ids instead.
-    pub fn faceted_fields_ids(&self, rtxn: &RoTxn) -> Result<HashSet<FieldId>> {
+    pub fn faceted_fields_ids(&self, rtxn: &RoTxn<'_>) -> Result<HashSet<FieldId>> {
         let fields = self.faceted_fields(rtxn)?;
         let fields_ids_map = self.fields_ids_map(rtxn)?;

@@ -903,7 +913,7 @@ impl Index {
     /// Returns the user defined faceted fields names.
     ///
     /// The user faceted fields are the union of all the filterable, sortable, distinct, and Asc/Desc fields.
-    pub fn user_defined_faceted_fields(&self, rtxn: &RoTxn) -> Result<HashSet<String>> {
+    pub fn user_defined_faceted_fields(&self, rtxn: &RoTxn<'_>) -> Result<HashSet<String>> {
         let filterable_fields = self.filterable_fields(rtxn)?;
         let sortable_fields = self.sortable_fields(rtxn)?;
         let distinct_field = self.distinct_field(rtxn)?;
@@ -924,7 +934,7 @@ impl Index {
     }

     /// Identical to `user_defined_faceted_fields`, but returns ids instead.
-    pub fn user_defined_faceted_fields_ids(&self, rtxn: &RoTxn) -> Result<HashSet<FieldId>> {
+    pub fn user_defined_faceted_fields_ids(&self, rtxn: &RoTxn<'_>) -> Result<HashSet<FieldId>> {
        let fields = self.user_defined_faceted_fields(rtxn)?;
         let fields_ids_map = self.fields_ids_map(rtxn)?;

@@ -943,7 +953,7 @@ impl Index {
     /// Retrieve all the documents which contain this field id set as null
     pub fn null_faceted_documents_ids(
         &self,
-        rtxn: &RoTxn,
+        rtxn: &RoTxn<'_>,
         field_id: FieldId,
     ) -> heed::Result<RoaringBitmap> {
         match self.facet_id_is_null_docids.get(rtxn, &field_id)? {
@@ -955,7 +965,7 @@ impl Index {
     /// Retrieve all the documents which contain this field id and that is considered empty
     pub fn empty_faceted_documents_ids(
         &self,
-        rtxn: &RoTxn,
+        rtxn: &RoTxn<'_>,
         field_id: FieldId,
     ) -> heed::Result<RoaringBitmap> {
         match self.facet_id_is_empty_docids.get(rtxn, &field_id)? {
@@ -967,7 +977,7 @@ impl Index {
     /// Retrieve all the documents which contain this field id
     pub fn exists_faceted_documents_ids(
         &self,
-        rtxn: &RoTxn,
+        rtxn: &RoTxn<'_>,
         field_id: FieldId,
     ) -> heed::Result<RoaringBitmap> {
         match self.facet_id_exists_docids.get(rtxn, &field_id)? {
@@ -980,17 +990,17 @@ impl Index {

     pub(crate) fn put_distinct_field(
         &self,
-        wtxn: &mut RwTxn,
+        wtxn: &mut RwTxn<'_>,
         distinct_field: &str,
     ) -> heed::Result<()> {
         self.main.remap_types::<Str, Str>().put(wtxn, main_key::DISTINCT_FIELD_KEY, distinct_field)
     }

-    pub fn distinct_field<'a>(&self, rtxn: &'a RoTxn) -> heed::Result<Option<&'a str>> {
+    pub fn distinct_field<'a>(&self, rtxn: &'a RoTxn<'_>) -> heed::Result<Option<&'a str>> {
         self.main.remap_types::<Str, Str>().get(rtxn, main_key::DISTINCT_FIELD_KEY)
     }

-    pub(crate) fn delete_distinct_field(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
+    pub(crate) fn delete_distinct_field(&self, wtxn: &mut RwTxn<'_>) -> heed::Result<bool> {
         self.main.remap_key_type::<Str>().delete(wtxn, main_key::DISTINCT_FIELD_KEY)
     }

@@ -998,7 +1008,7 @@ impl Index {

     pub(crate) fn put_criteria(
         &self,
-        wtxn: &mut RwTxn,
+        wtxn: &mut RwTxn<'_>,
         criteria: &[Criterion],
     ) -> heed::Result<()> {
         self.main.remap_types::<Str, SerdeJson<&[Criterion]>>().put(
@@ -1008,11 +1018,11 @@ impl Index {
         )
     }

-    pub(crate) fn delete_criteria(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
+    pub(crate) fn delete_criteria(&self, wtxn: &mut RwTxn<'_>) -> heed::Result<bool> {
         self.main.remap_key_type::<Str>().delete(wtxn, main_key::CRITERIA_KEY)
     }

-    pub fn criteria(&self, rtxn: &RoTxn) -> heed::Result<Vec<Criterion>> {
+    pub fn criteria(&self, rtxn: &RoTxn<'_>) -> heed::Result<Vec<Criterion>> {
         match self
             .main
             .remap_types::<Str, SerdeJson<Vec<Criterion>>>()
@@ -1028,7 +1038,7 @@ impl Index {
     /// Writes the FST which is the words dictionary of the engine.
     pub(crate) fn put_words_fst<A: AsRef<[u8]>>(
         &self,
-        wtxn: &mut RwTxn,
+        wtxn: &mut RwTxn<'_>,
         fst: &fst::Set<A>,
     ) -> heed::Result<()> {
         self.main.remap_types::<Str, Bytes>().put(
@@ -1039,7 +1049,7 @@ impl Index {
     }

     /// Returns the FST which is the words dictionary of the engine.
-    pub fn words_fst<'t>(&self, rtxn: &'t RoTxn) -> Result<fst::Set<Cow<'t, [u8]>>> {
+    pub fn words_fst<'t>(&self, rtxn: &'t RoTxn<'_>) -> Result<fst::Set<Cow<'t, [u8]>>> {
         match self.main.remap_types::<Str, Bytes>().get(rtxn, main_key::WORDS_FST_KEY)? {
             Some(bytes) => Ok(fst::Set::new(bytes)?.map_data(Cow::Borrowed)?),
             None => Ok(fst::Set::default().map_data(Cow::Owned)?),
@@ -1050,7 +1060,7 @@ impl Index {

     pub(crate) fn put_stop_words<A: AsRef<[u8]>>(
         &self,
-        wtxn: &mut RwTxn,
+        wtxn: &mut RwTxn<'_>,
         fst: &fst::Set<A>,
     ) -> heed::Result<()> {
         self.main.remap_types::<Str, Bytes>().put(
@@ -1060,11 +1070,11 @@ impl Index {
         )
     }

-    pub(crate) fn delete_stop_words(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
+    pub(crate) fn delete_stop_words(&self, wtxn: &mut RwTxn<'_>) -> heed::Result<bool> {
         self.main.remap_key_type::<Str>().delete(wtxn, main_key::STOP_WORDS_KEY)
     }

-    pub fn stop_words<'t>(&self, rtxn: &'t RoTxn) -> Result<Option<fst::Set<&'t [u8]>>> {
+    pub fn stop_words<'t>(&self, rtxn: &'t RoTxn<'t>) -> Result<Option<fst::Set<&'t [u8]>>> {
         match self.main.remap_types::<Str, Bytes>().get(rtxn, main_key::STOP_WORDS_KEY)? {
             Some(bytes) => Ok(Some(fst::Set::new(bytes)?)),
             None => Ok(None),
@@ -1075,7 +1085,7 @@ impl Index {

     pub(crate) fn put_non_separator_tokens(
         &self,
-        wtxn: &mut RwTxn,
+        wtxn: &mut RwTxn<'_>,
         set: &BTreeSet<String>,
     ) -> heed::Result<()> {
         self.main.remap_types::<Str, SerdeBincode<_>>().put(
@@ -1085,11 +1095,11 @@ impl Index {
         )
     }

-    pub(crate) fn delete_non_separator_tokens(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
+    pub(crate) fn delete_non_separator_tokens(&self, wtxn: &mut RwTxn<'_>) -> heed::Result<bool> {
         self.main.remap_key_type::<Str>().delete(wtxn, main_key::NON_SEPARATOR_TOKENS_KEY)
     }

-    pub fn non_separator_tokens(&self, rtxn: &RoTxn) -> Result<Option<BTreeSet<String>>> {
+    pub fn non_separator_tokens(&self, rtxn: &RoTxn<'_>) -> Result<Option<BTreeSet<String>>> {
         Ok(self
             .main
             .remap_types::<Str, SerdeBincode<BTreeSet<String>>>()
@ -1100,7 +1110,7 @@ impl Index {
|
|||||||
|
|
||||||
pub(crate) fn put_separator_tokens(
|
pub(crate) fn put_separator_tokens(
|
||||||
&self,
|
&self,
|
||||||
wtxn: &mut RwTxn,
|
wtxn: &mut RwTxn<'_>,
|
||||||
set: &BTreeSet<String>,
|
set: &BTreeSet<String>,
|
||||||
) -> heed::Result<()> {
|
) -> heed::Result<()> {
|
||||||
self.main.remap_types::<Str, SerdeBincode<_>>().put(
|
self.main.remap_types::<Str, SerdeBincode<_>>().put(
|
||||||
@ -1110,11 +1120,11 @@ impl Index {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn delete_separator_tokens(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
|
pub(crate) fn delete_separator_tokens(&self, wtxn: &mut RwTxn<'_>) -> heed::Result<bool> {
|
||||||
self.main.remap_key_type::<Str>().delete(wtxn, main_key::SEPARATOR_TOKENS_KEY)
|
self.main.remap_key_type::<Str>().delete(wtxn, main_key::SEPARATOR_TOKENS_KEY)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn separator_tokens(&self, rtxn: &RoTxn) -> Result<Option<BTreeSet<String>>> {
|
pub fn separator_tokens(&self, rtxn: &RoTxn<'_>) -> Result<Option<BTreeSet<String>>> {
|
||||||
Ok(self
|
Ok(self
|
||||||
.main
|
.main
|
||||||
.remap_types::<Str, SerdeBincode<BTreeSet<String>>>()
|
.remap_types::<Str, SerdeBincode<BTreeSet<String>>>()
|
||||||
@ -1123,7 +1133,7 @@ impl Index {
|
|||||||
|
|
||||||
/* separators easing method */
|
/* separators easing method */
|
||||||
|
|
||||||
pub fn allowed_separators(&self, rtxn: &RoTxn) -> Result<Option<BTreeSet<String>>> {
|
pub fn allowed_separators(&self, rtxn: &RoTxn<'_>) -> Result<Option<BTreeSet<String>>> {
|
||||||
let default_separators =
|
let default_separators =
|
||||||
charabia::separators::DEFAULT_SEPARATORS.iter().map(|s| s.to_string());
|
charabia::separators::DEFAULT_SEPARATORS.iter().map(|s| s.to_string());
|
||||||
let mut separators: Option<BTreeSet<_>> = None;
|
let mut separators: Option<BTreeSet<_>> = None;
|
||||||
@ -1145,17 +1155,17 @@ impl Index {
|
|||||||
|
|
||||||
pub(crate) fn put_dictionary(
|
pub(crate) fn put_dictionary(
|
||||||
&self,
|
&self,
|
||||||
wtxn: &mut RwTxn,
|
wtxn: &mut RwTxn<'_>,
|
||||||
set: &BTreeSet<String>,
|
set: &BTreeSet<String>,
|
||||||
) -> heed::Result<()> {
|
) -> heed::Result<()> {
|
||||||
self.main.remap_types::<Str, SerdeBincode<_>>().put(wtxn, main_key::DICTIONARY_KEY, set)
|
self.main.remap_types::<Str, SerdeBincode<_>>().put(wtxn, main_key::DICTIONARY_KEY, set)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn delete_dictionary(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
|
pub(crate) fn delete_dictionary(&self, wtxn: &mut RwTxn<'_>) -> heed::Result<bool> {
|
||||||
self.main.remap_key_type::<Str>().delete(wtxn, main_key::DICTIONARY_KEY)
|
self.main.remap_key_type::<Str>().delete(wtxn, main_key::DICTIONARY_KEY)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn dictionary(&self, rtxn: &RoTxn) -> Result<Option<BTreeSet<String>>> {
|
pub fn dictionary(&self, rtxn: &RoTxn<'_>) -> Result<Option<BTreeSet<String>>> {
|
||||||
Ok(self
|
Ok(self
|
||||||
.main
|
.main
|
||||||
.remap_types::<Str, SerdeBincode<BTreeSet<String>>>()
|
.remap_types::<Str, SerdeBincode<BTreeSet<String>>>()
|
||||||
@ -1166,7 +1176,7 @@ impl Index {
|
|||||||
|
|
||||||
pub(crate) fn put_synonyms(
|
pub(crate) fn put_synonyms(
|
||||||
&self,
|
&self,
|
||||||
wtxn: &mut RwTxn,
|
wtxn: &mut RwTxn<'_>,
|
||||||
synonyms: &HashMap<Vec<String>, Vec<Vec<String>>>,
|
synonyms: &HashMap<Vec<String>, Vec<Vec<String>>>,
|
||||||
user_defined_synonyms: &BTreeMap<String, Vec<String>>,
|
user_defined_synonyms: &BTreeMap<String, Vec<String>>,
|
||||||
) -> heed::Result<()> {
|
) -> heed::Result<()> {
|
||||||
@ -1182,14 +1192,14 @@ impl Index {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn delete_synonyms(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
|
pub(crate) fn delete_synonyms(&self, wtxn: &mut RwTxn<'_>) -> heed::Result<bool> {
|
||||||
self.main.remap_key_type::<Str>().delete(wtxn, main_key::SYNONYMS_KEY)?;
|
self.main.remap_key_type::<Str>().delete(wtxn, main_key::SYNONYMS_KEY)?;
|
||||||
self.main.remap_key_type::<Str>().delete(wtxn, main_key::USER_DEFINED_SYNONYMS_KEY)
|
self.main.remap_key_type::<Str>().delete(wtxn, main_key::USER_DEFINED_SYNONYMS_KEY)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn user_defined_synonyms(
|
pub fn user_defined_synonyms(
|
||||||
&self,
|
&self,
|
||||||
rtxn: &RoTxn,
|
rtxn: &RoTxn<'_>,
|
||||||
) -> heed::Result<BTreeMap<String, Vec<String>>> {
|
) -> heed::Result<BTreeMap<String, Vec<String>>> {
|
||||||
Ok(self
|
Ok(self
|
||||||
.main
|
.main
|
||||||
@ -1198,7 +1208,10 @@ impl Index {
|
|||||||
.unwrap_or_default())
|
.unwrap_or_default())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn synonyms(&self, rtxn: &RoTxn) -> heed::Result<HashMap<Vec<String>, Vec<Vec<String>>>> {
|
pub fn synonyms(
|
||||||
|
&self,
|
||||||
|
rtxn: &RoTxn<'_>,
|
||||||
|
) -> heed::Result<HashMap<Vec<String>, Vec<Vec<String>>>> {
|
||||||
Ok(self
|
Ok(self
|
||||||
.main
|
.main
|
||||||
.remap_types::<Str, SerdeBincode<_>>()
|
.remap_types::<Str, SerdeBincode<_>>()
|
||||||
@ -1208,7 +1221,7 @@ impl Index {
|
|||||||
|
|
||||||
pub fn words_synonyms<S: AsRef<str>>(
|
pub fn words_synonyms<S: AsRef<str>>(
|
||||||
&self,
|
&self,
|
||||||
rtxn: &RoTxn,
|
rtxn: &RoTxn<'_>,
|
||||||
words: &[S],
|
words: &[S],
|
||||||
) -> heed::Result<Option<Vec<Vec<String>>>> {
|
) -> heed::Result<Option<Vec<Vec<String>>>> {
|
||||||
let words: Vec<_> = words.iter().map(|s| s.as_ref().to_owned()).collect();
|
let words: Vec<_> = words.iter().map(|s| s.as_ref().to_owned()).collect();
|
||||||
@ -1220,7 +1233,7 @@ impl Index {
|
|||||||
/// Writes the FST which is the words prefixes dictionary of the engine.
|
/// Writes the FST which is the words prefixes dictionary of the engine.
|
||||||
pub(crate) fn put_words_prefixes_fst<A: AsRef<[u8]>>(
|
pub(crate) fn put_words_prefixes_fst<A: AsRef<[u8]>>(
|
||||||
&self,
|
&self,
|
||||||
wtxn: &mut RwTxn,
|
wtxn: &mut RwTxn<'_>,
|
||||||
fst: &fst::Set<A>,
|
fst: &fst::Set<A>,
|
||||||
) -> heed::Result<()> {
|
) -> heed::Result<()> {
|
||||||
self.main.remap_types::<Str, Bytes>().put(
|
self.main.remap_types::<Str, Bytes>().put(
|
||||||
@ -1231,7 +1244,7 @@ impl Index {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Returns the FST which is the words prefixes dictionary of the engine.
|
/// Returns the FST which is the words prefixes dictionary of the engine.
|
||||||
pub fn words_prefixes_fst<'t>(&self, rtxn: &'t RoTxn) -> Result<fst::Set<Cow<'t, [u8]>>> {
|
pub fn words_prefixes_fst<'t>(&self, rtxn: &'t RoTxn<'t>) -> Result<fst::Set<Cow<'t, [u8]>>> {
|
||||||
match self.main.remap_types::<Str, Bytes>().get(rtxn, main_key::WORDS_PREFIXES_FST_KEY)? {
|
match self.main.remap_types::<Str, Bytes>().get(rtxn, main_key::WORDS_PREFIXES_FST_KEY)? {
|
||||||
Some(bytes) => Ok(fst::Set::new(bytes)?.map_data(Cow::Borrowed)?),
|
Some(bytes) => Ok(fst::Set::new(bytes)?.map_data(Cow::Borrowed)?),
|
||||||
None => Ok(fst::Set::default().map_data(Cow::Owned)?),
|
None => Ok(fst::Set::default().map_data(Cow::Owned)?),
|
||||||
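Two spellings appear in the hunks above: `rtxn: &RoTxn<'_>` where the transaction's lifetime never escapes the function, and `rtxn: &'t RoTxn<'t>` in `stop_words` and `words_prefixes_fst`, whose return values borrow from the transaction. A sketch of the second form with toy types (not the real heed/fst API):

use std::borrow::Cow;

// `Txn` is a hypothetical stand-in; `words` mimics only the lifetime
// shape of `Index::words_fst`, not its implementation.
struct Txn<'env> {
    bytes: &'env [u8],
}

// The returned `Cow` borrows data reachable through the transaction,
// so the signature names one lifetime and uses it in both positions.
fn words<'t>(txn: &'t Txn<'t>) -> Cow<'t, [u8]> {
    Cow::Borrowed(txn.bytes)
}

fn main() {
    let data = vec![1u8, 2, 3];
    let txn = Txn { bytes: &data };
    assert_eq!(words(&txn).len(), 3);
}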
@@ -1242,7 +1255,7 @@ impl Index {
 
     /// Returns the number of documents ids associated with the given word,
     /// it is much faster than deserializing the bitmap and getting the length of it.
-    pub fn word_documents_count(&self, rtxn: &RoTxn, word: &str) -> heed::Result<Option<u64>> {
+    pub fn word_documents_count(&self, rtxn: &RoTxn<'_>, word: &str) -> heed::Result<Option<u64>> {
         self.word_docids.remap_data_type::<RoaringBitmapLenCodec>().get(rtxn, word)
     }
 
@@ -1251,7 +1264,7 @@ impl Index {
     /// Returns an iterator over the requested documents. The next item will be an error if a document is missing.
     pub fn iter_documents<'a, 't: 'a>(
         &'a self,
-        rtxn: &'t RoTxn,
+        rtxn: &'t RoTxn<'t>,
         ids: impl IntoIterator<Item = DocumentId> + 'a,
     ) -> Result<impl Iterator<Item = Result<(DocumentId, obkv::KvReaderU16<'t>)>> + 'a> {
         Ok(ids.into_iter().map(move |id| {
@@ -1266,7 +1279,7 @@ impl Index {
     /// Returns a [`Vec`] of the requested documents. Returns an error if a document is missing.
     pub fn documents<'t>(
         &self,
-        rtxn: &'t RoTxn,
+        rtxn: &'t RoTxn<'t>,
         ids: impl IntoIterator<Item = DocumentId>,
     ) -> Result<Vec<(DocumentId, obkv::KvReaderU16<'t>)>> {
         self.iter_documents(rtxn, ids)?.collect()
@@ -1275,14 +1288,14 @@ impl Index {
     /// Returns an iterator over all the documents in the index.
     pub fn all_documents<'a, 't: 'a>(
         &'a self,
-        rtxn: &'t RoTxn,
+        rtxn: &'t RoTxn<'t>,
     ) -> Result<impl Iterator<Item = Result<(DocumentId, obkv::KvReaderU16<'t>)>> + 'a> {
         self.iter_documents(rtxn, self.documents_ids(rtxn)?)
     }
 
     pub fn external_id_of<'a, 't: 'a>(
         &'a self,
-        rtxn: &'t RoTxn,
+        rtxn: &'t RoTxn<'t>,
         ids: impl IntoIterator<Item = DocumentId> + 'a,
     ) -> Result<impl IntoIterator<Item = Result<String>> + 'a> {
         let fields = self.fields_ids_map(rtxn)?;
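`iter_documents` and its neighbours use a richer version of the same idea: `'t` for the transaction borrow, `'a` for the borrow of `self`, and a `'t: 'a` bound so the returned iterator may capture both. A reduced sketch under those assumptions (toy types, not milli's obkv readers):

// `Txn` and `Index` are hypothetical stand-ins that reproduce only the
// lifetime structure of `Index::iter_documents`.
struct Txn<'env>(&'env [String]);

struct Index {
    ids: Vec<usize>,
}

impl Index {
    fn iter_documents<'a, 't: 'a>(
        &'a self,
        txn: &'t Txn<'t>,
    ) -> impl Iterator<Item = &'t str> + 'a {
        // The closure captures `txn` (lives for `'t`) while the
        // iterator itself borrows `self.ids` (lives for `'a`).
        self.ids.iter().map(move |&id| txn.0[id].as_str())
    }
}

fn main() {
    let docs = vec!["kefir".to_string(), "echo".to_string()];
    let txn = Txn(&docs);
    let index = Index { ids: vec![1, 0] };
    let names: Vec<&str> = index.iter_documents(&txn).collect();
    assert_eq!(names, ["echo", "kefir"]);
}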
@@ -1310,16 +1323,16 @@ impl Index {
         }))
     }
 
-    pub fn facets_distribution<'a>(&'a self, rtxn: &'a RoTxn) -> FacetDistribution<'a> {
+    pub fn facets_distribution<'a>(&'a self, rtxn: &'a RoTxn<'a>) -> FacetDistribution<'a> {
         FacetDistribution::new(rtxn, self)
     }
 
-    pub fn search<'a>(&'a self, rtxn: &'a RoTxn) -> Search<'a> {
+    pub fn search<'a>(&'a self, rtxn: &'a RoTxn<'a>) -> Search<'a> {
         Search::new(rtxn, self)
     }
 
     /// Returns the index creation time.
-    pub fn created_at(&self, rtxn: &RoTxn) -> Result<OffsetDateTime> {
+    pub fn created_at(&self, rtxn: &RoTxn<'_>) -> Result<OffsetDateTime> {
         Ok(self
             .main
             .remap_types::<Str, SerdeJson<OffsetDateTime>>()
@@ -1331,7 +1344,7 @@ impl Index {
     }
 
     /// Returns the index last updated time.
-    pub fn updated_at(&self, rtxn: &RoTxn) -> Result<OffsetDateTime> {
+    pub fn updated_at(&self, rtxn: &RoTxn<'_>) -> Result<OffsetDateTime> {
         Ok(self
             .main
             .remap_types::<Str, SerdeJson<OffsetDateTime>>()
@@ -1344,7 +1357,7 @@ impl Index {
 
     pub(crate) fn set_updated_at(
         &self,
-        wtxn: &mut RwTxn,
+        wtxn: &mut RwTxn<'_>,
         time: &OffsetDateTime,
     ) -> heed::Result<()> {
         self.main.remap_types::<Str, SerdeJson<OffsetDateTime>>().put(
@@ -1354,7 +1367,7 @@ impl Index {
         )
     }
 
-    pub fn authorize_typos(&self, txn: &RoTxn) -> heed::Result<bool> {
+    pub fn authorize_typos(&self, txn: &RoTxn<'_>) -> heed::Result<bool> {
         // It is not possible to put a bool in heed with OwnedType, so we put a u8 instead. We
         // identify 0 as being false, and anything else as true. The absence of a value is true,
         // because by default, we authorize typos.
@@ -1364,7 +1377,7 @@ impl Index {
         }
     }
 
-    pub(crate) fn put_authorize_typos(&self, txn: &mut RwTxn, flag: bool) -> heed::Result<()> {
+    pub(crate) fn put_authorize_typos(&self, txn: &mut RwTxn<'_>, flag: bool) -> heed::Result<()> {
         // It is not possible to put a bool in heed with OwnedType, so we put a u8 instead. We
         // identify 0 as being false, and anything else as true. The absence of a value is true,
         // because by default, we authorize typos.
@@ -1373,7 +1386,7 @@ impl Index {
         Ok(())
     }
 
-    pub fn min_word_len_one_typo(&self, txn: &RoTxn) -> heed::Result<u8> {
+    pub fn min_word_len_one_typo(&self, txn: &RoTxn<'_>) -> heed::Result<u8> {
         // It is not possible to put a bool in heed with OwnedType, so we put a u8 instead. We
         // identify 0 as being false, and anything else as true. The absence of a value is true,
         // because by default, we authorize typos.
@@ -1384,7 +1397,11 @@ impl Index {
             .unwrap_or(DEFAULT_MIN_WORD_LEN_ONE_TYPO))
     }
 
-    pub(crate) fn put_min_word_len_one_typo(&self, txn: &mut RwTxn, val: u8) -> heed::Result<()> {
+    pub(crate) fn put_min_word_len_one_typo(
+        &self,
+        txn: &mut RwTxn<'_>,
+        val: u8,
+    ) -> heed::Result<()> {
         // It is not possible to put a bool in heed with OwnedType, so we put a u8 instead. We
         // identify 0 as being false, and anything else as true. The absence of a value is true,
         // because by default, we authorize typos.
@@ -1392,7 +1409,7 @@ impl Index {
         Ok(())
     }
 
-    pub fn min_word_len_two_typos(&self, txn: &RoTxn) -> heed::Result<u8> {
+    pub fn min_word_len_two_typos(&self, txn: &RoTxn<'_>) -> heed::Result<u8> {
         // It is not possible to put a bool in heed with OwnedType, so we put a u8 instead. We
         // identify 0 as being false, and anything else as true. The absence of a value is true,
         // because by default, we authorize typos.
@@ -1403,7 +1420,11 @@ impl Index {
             .unwrap_or(DEFAULT_MIN_WORD_LEN_TWO_TYPOS))
     }
 
-    pub(crate) fn put_min_word_len_two_typos(&self, txn: &mut RwTxn, val: u8) -> heed::Result<()> {
+    pub(crate) fn put_min_word_len_two_typos(
+        &self,
+        txn: &mut RwTxn<'_>,
+        val: u8,
+    ) -> heed::Result<()> {
         // It is not possible to put a bool in heed with OwnedType, so we put a u8 instead. We
         // identify 0 as being false, and anything else as true. The absence of a value is true,
         // because by default, we authorize typos.
@@ -1412,7 +1433,7 @@ impl Index {
     }
 
     /// List the words on which typo are not allowed
-    pub fn exact_words<'t>(&self, txn: &'t RoTxn) -> Result<Option<fst::Set<Cow<'t, [u8]>>>> {
+    pub fn exact_words<'t>(&self, txn: &'t RoTxn<'t>) -> Result<Option<fst::Set<Cow<'t, [u8]>>>> {
         match self.main.remap_types::<Str, Bytes>().get(txn, main_key::EXACT_WORDS)? {
             Some(bytes) => Ok(Some(fst::Set::new(bytes)?.map_data(Cow::Borrowed)?)),
             None => Ok(None),
@@ -1421,7 +1442,7 @@ impl Index {
 
     pub(crate) fn put_exact_words<A: AsRef<[u8]>>(
         &self,
-        txn: &mut RwTxn,
+        txn: &mut RwTxn<'_>,
         words: &fst::Set<A>,
     ) -> Result<()> {
         self.main.remap_types::<Str, Bytes>().put(
@@ -1433,7 +1454,7 @@ impl Index {
     }
 
     /// Returns the exact attributes: attributes for which typo is disallowed.
-    pub fn exact_attributes<'t>(&self, txn: &'t RoTxn) -> Result<Vec<&'t str>> {
+    pub fn exact_attributes<'t>(&self, txn: &'t RoTxn<'t>) -> Result<Vec<&'t str>> {
         Ok(self
             .main
             .remap_types::<Str, SerdeBincode<Vec<&str>>>()
@@ -1442,14 +1463,14 @@ impl Index {
     }
 
     /// Returns the list of exact attributes field ids.
-    pub fn exact_attributes_ids(&self, txn: &RoTxn) -> Result<HashSet<FieldId>> {
+    pub fn exact_attributes_ids(&self, txn: &RoTxn<'_>) -> Result<HashSet<FieldId>> {
         let attrs = self.exact_attributes(txn)?;
         let fid_map = self.fields_ids_map(txn)?;
         Ok(attrs.iter().filter_map(|attr| fid_map.id(attr)).collect())
     }
 
     /// Writes the exact attributes to the database.
-    pub(crate) fn put_exact_attributes(&self, txn: &mut RwTxn, attrs: &[&str]) -> Result<()> {
+    pub(crate) fn put_exact_attributes(&self, txn: &mut RwTxn<'_>, attrs: &[&str]) -> Result<()> {
         self.main.remap_types::<Str, SerdeBincode<&[&str]>>().put(
             txn,
             main_key::EXACT_ATTRIBUTES,
@@ -1459,23 +1480,27 @@ impl Index {
     }
 
     /// Clears the exact attributes from the store.
-    pub(crate) fn delete_exact_attributes(&self, txn: &mut RwTxn) -> heed::Result<bool> {
+    pub(crate) fn delete_exact_attributes(&self, txn: &mut RwTxn<'_>) -> heed::Result<bool> {
         self.main.remap_key_type::<Str>().delete(txn, main_key::EXACT_ATTRIBUTES)
     }
 
-    pub fn max_values_per_facet(&self, txn: &RoTxn) -> heed::Result<Option<u64>> {
+    pub fn max_values_per_facet(&self, txn: &RoTxn<'_>) -> heed::Result<Option<u64>> {
         self.main.remap_types::<Str, BEU64>().get(txn, main_key::MAX_VALUES_PER_FACET)
     }
 
-    pub(crate) fn put_max_values_per_facet(&self, txn: &mut RwTxn, val: u64) -> heed::Result<()> {
+    pub(crate) fn put_max_values_per_facet(
+        &self,
+        txn: &mut RwTxn<'_>,
+        val: u64,
+    ) -> heed::Result<()> {
         self.main.remap_types::<Str, BEU64>().put(txn, main_key::MAX_VALUES_PER_FACET, &val)
     }
 
-    pub(crate) fn delete_max_values_per_facet(&self, txn: &mut RwTxn) -> heed::Result<bool> {
+    pub(crate) fn delete_max_values_per_facet(&self, txn: &mut RwTxn<'_>) -> heed::Result<bool> {
         self.main.remap_key_type::<Str>().delete(txn, main_key::MAX_VALUES_PER_FACET)
     }
 
-    pub fn sort_facet_values_by(&self, txn: &RoTxn) -> heed::Result<OrderByMap> {
+    pub fn sort_facet_values_by(&self, txn: &RoTxn<'_>) -> heed::Result<OrderByMap> {
         let orders = self
             .main
             .remap_types::<Str, SerdeJson<OrderByMap>>()
@@ -1486,33 +1511,36 @@ impl Index {
 
     pub(crate) fn put_sort_facet_values_by(
         &self,
-        txn: &mut RwTxn,
+        txn: &mut RwTxn<'_>,
         val: &OrderByMap,
     ) -> heed::Result<()> {
         self.main.remap_types::<Str, SerdeJson<_>>().put(txn, main_key::SORT_FACET_VALUES_BY, &val)
     }
 
-    pub(crate) fn delete_sort_facet_values_by(&self, txn: &mut RwTxn) -> heed::Result<bool> {
+    pub(crate) fn delete_sort_facet_values_by(&self, txn: &mut RwTxn<'_>) -> heed::Result<bool> {
         self.main.remap_key_type::<Str>().delete(txn, main_key::SORT_FACET_VALUES_BY)
     }
 
-    pub fn pagination_max_total_hits(&self, txn: &RoTxn) -> heed::Result<Option<u64>> {
+    pub fn pagination_max_total_hits(&self, txn: &RoTxn<'_>) -> heed::Result<Option<u64>> {
         self.main.remap_types::<Str, BEU64>().get(txn, main_key::PAGINATION_MAX_TOTAL_HITS)
     }
 
     pub(crate) fn put_pagination_max_total_hits(
         &self,
-        txn: &mut RwTxn,
+        txn: &mut RwTxn<'_>,
         val: u64,
     ) -> heed::Result<()> {
         self.main.remap_types::<Str, BEU64>().put(txn, main_key::PAGINATION_MAX_TOTAL_HITS, &val)
     }
 
-    pub(crate) fn delete_pagination_max_total_hits(&self, txn: &mut RwTxn) -> heed::Result<bool> {
+    pub(crate) fn delete_pagination_max_total_hits(
+        &self,
+        txn: &mut RwTxn<'_>,
+    ) -> heed::Result<bool> {
         self.main.remap_key_type::<Str>().delete(txn, main_key::PAGINATION_MAX_TOTAL_HITS)
     }
 
-    pub fn proximity_precision(&self, txn: &RoTxn) -> heed::Result<Option<ProximityPrecision>> {
+    pub fn proximity_precision(&self, txn: &RoTxn<'_>) -> heed::Result<Option<ProximityPrecision>> {
         self.main
             .remap_types::<Str, SerdeBincode<ProximityPrecision>>()
             .get(txn, main_key::PROXIMITY_PRECISION)
@@ -1520,7 +1548,7 @@ impl Index {
 
     pub(crate) fn put_proximity_precision(
         &self,
-        txn: &mut RwTxn,
+        txn: &mut RwTxn<'_>,
         val: ProximityPrecision,
     ) -> heed::Result<()> {
         self.main.remap_types::<Str, SerdeBincode<ProximityPrecision>>().put(
@@ -1530,7 +1558,7 @@ impl Index {
         )
     }
 
-    pub(crate) fn delete_proximity_precision(&self, txn: &mut RwTxn) -> heed::Result<bool> {
+    pub(crate) fn delete_proximity_precision(&self, txn: &mut RwTxn<'_>) -> heed::Result<bool> {
         self.main.remap_key_type::<Str>().delete(txn, main_key::PROXIMITY_PRECISION)
     }
 
@@ -1538,13 +1566,16 @@ impl Index {
     /// Retrieve all the documents ids that correspond with (Script, Language) key, `None` if it is any.
     pub fn script_language_documents_ids(
         &self,
-        rtxn: &RoTxn,
+        rtxn: &RoTxn<'_>,
         key: &(Script, Language),
     ) -> heed::Result<Option<RoaringBitmap>> {
         self.script_language_docids.get(rtxn, key)
     }
 
-    pub fn script_language(&self, rtxn: &RoTxn) -> heed::Result<HashMap<Script, Vec<Language>>> {
+    pub fn script_language(
+        &self,
+        rtxn: &RoTxn<'_>,
+    ) -> heed::Result<HashMap<Script, Vec<Language>>> {
         let mut script_language: HashMap<Script, Vec<Language>> = HashMap::new();
         let mut script_language_doc_count: Vec<(Script, Language, u64)> = Vec::new();
         let mut total = 0;
@@ -1605,7 +1636,7 @@ impl Index {
         &'a self,
         rtxn: &'a RoTxn<'a>,
         embedder_id: u8,
-    ) -> impl Iterator<Item = Result<arroy::Reader<arroy::distances::Angular>>> + 'a {
+    ) -> impl Iterator<Item = Result<arroy::Reader<'a, arroy::distances::Angular>>> + 'a {
         crate::vector::arroy_db_range_for_embedder(embedder_id).map_while(move |k| {
             arroy::Reader::open(rtxn, k, self.vector_arroy)
                 .map(Some)
@@ -1763,7 +1794,7 @@ pub(crate) mod tests {
 
         pub fn update_settings(
             &self,
-            update: impl Fn(&mut Settings),
+            update: impl Fn(&mut Settings<'_, '_, '_>),
         ) -> Result<(), crate::error::Error> {
             let mut wtxn = self.write_txn().unwrap();
             self.update_settings_using_wtxn(&mut wtxn, update)?;
@@ -1773,7 +1804,7 @@ pub(crate) mod tests {
         pub fn update_settings_using_wtxn<'t>(
            &'t self,
            wtxn: &mut RwTxn<'t>,
-            update: impl Fn(&mut Settings),
+            update: impl Fn(&mut Settings<'_, '_, '_>),
         ) -> Result<(), crate::error::Error> {
             let mut builder = update::Settings::new(wtxn, &self.inner, &self.indexer_config);
             update(&mut builder);
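In the test helpers above, `impl Fn(&mut Settings)` becomes `impl Fn(&mut Settings<'_, '_, '_>)`: a type with several lifetime parameters needs one `'_` per parameter, and inside the `Fn(...)` sugar each `'_` desugars to a higher-ranked lifetime, so the callback stays generic over whatever the builder borrows. A toy sketch, with `Settings` as a hypothetical stand-in for milli's settings builder:

// Only the three lifetime parameters matter for this sketch.
struct Settings<'t, 'u, 'i> {
    txn: &'t str,
    update: &'u str,
    index: &'i str,
}

// Edition 2018 accepted `impl Fn(&mut Settings)`; the annotated bound
// is the same type with its elisions made explicit.
fn update_settings(update: impl Fn(&mut Settings<'_, '_, '_>)) {
    let mut settings = Settings { txn: "txn", update: "update", index: "index" };
    update(&mut settings);
}

fn main() {
    // The closure works for any concrete lifetimes the caller picks.
    update_settings(|settings| settings.index = "movies");
}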
@@ -211,7 +211,7 @@ pub fn bucketed_position(relative: u16) -> u16 {
 pub fn obkv_to_json(
     displayed_fields: &[FieldId],
     fields_ids_map: &FieldsIdsMap,
-    obkv: obkv::KvReaderU16,
+    obkv: obkv::KvReaderU16<'_>,
 ) -> Result<Object> {
     displayed_fields
         .iter()
@@ -229,7 +229,10 @@ pub fn obkv_to_json(
 }
 
 /// Transform every field of a raw obkv store into a JSON Object.
-pub fn all_obkv_to_json(obkv: obkv::KvReaderU16, fields_ids_map: &FieldsIdsMap) -> Result<Object> {
+pub fn all_obkv_to_json(
+    obkv: obkv::KvReaderU16<'_>,
+    fields_ids_map: &FieldsIdsMap,
+) -> Result<Object> {
     let all_keys = obkv.iter().map(|(k, _v)| k).collect::<Vec<_>>();
     obkv_to_json(all_keys.as_slice(), fields_ids_map, obkv)
 }
@@ -47,7 +47,7 @@ pub struct FacetDistribution<'a> {
 }
 
 impl<'a> FacetDistribution<'a> {
-    pub fn new(rtxn: &'a heed::RoTxn, index: &'a Index) -> FacetDistribution<'a> {
+    pub fn new(rtxn: &'a heed::RoTxn<'a>, index: &'a Index) -> FacetDistribution<'a> {
         FacetDistribution {
             facets: None,
             candidates: None,
@@ -374,7 +374,7 @@ impl<'a> FacetDistribution<'a> {
 }
 
 impl fmt::Debug for FacetDistribution<'_> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let FacetDistribution {
             facets,
             candidates,
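The `fmt::Formatter` edits are the same idiom applied to a standard-library type: `Formatter` has always carried a lifetime parameter, and the new signatures simply write it as `Formatter<'_>`. A self-contained example of the annotated form, with a toy `Search` type:

use std::fmt;

struct Search {
    query: Option<String>,
}

impl fmt::Debug for Search {
    // `fmt::Formatter<'_>` is identical in meaning to the old elided
    // `fmt::Formatter`; the borrow is just visible now.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Search").field("query", &self.query).finish()
    }
}

fn main() {
    println!("{:?}", Search { query: Some("puppy".into()) });
}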
@@ -221,14 +221,14 @@ impl<'a> Filter<'a> {
 }
 
 impl<'a> Filter<'a> {
-    pub fn evaluate(&self, rtxn: &heed::RoTxn, index: &Index) -> Result<RoaringBitmap> {
+    pub fn evaluate(&self, rtxn: &heed::RoTxn<'_>, index: &Index) -> Result<RoaringBitmap> {
         // to avoid doing this for each recursive call we're going to do it ONCE ahead of time
         let filterable_fields = index.filterable_fields(rtxn)?;
         self.inner_evaluate(rtxn, index, &filterable_fields, None)
     }
 
     fn evaluate_operator(
-        rtxn: &heed::RoTxn,
+        rtxn: &heed::RoTxn<'_>,
         index: &Index,
         field_id: FieldId,
         universe: Option<&RoaringBitmap>,
@@ -313,7 +313,7 @@ impl<'a> Filter<'a> {
     /// Aggregates the documents ids that are part of the specified range automatically
     /// going deeper through the levels.
     fn explore_facet_number_levels(
-        rtxn: &heed::RoTxn,
+        rtxn: &heed::RoTxn<'_>,
         db: heed::Database<FacetGroupKeyCodec<OrderedF64Codec>, FacetGroupValueCodec>,
         field_id: FieldId,
         left: Bound<f64>,
@@ -338,7 +338,7 @@ impl<'a> Filter<'a> {
 
     fn inner_evaluate(
         &self,
-        rtxn: &heed::RoTxn,
+        rtxn: &heed::RoTxn<'_>,
         index: &Index,
         filterable_fields: &HashSet<String>,
         universe: Option<&RoaringBitmap>,
@@ -33,7 +33,7 @@ fn facet_extreme_value<'t>(
 
 pub fn facet_min_value<'t>(
     index: &'t Index,
-    rtxn: &'t heed::RoTxn,
+    rtxn: &'t heed::RoTxn<'t>,
     field_id: u16,
     candidates: RoaringBitmap,
 ) -> Result<Option<f64>> {
@@ -44,7 +44,7 @@ pub fn facet_min_value<'t>(
 
 pub fn facet_max_value<'t>(
     index: &'t Index,
-    rtxn: &'t heed::RoTxn,
+    rtxn: &'t heed::RoTxn<'t>,
     field_id: u16,
     candidates: RoaringBitmap,
 ) -> Result<Option<f64>> {
@@ -55,7 +55,7 @@ pub fn facet_max_value<'t>(
 
 /// Get the first facet value in the facet database
 pub(crate) fn get_first_facet_value<'t, BoundCodec, DC>(
-    txn: &'t RoTxn,
+    txn: &'t RoTxn<'t>,
     db: heed::Database<FacetGroupKeyCodec<BytesRefCodec>, DC>,
     field_id: u16,
 ) -> heed::Result<Option<BoundCodec::DItem>>
@@ -79,7 +79,7 @@ where
 
 /// Get the last facet value in the facet database
 pub(crate) fn get_last_facet_value<'t, BoundCodec, DC>(
-    txn: &'t RoTxn,
+    txn: &'t RoTxn<'t>,
     db: heed::Database<FacetGroupKeyCodec<BytesRefCodec>, DC>,
     field_id: u16,
 ) -> heed::Result<Option<BoundCodec::DItem>>
@@ -55,7 +55,7 @@ pub struct Search<'a> {
 }
 
 impl<'a> Search<'a> {
-    pub fn new(rtxn: &'a heed::RoTxn, index: &'a Index) -> Search<'a> {
+    pub fn new(rtxn: &'a heed::RoTxn<'a>, index: &'a Index) -> Search<'a> {
         Search {
             query: None,
             filter: None,
@@ -253,7 +253,7 @@ impl<'a> Search<'a> {
 }
 
 impl fmt::Debug for Search<'_> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let Search {
             query,
             filter,
@@ -47,7 +47,7 @@ pub struct DatabaseCache<'ctx> {
 }
 impl<'ctx> DatabaseCache<'ctx> {
     fn get_value<'v, K1, KC, DC>(
-        txn: &'ctx RoTxn,
+        txn: &'ctx RoTxn<'_>,
         cache_key: K1,
         db_key: &'v KC::EItem,
         cache: &mut FxHashMap<K1, Option<Cow<'ctx, [u8]>>>,
@@ -77,7 +77,7 @@ impl<'ctx> DatabaseCache<'ctx> {
     }
 
     fn get_value_from_keys<'v, K1, KC, DC>(
-        txn: &'ctx RoTxn,
+        txn: &'ctx RoTxn<'_>,
         cache_key: K1,
         db_keys: &'v [KC::EItem],
         cache: &mut FxHashMap<K1, Option<Cow<'ctx, [u8]>>>,
@@ -99,7 +99,7 @@ impl<'ctx> DatabaseCache<'ctx> {
                     .iter()
                     .filter_map(|key| db.get(txn, key).transpose())
                     .map(|v| v.map(Cow::Borrowed))
-                    .collect::<std::result::Result<Vec<Cow<[u8]>>, _>>()?;
+                    .collect::<std::result::Result<Vec<Cow<'_, [u8]>>, _>>()?;
 
                 if bitmaps.is_empty() {
                     None
@@ -23,7 +23,7 @@ pub struct DistinctOutput {
 /// - `excluded`: the set of document ids that contain a value for the given field that occurs
 ///   in the given candidates.
 pub fn apply_distinct_rule(
-    ctx: &mut SearchContext,
+    ctx: &mut SearchContext<'_>,
     field_id: u16,
     candidates: &RoaringBitmap,
 ) -> Result<DistinctOutput> {
@@ -42,7 +42,7 @@ pub fn apply_distinct_rule(
 /// Apply the distinct rule defined by [`apply_distinct_rule`] for a single document id.
 pub fn distinct_single_docid(
     index: &Index,
-    txn: &RoTxn,
+    txn: &RoTxn<'_>,
     field_id: u16,
     docid: u32,
     excluded: &mut RoaringBitmap,
@@ -72,7 +72,7 @@ pub fn distinct_single_docid(
 /// Return all the docids containing the given value in the given field
 fn facet_value_docids(
     database: Database<FacetGroupKeyCodec<BytesRefCodec>, FacetGroupValueCodec>,
-    txn: &RoTxn,
+    txn: &RoTxn<'_>,
     field_id: u16,
     facet_value: &[u8],
 ) -> heed::Result<Option<RoaringBitmap>> {
@@ -86,7 +86,7 @@ fn facet_number_values<'a>(
    docid: u32,
     field_id: u16,
     index: &Index,
-    txn: &'a RoTxn,
+    txn: &'a RoTxn<'a>,
 ) -> Result<RoPrefix<'a, FieldDocIdFacetCodec<BytesRefCodec>, Unit>> {
     let key = facet_values_prefix_key(field_id, docid);
 
@@ -104,7 +104,7 @@ pub fn facet_string_values<'a>(
     docid: u32,
     field_id: u16,
     index: &Index,
-    txn: &'a RoTxn,
+    txn: &'a RoTxn<'a>,
 ) -> Result<RoPrefix<'a, FieldDocIdFacetCodec<BytesRefCodec>, Str>> {
     let key = facet_values_prefix_key(field_id, docid);
 
@@ -28,7 +28,7 @@ fn facet_number_values<'a>(
     docid: u32,
     field_id: u16,
     index: &Index,
-    txn: &'a RoTxn,
+    txn: &'a RoTxn<'a>,
 ) -> Result<RoPrefix<'a, FieldDocIdFacetCodec<OrderedF64Codec>, Unit>> {
     let key = facet_values_prefix_key(field_id, docid);
 
@@ -109,7 +109,7 @@ impl<Q: RankingRuleQueryTrait> GeoSort<Q> {
     /// Drop the rtree if we don't need it anymore.
     fn fill_buffer(
         &mut self,
-        ctx: &mut SearchContext,
+        ctx: &mut SearchContext<'_>,
         geo_candidates: &RoaringBitmap,
     ) -> Result<()> {
         debug_assert!(self.field_ids.is_some(), "fill_buffer can't be called without the lat&lng");
@@ -182,7 +182,7 @@ fn geo_value(
     field_lat: u16,
     field_lng: u16,
     index: &Index,
-    rtxn: &RoTxn,
+    rtxn: &RoTxn<'_>,
 ) -> Result<[f64; 2]> {
     let extract_geo = |geo_field: u16| -> Result<f64> {
         match facet_number_values(docid, geo_field, index, rtxn)?.next() {
@@ -375,7 +375,7 @@ impl<'ctx, G: RankingRuleGraphTrait> RankingRule<'ctx, QueryGraph> for GraphBase
 /// docids and the previous path docids is empty.
 #[allow(clippy::too_many_arguments)]
 fn visit_path_condition<G: RankingRuleGraphTrait>(
-    ctx: &mut SearchContext,
+    ctx: &mut SearchContext<'_>,
     graph: &mut RankingRuleGraph<G>,
     universe: &RoaringBitmap,
     dead_ends_cache: &mut DeadEndsCache<G::Condition>,
@@ -20,13 +20,13 @@ pub trait SearchLogger<Q: RankingRuleQueryTrait> {
     fn query_for_initial_universe(&mut self, _query: &Q);
 
     /// Logs the ranking rules used to perform the search query
-    fn ranking_rules(&mut self, _rr: &[BoxRankingRule<Q>]);
+    fn ranking_rules(&mut self, _rr: &[BoxRankingRule<'_, Q>]);
 
     /// Logs the start of a ranking rule's iteration.
     fn start_iteration_ranking_rule(
         &mut self,
         _ranking_rule_idx: usize,
-        _ranking_rule: &dyn RankingRule<Q>,
+        _ranking_rule: &dyn RankingRule<'_, Q>,
         _query: &Q,
         _universe: &RoaringBitmap,
     ) {
@@ -35,7 +35,7 @@ pub trait SearchLogger<Q: RankingRuleQueryTrait> {
     fn next_bucket_ranking_rule(
         &mut self,
         _ranking_rule_idx: usize,
-        _ranking_rule: &dyn RankingRule<Q>,
+        _ranking_rule: &dyn RankingRule<'_, Q>,
         _universe: &RoaringBitmap,
         _candidates: &RoaringBitmap,
     ) {
@@ -44,7 +44,7 @@ pub trait SearchLogger<Q: RankingRuleQueryTrait> {
     fn skip_bucket_ranking_rule(
         &mut self,
         _ranking_rule_idx: usize,
-        _ranking_rule: &dyn RankingRule<Q>,
+        _ranking_rule: &dyn RankingRule<'_, Q>,
         _candidates: &RoaringBitmap,
     ) {
     }
@@ -52,7 +52,7 @@ pub trait SearchLogger<Q: RankingRuleQueryTrait> {
     fn end_iteration_ranking_rule(
         &mut self,
         _ranking_rule_idx: usize,
-        _ranking_rule: &dyn RankingRule<Q>,
+        _ranking_rule: &dyn RankingRule<'_, Q>,
         _universe: &RoaringBitmap,
     ) {
     }
@@ -73,7 +73,7 @@ impl<Q: RankingRuleQueryTrait> SearchLogger<Q> for DefaultSearchLogger {
 
     fn query_for_initial_universe(&mut self, _query: &Q) {}
 
-    fn ranking_rules(&mut self, _rr: &[BoxRankingRule<Q>]) {}
+    fn ranking_rules(&mut self, _rr: &[BoxRankingRule<'_, Q>]) {}
 
     fn add_to_results(&mut self, _docids: &[u32]) {}
 
@@ -69,14 +69,14 @@ impl SearchLogger<QueryGraph> for VisualSearchLogger {
     fn initial_universe(&mut self, universe: &RoaringBitmap) {
         self.initial_universe = Some(universe.clone());
     }
-    fn ranking_rules(&mut self, rr: &[BoxRankingRule<QueryGraph>]) {
+    fn ranking_rules(&mut self, rr: &[BoxRankingRule<'_, QueryGraph>]) {
         self.ranking_rules_ids = Some(rr.iter().map(|rr| rr.id()).collect());
     }
 
     fn start_iteration_ranking_rule(
         &mut self,
         ranking_rule_idx: usize,
-        ranking_rule: &dyn RankingRule<QueryGraph>,
+        ranking_rule: &dyn RankingRule<'_, QueryGraph>,
         _query: &QueryGraph,
         universe: &RoaringBitmap,
     ) {
@@ -97,7 +97,7 @@ impl SearchLogger<QueryGraph> for VisualSearchLogger {
     fn next_bucket_ranking_rule(
         &mut self,
         ranking_rule_idx: usize,
-        _ranking_rule: &dyn RankingRule<QueryGraph>,
+        _ranking_rule: &dyn RankingRule<'_, QueryGraph>,
         universe: &RoaringBitmap,
         bucket: &RoaringBitmap,
     ) {
@@ -110,7 +110,7 @@ impl SearchLogger<QueryGraph> for VisualSearchLogger {
     fn skip_bucket_ranking_rule(
         &mut self,
         ranking_rule_idx: usize,
-        _ranking_rule: &dyn RankingRule<QueryGraph>,
+        _ranking_rule: &dyn RankingRule<'_, QueryGraph>,
         bucket: &RoaringBitmap,
     ) {
         self.events.push(SearchEvents::RankingRuleSkipBucket {
@@ -122,7 +122,7 @@ impl SearchLogger<QueryGraph> for VisualSearchLogger {
     fn end_iteration_ranking_rule(
         &mut self,
         ranking_rule_idx: usize,
-        _ranking_rule: &dyn RankingRule<QueryGraph>,
+        _ranking_rule: &dyn RankingRule<'_, QueryGraph>,
         _universe: &RoaringBitmap,
     ) {
         self.events.push(SearchEvents::RankingRuleEndIteration { ranking_rule_idx });
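The logger hunks show the lint's effect on traits and type aliases rather than structs: `RankingRule` takes a `'ctx` lifetime ahead of its query type, so `&dyn RankingRule<Q>` becomes `&dyn RankingRule<'_, Q>`, and locals typed with the boxed alias spell out `'ctx`. A self-contained sketch with hypothetical stand-ins (`Rule`, `BoxRule`), not milli's actual trait:

// A trait with a lifetime parameter, in the style of `RankingRule<'ctx, Q>`.
trait Rule<'ctx, Q> {
    fn id(&self) -> &'static str;
}

// Boxed alias in the style of `BoxRankingRule<'ctx, Q>`.
type BoxRule<'ctx, Q> = Box<dyn Rule<'ctx, Q> + 'ctx>;

struct Words;

impl<'ctx> Rule<'ctx, String> for Words {
    fn id(&self) -> &'static str {
        "words"
    }
}

// Inside a generic function the elided lifetime has a concrete answer
// (`'ctx`), which is exactly what the diff writes out.
fn rule_ids<'ctx>(rules: &[BoxRule<'ctx, String>]) -> Vec<&'static str> {
    rules.iter().map(|r| r.id()).collect()
}

fn main() {
    let rules: Vec<BoxRule<'static, String>> = vec![Box::new(Words)];
    assert_eq!(rule_ids(&rules), vec!["words"]);
}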
@@ -32,7 +32,7 @@ pub struct MatchingWords {
 }
 
 impl MatchingWords {
-    pub fn new(ctx: SearchContext, located_terms: Vec<LocatedQueryTerm>) -> Self {
+    pub fn new(ctx: SearchContext<'_>, located_terms: Vec<LocatedQueryTerm>) -> Self {
         let mut phrases = Vec::new();
         let mut words = Vec::new();
 
@@ -74,7 +74,7 @@ impl MatchingWords {
     }
 
     /// Try to match the token with one of the located_words.
-    fn match_unique_words<'a>(&'a self, token: &Token) -> Option<MatchType<'a>> {
+    fn match_unique_words<'a>(&'a self, token: &Token<'_>) -> Option<MatchType<'a>> {
         for located_words in &self.words {
             for word in &located_words.value {
                 let word = self.word_interner.get(*word);
@@ -166,7 +166,7 @@ impl<'a> PartialMatch<'a> {
     /// - None if the given token breaks the partial match
     /// - Partial if the given token matches the partial match but doesn't complete it
     /// - Full if the given token completes the partial match
-    pub fn match_token(self, token: &Token) -> Option<MatchType<'a>> {
+    pub fn match_token(self, token: &Token<'_>) -> Option<MatchType<'a>> {
         let Self { mut matching_words, ids, .. } = self;
 
         let is_matching = match matching_words.first()? {
@@ -198,7 +198,7 @@ impl<'a> PartialMatch<'a> {
 }
 
 impl fmt::Debug for MatchingWords {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let MatchingWords { word_interner, phrase_interner, phrases, words } = self;
 
         let phrases: Vec<_> = phrases
@@ -123,7 +123,7 @@ impl<'t> Matcher<'t, '_> {
     /// some words are counted as matches only if they are close together and in the good order,
     /// compute_partial_match peek into next words to validate if the match is complete.
     fn compute_partial_match<'a>(
-        mut partial: PartialMatch,
+        mut partial: PartialMatch<'a>,
         token_position: usize,
         word_position: usize,
         words_positions: &mut impl Iterator<Item = (usize, usize, &'a Token<'a>)>,
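One edit above does slightly more than expose an elision: writing `mut partial: PartialMatch<'a>` ties the argument to the same `'a` as the token iterator parameter, where the bare `PartialMatch` left the compiler to introduce an independent lifetime. A toy illustration of the tied form (hypothetical types, not milli's matcher):

struct Token<'a>(&'a str);

struct PartialMatch<'a> {
    first: &'a str,
}

// `partial` and the items yielded by `tokens` share `'a`, so the two
// borrows can be compared directly.
fn compute_partial_match<'a>(
    partial: PartialMatch<'a>,
    tokens: &mut impl Iterator<Item = &'a Token<'a>>,
) -> bool {
    tokens.any(|token| token.0 == partial.first)
}

fn main() {
    let text = String::from("the quick fox");
    let words: Vec<Token<'_>> = text.split(' ').map(Token).collect();
    let partial = PartialMatch { first: "quick" };
    assert!(compute_partial_match(partial, &mut words.iter()));
}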
@@ -244,7 +244,12 @@ impl<'t> Matcher<'t, '_> {
     }
 
     /// Returns the bounds in byte index of the crop window.
-    fn crop_bounds(&self, tokens: &[Token], matches: &[Match], crop_size: usize) -> (usize, usize) {
+    fn crop_bounds(
+        &self,
+        tokens: &[Token<'_>],
+        matches: &[Match],
+        crop_size: usize,
+    ) -> (usize, usize) {
         // if there is no match, we start from the beginning of the string by default.
         let first_match_word_position = matches.first().map(|m| m.word_position).unwrap_or(0);
         let first_match_token_position = matches.first().map(|m| m.token_position).unwrap_or(0);
@@ -505,7 +510,7 @@ mod tests {
     use crate::{execute_search, filtered_universe, SearchContext, TimeBudget};
 
     impl<'a> MatcherBuilder<'a> {
-        fn new_test(rtxn: &'a heed::RoTxn, index: &'a TempIndex, query: &str) -> Self {
+        fn new_test(rtxn: &'a heed::RoTxn<'a>, index: &'a TempIndex, query: &str) -> Self {
             let mut ctx = SearchContext::new(index, rtxn).unwrap();
             let universe = filtered_universe(ctx.index, ctx.txn, &None).unwrap();
             let crate::search::PartialSearchResult { located_query_terms, .. } = execute_search(
@@ -183,7 +183,7 @@ impl RestrictedFids {
 
 /// Apply the [`TermsMatchingStrategy`] to the query graph and resolve it.
 fn resolve_maximally_reduced_query_graph(
-    ctx: &mut SearchContext,
+    ctx: &mut SearchContext<'_>,
     universe: &RoaringBitmap,
     query_graph: &QueryGraph,
     matching_strategy: TermsMatchingStrategy,
@@ -214,7 +214,7 @@ fn resolve_maximally_reduced_query_graph(
 
 #[tracing::instrument(level = "trace", skip_all, target = "search::universe")]
 fn resolve_universe(
-    ctx: &mut SearchContext,
+    ctx: &mut SearchContext<'_>,
     initial_universe: &RoaringBitmap,
     query_graph: &QueryGraph,
     matching_strategy: TermsMatchingStrategy,
@@ -231,7 +231,7 @@ fn resolve_universe(
 
 #[tracing::instrument(level = "trace", skip_all, target = "search::query")]
 fn resolve_negative_words(
-    ctx: &mut SearchContext,
+    ctx: &mut SearchContext<'_>,
     negative_words: &[Word],
 ) -> Result<RoaringBitmap> {
     let mut negative_bitmap = RoaringBitmap::new();
@@ -245,7 +245,7 @@ fn resolve_negative_words(
 
 #[tracing::instrument(level = "trace", skip_all, target = "search::query")]
 fn resolve_negative_phrases(
-    ctx: &mut SearchContext,
+    ctx: &mut SearchContext<'_>,
     negative_phrases: &[LocatedQueryTerm],
 ) -> Result<RoaringBitmap> {
     let mut negative_bitmap = RoaringBitmap::new();
@@ -267,7 +267,7 @@ fn get_ranking_rules_for_placeholder_search<'ctx>(
     let mut sort = false;
     let mut sorted_fields = HashSet::new();
     let mut geo_sorted = false;
-    let mut ranking_rules: Vec<BoxRankingRule<PlaceholderQuery>> = vec![];
+    let mut ranking_rules: Vec<BoxRankingRule<'ctx, PlaceholderQuery>> = vec![];
     let settings_ranking_rules = ctx.index.criteria(ctx.txn)?;
     for rr in settings_ranking_rules {
         match rr {
@@ -326,7 +326,7 @@ fn get_ranking_rules_for_vector<'ctx>(
     let mut geo_sorted = false;
 
     let mut vector = false;
-    let mut ranking_rules: Vec<BoxRankingRule<PlaceholderQuery>> = vec![];
+    let mut ranking_rules: Vec<BoxRankingRule<'ctx, PlaceholderQuery>> = vec![];
 
     let settings_ranking_rules = ctx.index.criteria(ctx.txn)?;
     for rr in settings_ranking_rules {
@@ -406,7 +406,7 @@ fn get_ranking_rules_for_query_graph_search<'ctx>(
         words = true;
     }
 
-    let mut ranking_rules: Vec<BoxRankingRule<QueryGraph>> = vec![];
+    let mut ranking_rules: Vec<BoxRankingRule<'ctx, QueryGraph>> = vec![];
     let settings_ranking_rules = ctx.index.criteria(ctx.txn)?;
     for rr in settings_ranking_rules {
         // Add Words before any of: typo, proximity, attribute
@@ -552,7 +552,7 @@ fn resolve_sort_criteria<'ctx, Query: RankingRuleQueryTrait>(
 pub fn filtered_universe(
     index: &Index,
     txn: &RoTxn<'_>,
-    filters: &Option<Filter>,
+    filters: &Option<Filter<'_>>,
 ) -> Result<RoaringBitmap> {
     Ok(if let Some(filters) = filters {
         filters.evaluate(txn, index)?
@@ -563,7 +563,7 @@ pub fn filtered_universe(
 
 #[allow(clippy::too_many_arguments)]
 pub fn execute_vector_search(
-    ctx: &mut SearchContext,
+    ctx: &mut SearchContext<'_>,
     vector: &[f32],
     scoring_strategy: ScoringStrategy,
     universe: RoaringBitmap,
@@ -622,7 +622,7 @@ pub fn execute_vector_search(
 #[allow(clippy::too_many_arguments)]
 #[tracing::instrument(level = "trace", skip_all, target = "search::main")]
 pub fn execute_search(
-    ctx: &mut SearchContext,
+    ctx: &mut SearchContext<'_>,
     query: Option<&str>,
     terms_matching_strategy: TermsMatchingStrategy,
     scoring_strategy: ScoringStrategy,
@@ -775,7 +775,10 @@ pub fn execute_search(
     })
 }
 
-fn check_sort_criteria(ctx: &SearchContext, sort_criteria: Option<&Vec<AscDesc>>) -> Result<()> {
+fn check_sort_criteria(
+    ctx: &SearchContext<'_>,
+    sort_criteria: Option<&Vec<AscDesc>>,
+) -> Result<()> {
     let sort_criteria = if let Some(sort_criteria) = sort_criteria {
         sort_criteria
|
sort_criteria
|
||||||
} else {
|
} else {
|
||||||
|
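Nearly every hunk in this section makes the same mechanical change: a type whose lifetime parameter used to be elided inside a path (SearchContext, Filter, BoxRankingRule, and so on) now spells that lifetime out, either as the anonymous '_ or as a named lifetime already in scope. This is what the rustc lint `elided_lifetimes_in_paths` (part of the `rust_2018_idioms` group that projects commonly enforce when moving editions) asks for; the edition bump itself does not require it. A minimal sketch of the pattern, using hypothetical types rather than milli's own:

    // Deny the idiom lint, as an edition-migrating crate might.
    #![deny(elided_lifetimes_in_paths)]

    // A stand-in for milli's SearchContext<'ctx>: a type that borrows data.
    struct Context<'a> {
        query: &'a str,
    }

    // `fn describe(ctx: &Context)` would trip the lint because the borrow
    // inside `Context` is invisible at the use site; `&Context<'_>` makes
    // it explicit while letting the compiler infer the same lifetime.
    fn describe(ctx: &Context<'_>) -> String {
        format!("searching for {:?}", ctx.query)
    }

    fn main() {
        let ctx = Context { query: "milli" };
        println!("{}", describe(&ctx));
    }

The change is purely syntactic: '_ asks the compiler to infer exactly the lifetime it would have elided, so no borrow-checking behaviour changes.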
@@ -93,7 +93,7 @@ impl QueryGraph {
 /// Build the query graph from the parsed user search query, return an updated list of the located query terms
 /// which contains ngrams.
 pub fn from_query(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 // The terms here must be consecutive
 terms: &[LocatedQueryTerm],
 ) -> Result<(QueryGraph, Vec<LocatedQueryTerm>)> {
@@ -294,7 +294,7 @@ impl QueryGraph {

 pub fn removal_order_for_terms_matching_strategy_frequency(
 &self,
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 ) -> Result<Vec<SmallBitmap<QueryNode>>> {
 // lookup frequency for each term
 let mut term_with_frequency: Vec<(u8, u64)> = {
@@ -337,7 +337,7 @@ impl QueryGraph {

 pub fn removal_order_for_terms_matching_strategy_last(
 &self,
-ctx: &SearchContext,
+ctx: &SearchContext<'_>,
 ) -> Vec<SmallBitmap<QueryNode>> {
 let (first_term_idx, last_term_idx) = {
 let mut first_term_idx = u8::MAX;
@@ -370,7 +370,7 @@ impl QueryGraph {

 pub fn removal_order_for_terms_matching_strategy(
 &self,
-ctx: &SearchContext,
+ctx: &SearchContext<'_>,
 order: impl Fn(u8) -> u16,
 ) -> Vec<SmallBitmap<QueryNode>> {
 let mut nodes_to_remove = BTreeMap::<u16, SmallBitmap<QueryNode>>::new();
@@ -398,7 +398,7 @@ impl QueryGraph {
 }

 /// Number of words in the phrases in this query graph
-pub(crate) fn words_in_phrases_count(&self, ctx: &SearchContext) -> usize {
+pub(crate) fn words_in_phrases_count(&self, ctx: &SearchContext<'_>) -> usize {
 let mut word_count = 0;
 for (_, node) in self.nodes.iter() {
 match &node.data {
@@ -27,7 +27,7 @@ pub enum ZeroOrOneTypo {
 }

 impl Interned<QueryTerm> {
-pub fn compute_fully_if_needed(self, ctx: &mut SearchContext) -> Result<()> {
+pub fn compute_fully_if_needed(self, ctx: &mut SearchContext<'_>) -> Result<()> {
 let s = ctx.term_interner.get_mut(self);
 if s.max_levenshtein_distance <= 1 && s.one_typo.is_uninit() {
 assert!(s.two_typo.is_uninit());
@@ -48,7 +48,7 @@ impl Interned<QueryTerm> {

 fn find_zero_typo_prefix_derivations(
 word_interned: Interned<String>,
-fst: fst::Set<Cow<[u8]>>,
+fst: fst::Set<Cow<'_, [u8]>>,
 word_interner: &mut DedupInterner<String>,
 mut visit: impl FnMut(Interned<String>) -> Result<ControlFlow<()>>,
 ) -> Result<()> {
@@ -71,7 +71,7 @@ fn find_zero_typo_prefix_derivations(
 }

 fn find_zero_one_typo_derivations(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 word_interned: Interned<String>,
 is_prefix: bool,
 mut visit: impl FnMut(Interned<String>, ZeroOrOneTypo) -> Result<ControlFlow<()>>,
@@ -114,7 +114,7 @@ fn find_zero_one_typo_derivations(
 fn find_zero_one_two_typo_derivations(
 word_interned: Interned<String>,
 is_prefix: bool,
-fst: fst::Set<Cow<[u8]>>,
+fst: fst::Set<Cow<'_, [u8]>>,
 word_interner: &mut DedupInterner<String>,
 mut visit: impl FnMut(Interned<String>, NumberOfTypos) -> Result<ControlFlow<()>>,
 ) -> Result<()> {
@@ -172,7 +172,7 @@ fn find_zero_one_two_typo_derivations(
 }

 pub fn partially_initialized_term_from_word(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 word: &str,
 max_typo: u8,
 is_prefix: bool,
@@ -265,7 +265,7 @@ pub fn partially_initialized_term_from_word(
 })
 }

-fn find_split_words(ctx: &mut SearchContext, word: &str) -> Result<Option<Interned<Phrase>>> {
+fn find_split_words(ctx: &mut SearchContext<'_>, word: &str) -> Result<Option<Interned<Phrase>>> {
 if let Some((l, r)) = split_best_frequency(ctx, word)? {
 Ok(Some(ctx.phrase_interner.insert(Phrase { words: vec![Some(l), Some(r)] })))
 } else {
@@ -274,7 +274,7 @@ fn find_split_words(ctx: &mut SearchContext, word: &str) -> Result<Option<Intern
 }

 impl Interned<QueryTerm> {
-fn initialize_one_typo_subterm(self, ctx: &mut SearchContext) -> Result<()> {
+fn initialize_one_typo_subterm(self, ctx: &mut SearchContext<'_>) -> Result<()> {
 let self_mut = ctx.term_interner.get_mut(self);

 let allows_split_words = self_mut.allows_split_words();
@@ -340,7 +340,7 @@ impl Interned<QueryTerm> {

 Ok(())
 }
-fn initialize_one_and_two_typo_subterm(self, ctx: &mut SearchContext) -> Result<()> {
+fn initialize_one_and_two_typo_subterm(self, ctx: &mut SearchContext<'_>) -> Result<()> {
 let self_mut = ctx.term_interner.get_mut(self);
 let QueryTerm {
 original,
@@ -406,7 +406,7 @@ impl Interned<QueryTerm> {
 ///
 /// Return `None` if the original word cannot be split.
 fn split_best_frequency(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 original: &str,
 ) -> Result<Option<(Interned<String>, Interned<String>)>> {
 let chars = original.char_indices().skip(1);
@@ -128,7 +128,7 @@ impl QueryTermSubset {
 pub fn make_mandatory(&mut self) {
 self.mandatory = true;
 }
-pub fn exact_term(&self, ctx: &SearchContext) -> Option<ExactTerm> {
+pub fn exact_term(&self, ctx: &SearchContext<'_>) -> Option<ExactTerm> {
 let full_query_term = ctx.term_interner.get(self.original);
 if full_query_term.ngram_words.is_some() {
 return None;
@@ -174,7 +174,7 @@ impl QueryTermSubset {
 self.two_typo_subset.intersect(&other.two_typo_subset);
 }

-pub fn use_prefix_db(&self, ctx: &SearchContext) -> Option<Word> {
+pub fn use_prefix_db(&self, ctx: &SearchContext<'_>) -> Option<Word> {
 let original = ctx.term_interner.get(self.original);
 let use_prefix_db = original.zero_typo.use_prefix_db?;
 let word = match &self.zero_typo_subset {
@@ -198,7 +198,7 @@ impl QueryTermSubset {
 }
 pub fn all_single_words_except_prefix_db(
 &self,
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 ) -> Result<BTreeSet<Word>> {
 let mut result = BTreeSet::default();
 if !self.one_typo_subset.is_empty() || !self.two_typo_subset.is_empty() {
@@ -290,7 +290,7 @@ impl QueryTermSubset {

 Ok(result)
 }
-pub fn all_phrases(&self, ctx: &mut SearchContext) -> Result<BTreeSet<Interned<Phrase>>> {
+pub fn all_phrases(&self, ctx: &mut SearchContext<'_>) -> Result<BTreeSet<Interned<Phrase>>> {
 let mut result = BTreeSet::default();

 if !self.one_typo_subset.is_empty() {
@@ -328,7 +328,7 @@ impl QueryTermSubset {
 Ok(result)
 }

-pub fn original_phrase(&self, ctx: &SearchContext) -> Option<Interned<Phrase>> {
+pub fn original_phrase(&self, ctx: &SearchContext<'_>) -> Option<Interned<Phrase>> {
 let t = ctx.term_interner.get(self.original);
 if let Some(p) = t.zero_typo.phrase {
 if self.zero_typo_subset.contains_phrase(p) {
@@ -337,7 +337,7 @@ impl QueryTermSubset {
 }
 None
 }
-pub fn max_typo_cost(&self, ctx: &SearchContext) -> u8 {
+pub fn max_typo_cost(&self, ctx: &SearchContext<'_>) -> u8 {
 let t = ctx.term_interner.get(self.original);
 match t.max_levenshtein_distance {
 0 => {
@@ -368,7 +368,7 @@ impl QueryTermSubset {
 _ => panic!(),
 }
 }
-pub fn keep_only_exact_term(&mut self, ctx: &SearchContext) {
+pub fn keep_only_exact_term(&mut self, ctx: &SearchContext<'_>) {
 if let Some(term) = self.exact_term(ctx) {
 match term {
 ExactTerm::Phrase(p) => {
@@ -399,7 +399,7 @@ impl QueryTermSubset {
 pub fn clear_two_typo_subset(&mut self) {
 self.two_typo_subset = NTypoTermSubset::Nothing;
 }
-pub fn description(&self, ctx: &SearchContext) -> String {
+pub fn description(&self, ctx: &SearchContext<'_>) -> String {
 let t = ctx.term_interner.get(self.original);
 ctx.word_interner.get(t.original).to_owned()
 }
@@ -446,7 +446,7 @@ impl QueryTerm {

 impl Interned<QueryTerm> {
 /// Return the original word from the given query term
-fn original_single_word(self, ctx: &SearchContext) -> Option<Interned<String>> {
+fn original_single_word(self, ctx: &SearchContext<'_>) -> Option<Interned<String>> {
 let self_ = ctx.term_interner.get(self);
 if self_.ngram_words.is_some() {
 None
@@ -477,7 +477,7 @@ impl QueryTerm {
 pub fn is_prefix(&self) -> bool {
 self.is_prefix
 }
-pub fn original_word(&self, ctx: &SearchContext) -> String {
+pub fn original_word(&self, ctx: &SearchContext<'_>) -> String {
 ctx.word_interner.get(self.original).clone()
 }
 }

@@ -23,8 +23,8 @@ pub struct ExtractedTokens {
 /// Convert the tokenised search query into a list of located query terms.
 #[tracing::instrument(level = "trace", skip_all, target = "search::query")]
 pub fn located_query_terms_from_tokens(
-ctx: &mut SearchContext,
-query: NormalizedTokenIter,
+ctx: &mut SearchContext<'_>,
+query: NormalizedTokenIter<'_, '_>,
 words_limit: Option<usize>,
 ) -> Result<ExtractedTokens> {
 let nbr_typos = number_of_typos_allowed(ctx)?;
@@ -214,7 +214,7 @@ pub fn number_of_typos_allowed<'ctx>(
 }

 pub fn make_ngram(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 terms: &[LocatedQueryTerm],
 number_of_typos_allowed: &impl Fn(&str) -> u8,
 ) -> Result<Option<LocatedQueryTerm>> {
@@ -297,7 +297,12 @@ impl PhraseBuilder {
 }

 // precondition: token has kind Word or StopWord
-fn push_word(&mut self, ctx: &mut SearchContext, token: &charabia::Token, position: u16) {
+fn push_word(
+&mut self,
+ctx: &mut SearchContext<'_>,
+token: &charabia::Token<'_>,
+position: u16,
+) {
 if self.is_empty() {
 self.start = position;
 }
@@ -311,7 +316,7 @@ impl PhraseBuilder {
 }
 }

-fn build(self, ctx: &mut SearchContext) -> Option<LocatedQueryTerm> {
+fn build(self, ctx: &mut SearchContext<'_>) -> Option<LocatedQueryTerm> {
 if self.is_empty() {
 return None;
 }
@@ -10,11 +10,11 @@ pub struct Phrase {
 pub words: Vec<Option<Interned<String>>>,
 }
 impl Interned<Phrase> {
-pub fn description(self, ctx: &SearchContext) -> String {
+pub fn description(self, ctx: &SearchContext<'_>) -> String {
 let p = ctx.phrase_interner.get(self);
 p.words.iter().flatten().map(|w| ctx.word_interner.get(*w)).join(" ")
 }
-pub fn words(self, ctx: &SearchContext) -> Vec<Option<Interned<String>>> {
+pub fn words(self, ctx: &SearchContext<'_>) -> Vec<Option<Interned<String>>> {
 let p = ctx.phrase_interner.get(self);
 p.words.clone()
 }
@@ -10,7 +10,7 @@ use crate::Result;
 impl<G: RankingRuleGraphTrait> RankingRuleGraph<G> {
 /// Build the ranking rule graph from the given query graph
 pub fn build(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 query_graph: QueryGraph,
 cost_of_ignoring_node: MappedInterner<QueryNode, Option<(u32, SmallBitmap<QueryNode>)>>,
 ) -> Result<Self> {
@@ -117,7 +117,7 @@ impl<'a, G: RankingRuleGraphTrait> PathVisitor<'a, G> {
 }

 /// See module documentation
-pub fn visit_paths(mut self, visit: VisitFn<G>) -> Result<()> {
+pub fn visit_paths(mut self, visit: VisitFn<'_, G>) -> Result<()> {
 let _ =
 self.state.visit_node(self.ctx.graph.query_graph.root_node, visit, &mut self.ctx)?;
 Ok(())
@@ -132,8 +132,8 @@ impl<G: RankingRuleGraphTrait> VisitorState<G> {
 fn visit_node(
 &mut self,
 from_node: Interned<QueryNode>,
-visit: VisitFn<G>,
-ctx: &mut VisitorContext<G>,
+visit: VisitFn<'_, G>,
+ctx: &mut VisitorContext<'_, G>,
 ) -> Result<ControlFlow<(), bool>> {
 // any valid path will be found from this point
 // if a valid path was found, then we know that the DeadEndsCache may have been updated,
@@ -189,8 +189,8 @@ impl<G: RankingRuleGraphTrait> VisitorState<G> {
 &mut self,
 dest_node: Interned<QueryNode>,
 edge_new_nodes_to_skip: &SmallBitmap<QueryNode>,
-visit: VisitFn<G>,
-ctx: &mut VisitorContext<G>,
+visit: VisitFn<'_, G>,
+ctx: &mut VisitorContext<'_, G>,
 ) -> Result<ControlFlow<(), bool>> {
 if !ctx
 .all_costs_from_node
@@ -228,8 +228,8 @@ impl<G: RankingRuleGraphTrait> VisitorState<G> {
 condition: Interned<G::Condition>,
 dest_node: Interned<QueryNode>,
 edge_new_nodes_to_skip: &SmallBitmap<QueryNode>,
-visit: VisitFn<G>,
-ctx: &mut VisitorContext<G>,
+visit: VisitFn<'_, G>,
+ctx: &mut VisitorContext<'_, G>,
 ) -> Result<ControlFlow<(), bool>> {
 assert!(dest_node != ctx.graph.query_graph.end_node);

@@ -33,7 +33,7 @@ impl<G: RankingRuleGraphTrait> ConditionDocIdsCache<G> {
 /// and inserted in the cache.
 pub fn get_computed_condition<'s>(
 &'s mut self,
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 interned_condition: Interned<G::Condition>,
 graph: &mut RankingRuleGraph<G>,
 universe: &RoaringBitmap,
@@ -17,7 +17,7 @@ pub enum ExactnessCondition {
 pub enum ExactnessGraph {}

 fn compute_docids(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 dest_node: &LocatedQueryTermSubset,
 universe: &RoaringBitmap,
 ) -> Result<RoaringBitmap> {
@@ -46,7 +46,7 @@ impl RankingRuleGraphTrait for ExactnessGraph {

 #[tracing::instrument(level = "trace", skip_all, target = "search::exactness")]
 fn resolve_condition(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 condition: &Self::Condition,
 universe: &RoaringBitmap,
 ) -> Result<ComputedCondition> {
@@ -74,7 +74,7 @@ impl RankingRuleGraphTrait for ExactnessGraph {

 #[tracing::instrument(level = "trace", skip_all, target = "search::exactness")]
 fn build_edges(
-_ctx: &mut SearchContext,
+_ctx: &mut SearchContext<'_>,
 conditions_interner: &mut DedupInterner<Self::Condition>,
 _source_node: Option<&LocatedQueryTermSubset>,
 dest_node: &LocatedQueryTermSubset,
@@ -22,7 +22,7 @@ impl RankingRuleGraphTrait for FidGraph {

 #[tracing::instrument(level = "trace", skip_all, target = "search::fid")]
 fn resolve_condition(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 condition: &Self::Condition,
 universe: &RoaringBitmap,
 ) -> Result<ComputedCondition> {
@@ -47,7 +47,7 @@ impl RankingRuleGraphTrait for FidGraph {

 #[tracing::instrument(level = "trace", skip_all, target = "search::fid")]
 fn build_edges(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 conditions_interner: &mut DedupInterner<Self::Condition>,
 _from: Option<&LocatedQueryTermSubset>,
 to_term: &LocatedQueryTermSubset,
@@ -99,14 +99,14 @@ pub trait RankingRuleGraphTrait: Sized + 'static {
 /// Compute the document ids associated with the given edge condition,
 /// restricted to the given universe.
 fn resolve_condition(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 condition: &Self::Condition,
 universe: &RoaringBitmap,
 ) -> Result<ComputedCondition>;

 /// Return the costs and conditions of the edges going from the source node to the destination node
 fn build_edges(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 conditions_interner: &mut DedupInterner<Self::Condition>,
 source_node: Option<&LocatedQueryTermSubset>,
 dest_node: &LocatedQueryTermSubset,
@@ -22,7 +22,7 @@ impl RankingRuleGraphTrait for PositionGraph {

 #[tracing::instrument(level = "trace", skip_all, target = "search::position")]
 fn resolve_condition(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 condition: &Self::Condition,
 universe: &RoaringBitmap,
 ) -> Result<ComputedCondition> {
@@ -47,7 +47,7 @@ impl RankingRuleGraphTrait for PositionGraph {

 #[tracing::instrument(level = "trace", skip_all, target = "search::position")]
 fn build_edges(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 conditions_interner: &mut DedupInterner<Self::Condition>,
 _from: Option<&LocatedQueryTermSubset>,
 to_term: &LocatedQueryTermSubset,
@@ -8,7 +8,7 @@ use crate::search::new::SearchContext;
 use crate::Result;

 pub fn build_edges(
-_ctx: &mut SearchContext,
+_ctx: &mut SearchContext<'_>,
 conditions_interner: &mut DedupInterner<ProximityCondition>,
 left_term: Option<&LocatedQueryTermSubset>,
 right_term: &LocatedQueryTermSubset,
@@ -13,7 +13,7 @@ use crate::search::new::{SearchContext, Word};
 use crate::Result;

 pub fn compute_docids(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 condition: &ProximityCondition,
 universe: &RoaringBitmap,
 ) -> Result<ComputedCondition> {
@@ -110,7 +110,7 @@ pub fn compute_docids(
 }

 fn compute_prefix_edges(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 left_word: Interned<String>,
 right_prefix: Interned<String>,
 left_phrase: Option<Interned<Phrase>>,
@@ -166,7 +166,7 @@ fn compute_prefix_edges(
 }

 fn compute_non_prefix_edges(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 word1: Interned<String>,
 word2: Interned<String>,
 left_phrase: Option<Interned<Phrase>>,
@@ -209,7 +209,7 @@ fn compute_non_prefix_edges(
 }

 fn last_words_of_term_derivations(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 t: &QueryTermSubset,
 ) -> Result<BTreeSet<(Option<Interned<Phrase>>, Word)>> {
 let mut result = BTreeSet::new();
@@ -228,7 +228,7 @@ fn last_words_of_term_derivations(
 Ok(result)
 }
 fn first_word_of_term_iter(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 t: &QueryTermSubset,
 ) -> Result<BTreeSet<(Interned<String>, Option<Interned<Phrase>>)>> {
 let mut result = BTreeSet::new();
@@ -23,7 +23,7 @@ impl RankingRuleGraphTrait for ProximityGraph {

 #[tracing::instrument(level = "trace", skip_all, target = "search::proximity")]
 fn resolve_condition(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 condition: &Self::Condition,
 universe: &RoaringBitmap,
 ) -> Result<ComputedCondition> {
@@ -32,7 +32,7 @@ impl RankingRuleGraphTrait for ProximityGraph {

 #[tracing::instrument(level = "trace", skip_all, target = "search::proximity")]
 fn build_edges(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 conditions_interner: &mut DedupInterner<Self::Condition>,
 source_term: Option<&LocatedQueryTermSubset>,
 dest_term: &LocatedQueryTermSubset,
@@ -21,7 +21,7 @@ impl RankingRuleGraphTrait for TypoGraph {

 #[tracing::instrument(level = "trace", skip_all, target = "search::typo")]
 fn resolve_condition(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 condition: &Self::Condition,
 universe: &RoaringBitmap,
 ) -> Result<ComputedCondition> {
@@ -40,7 +40,7 @@ impl RankingRuleGraphTrait for TypoGraph {

 #[tracing::instrument(level = "trace", skip_all, target = "search::typo")]
 fn build_edges(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 conditions_interner: &mut DedupInterner<Self::Condition>,
 _from: Option<&LocatedQueryTermSubset>,
 to_term: &LocatedQueryTermSubset,
@@ -20,7 +20,7 @@ impl RankingRuleGraphTrait for WordsGraph {

 #[tracing::instrument(level = "trace", skip_all, target = "search::words")]
 fn resolve_condition(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 condition: &Self::Condition,
 universe: &RoaringBitmap,
 ) -> Result<ComputedCondition> {
@@ -39,7 +39,7 @@ impl RankingRuleGraphTrait for WordsGraph {

 #[tracing::instrument(level = "trace", skip_all, target = "search::words")]
 fn build_edges(
-_ctx: &mut SearchContext,
+_ctx: &mut SearchContext<'_>,
 conditions_interner: &mut DedupInterner<Self::Condition>,
 _from: Option<&LocatedQueryTermSubset>,
 to_term: &LocatedQueryTermSubset,
@@ -30,7 +30,7 @@ impl<'ctx> SearchContext<'ctx> {
 }
 }
 pub fn compute_query_term_subset_docids(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 term: &QueryTermSubset,
 ) -> Result<RoaringBitmap> {
 let mut docids = RoaringBitmap::new();
@@ -53,7 +53,7 @@ pub fn compute_query_term_subset_docids(
 }

 pub fn compute_query_term_subset_docids_within_field_id(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 term: &QueryTermSubset,
 fid: u16,
 ) -> Result<RoaringBitmap> {
@@ -86,7 +86,7 @@ pub fn compute_query_term_subset_docids_within_field_id(
 }

 pub fn compute_query_term_subset_docids_within_position(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 term: &QueryTermSubset,
 position: u16,
 ) -> Result<RoaringBitmap> {
@@ -121,7 +121,7 @@ pub fn compute_query_term_subset_docids_within_position(

 /// Returns the subset of the input universe that satisfies the contraints of the input query graph.
 pub fn compute_query_graph_docids(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 q: &QueryGraph,
 universe: &RoaringBitmap,
 ) -> Result<RoaringBitmap> {
@@ -178,7 +178,7 @@ pub fn compute_query_graph_docids(
 }

 pub fn compute_phrase_docids(
-ctx: &mut SearchContext,
+ctx: &mut SearchContext<'_>,
 phrase: Interned<Phrase>,
 ) -> Result<RoaringBitmap> {
 let Phrase { words } = ctx.phrase_interner.get(phrase).clone();
@@ -56,7 +56,7 @@ pub struct Sort<'ctx, Query> {
 impl<'ctx, Query> Sort<'ctx, Query> {
 pub fn new(
 index: &Index,
-rtxn: &'ctx heed::RoTxn,
+rtxn: &'ctx heed::RoTxn<'ctx>,
 field_name: String,
 is_ascending: bool,
 ) -> Result<Self> {
@@ -74,7 +74,7 @@ impl<'ctx, Query> Sort<'ctx, Query> {
 })
 }

-fn must_redact(index: &Index, rtxn: &'ctx heed::RoTxn, field_name: &str) -> Result<bool> {
+fn must_redact(index: &Index, rtxn: &'ctx heed::RoTxn<'ctx>, field_name: &str) -> Result<bool> {
 let Some(displayed_fields) = index.displayed_fields(rtxn)? else {
 return Ok(false);
 };
@@ -97,7 +97,7 @@ impl<'ctx, Query: RankingRuleQueryTrait> RankingRule<'ctx, Query> for Sort<'ctx,
 parent_candidates: &RoaringBitmap,
 parent_query: &Query,
 ) -> Result<()> {
-let iter: RankingRuleOutputIterWrapper<Query> = match self.field_id {
+let iter: RankingRuleOutputIterWrapper<'ctx, Query> = match self.field_id {
 Some(field_id) => {
 let number_db = ctx
 .index
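heed's read transaction is itself a borrowing type: RoTxn carries a lifetime parameter tied to the LMDB environment it reads from. The hunks above therefore turn `rtxn: &'ctx heed::RoTxn` into `rtxn: &'ctx heed::RoTxn<'ctx>`, naming both the reference's lifetime and the transaction's environment lifetime with the same 'ctx. A hedged sketch of the same shape, with hypothetical stand-in types rather than heed's:

    // Stand-ins for heed's Env and RoTxn<'env>. Names are illustrative only.
    struct Env {
        data: String,
    }

    struct ReadTxn<'env> {
        env: &'env Env,
    }

    // `&'ctx ReadTxn` also compiles (the parameter is silently elided), but
    // `&'ctx ReadTxn<'ctx>` states outright that the environment behind the
    // transaction lives at least as long as the reference itself.
    fn read_value<'ctx>(txn: &'ctx ReadTxn<'ctx>) -> &'ctx str {
        txn.env.data.as_str()
    }

    fn main() {
        let env = Env { data: "hello".to_owned() };
        let txn = ReadTxn { env: &env };
        println!("{}", read_value(&txn));
    }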
@@ -207,7 +207,7 @@ fn create_index() -> TempIndex {

 fn verify_distinct(
 index: &Index,
-txn: &RoTxn,
+txn: &RoTxn<'_>,
 distinct: Option<&str>,
 docids: &[u32],
 ) -> Vec<String> {
@@ -18,7 +18,7 @@ pub mod words_tms;

 fn collect_field_values(
 index: &crate::Index,
-txn: &heed::RoTxn,
+txn: &heed::RoTxn<'_>,
 fid: &str,
 docids: &[u32],
 ) -> Vec<String> {
@@ -20,7 +20,7 @@ pub struct VectorSort<Q: RankingRuleQueryTrait> {

 impl<Q: RankingRuleQueryTrait> VectorSort<Q> {
 pub fn new(
-ctx: &SearchContext,
+ctx: &SearchContext<'_>,
 target: Vec<f32>,
 vector_candidates: RoaringBitmap,
 limit: usize,
@@ -535,7 +535,7 @@ pub fn convert_snap_to_hash_if_needed<'snap>(

 #[macro_export]
 macro_rules! make_db_snap_from_iter {
-($index:ident, $name:ident, |$vars:pat| $push:block) => {{
+($index:ident, $name:ident, |$vars:pat_param| $push:block) => {{
 let rtxn = $index.read_txn().unwrap();
 let iter = $index.$name.iter(&rtxn).unwrap();
 let mut snap = String::new();
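The `make_db_snap_from_iter!` hunk just above is essentially the only change in this section that is not a lifetime annotation. Rust 2021 changed the `pat` macro fragment so that it also matches top-level or-patterns (`a | b`); as a consequence a `$x:pat` matcher may no longer be directly followed by a `|` token, which is exactly the shape this macro uses. The new `pat_param` fragment keeps the pre-2021 meaning (a pattern without a top-level `|`) and is the drop-in fix. A minimal sketch of the failure mode, with a hypothetical macro:

    // On edition 2021 `(|$v:pat| $body:block)` is rejected because `pat`
    // could swallow the closing `|`; `pat_param` cannot, so it is allowed.
    macro_rules! for_each_pair {
        (|$v:pat_param| $body:block) => {{
            for $v in [(1, 'a'), (2, 'b')] $body
        }};
    }

    fn main() {
        for_each_pair!(|(n, c)| {
            println!("{n}: {c}");
        });
    }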
@@ -36,7 +36,7 @@ impl Key for DelAdd {
 /// Addition: put all the values under DelAdd::Addition,
 /// DeletionAndAddition: put all the values under DelAdd::Deletion and DelAdd::Addition,
 pub fn into_del_add_obkv<K: obkv::Key + PartialOrd>(
-reader: obkv::KvReader<K>,
+reader: obkv::KvReader<'_, K>,
 operation: DelAddOperation,
 buffer: &mut Vec<u8>,
 ) -> Result<(), std::io::Error> {
@@ -46,7 +46,7 @@ pub fn into_del_add_obkv<K: obkv::Key + PartialOrd>(
 /// Akin to the [into_del_add_obkv] function but lets you
 /// conditionally define the `DelAdd` variant based on the obkv key.
 pub fn into_del_add_obkv_conditional_operation<K, F>(
-reader: obkv::KvReader<K>,
+reader: obkv::KvReader<'_, K>,
 buffer: &mut Vec<u8>,
 operation: F,
 ) -> std::io::Result<()>
@@ -86,8 +86,8 @@ pub enum DelAddOperation {
 /// putting each deletion obkv's keys under an DelAdd::Deletion
 /// and putting each addition obkv's keys under an DelAdd::Addition
 pub fn del_add_from_two_obkvs<K: obkv::Key + PartialOrd + Ord>(
-deletion: &obkv::KvReader<K>,
-addition: &obkv::KvReader<K>,
+deletion: &obkv::KvReader<'_, K>,
+addition: &obkv::KvReader<'_, K>,
 buffer: &mut Vec<u8>,
 ) -> Result<(), std::io::Error> {
 use itertools::merge_join_by;
@@ -121,7 +121,7 @@ pub fn del_add_from_two_obkvs<K: obkv::Key + PartialOrd + Ord>(
 writer.finish()
 }

-pub fn is_noop_del_add_obkv(del_add: KvReaderDelAdd) -> bool {
+pub fn is_noop_del_add_obkv(del_add: KvReaderDelAdd<'_>) -> bool {
 del_add.get(DelAdd::Deletion) == del_add.get(DelAdd::Addition)
 }

@@ -67,7 +67,7 @@ impl<'i> FacetsUpdateBulk<'i> {
 }

 #[tracing::instrument(level = "trace", skip_all, target = "indexing::facets::bulk")]
-pub fn execute(self, wtxn: &mut heed::RwTxn) -> Result<()> {
+pub fn execute(self, wtxn: &mut heed::RwTxn<'_>) -> Result<()> {
 let Self { index, field_ids, group_size, min_level_size, facet_type, delta_data } = self;

 let db = match facet_type {
@@ -95,7 +95,7 @@ pub(crate) struct FacetsUpdateBulkInner<R: std::io::Read + std::io::Seek> {
 pub min_level_size: u8,
 }
 impl<R: std::io::Read + std::io::Seek> FacetsUpdateBulkInner<R> {
-pub fn update(mut self, wtxn: &mut RwTxn, field_ids: &[u16]) -> Result<()> {
+pub fn update(mut self, wtxn: &mut RwTxn<'_>, field_ids: &[u16]) -> Result<()> {
 self.update_level0(wtxn)?;
 for &field_id in field_ids.iter() {
 self.clear_levels(wtxn, field_id)?;
@@ -114,7 +114,7 @@ impl<R: std::io::Read + std::io::Seek> FacetsUpdateBulkInner<R> {
 Ok(())
 }

-fn clear_levels(&self, wtxn: &mut heed::RwTxn, field_id: FieldId) -> Result<()> {
+fn clear_levels(&self, wtxn: &mut heed::RwTxn<'_>, field_id: FieldId) -> Result<()> {
 let left = FacetGroupKey::<&[u8]> { field_id, level: 1, left_bound: &[] };
 let right = FacetGroupKey::<&[u8]> { field_id, level: u8::MAX, left_bound: &[] };
 let range = left..=right;
@@ -122,7 +122,7 @@ impl<R: std::io::Read + std::io::Seek> FacetsUpdateBulkInner<R> {
 Ok(())
 }

-fn update_level0(&mut self, wtxn: &mut RwTxn) -> Result<()> {
+fn update_level0(&mut self, wtxn: &mut RwTxn<'_>) -> Result<()> {
 let delta_data = match self.delta_data.take() {
 Some(x) => x,
 None => return Ok(()),
@@ -198,7 +198,7 @@ impl<R: std::io::Read + std::io::Seek> FacetsUpdateBulkInner<R> {
 fn compute_levels_for_field_id(
 &self,
 field_id: FieldId,
-txn: &RoTxn,
+txn: &RoTxn<'_>,
 ) -> Result<Vec<grenad::Reader<BufReader<File>>>> {
 let subwriters = self.compute_higher_levels(txn, field_id, 32, &mut |_, _| Ok(()))?;

@@ -207,7 +207,7 @@ impl<R: std::io::Read + std::io::Seek> FacetsUpdateBulkInner<R> {
 #[allow(clippy::type_complexity)]
 fn read_level_0<'t>(
 &self,
-rtxn: &'t RoTxn,
+rtxn: &'t RoTxn<'t>,
 field_id: u16,
 handle_group: &mut dyn FnMut(&[RoaringBitmap], &'t [u8]) -> Result<()>,
 ) -> Result<()> {
@@ -261,7 +261,7 @@ impl<R: std::io::Read + std::io::Seek> FacetsUpdateBulkInner<R> {
 #[allow(clippy::type_complexity)]
 fn compute_higher_levels<'t>(
 &self,
-rtxn: &'t RoTxn,
+rtxn: &'t RoTxn<'t>,
 field_id: u16,
 level: u8,
 handle_group: &mut dyn FnMut(&[RoaringBitmap], &'t [u8]) -> Result<()>,
@@ -88,7 +88,7 @@ impl FacetsUpdateIncremental {
 }

 #[tracing::instrument(level = "trace", skip_all, target = "indexing::facets::incremental")]
-pub fn execute(self, wtxn: &mut RwTxn) -> crate::Result<()> {
+pub fn execute(self, wtxn: &mut RwTxn<'_>) -> crate::Result<()> {
 let mut current_field_id = None;
 let mut facet_level_may_be_updated = false;
 let mut iter = self.delta_data.into_stream_merger_iter()?;
@@ -172,7 +172,7 @@ impl FacetsUpdateIncrementalInner {
 field_id: u16,
 level: u8,
 facet_value: &[u8],
-txn: &RoTxn,
+txn: &RoTxn<'_>,
 ) -> Result<(FacetGroupKey<Vec<u8>>, FacetGroupValue)> {
 assert!(level > 0);
 match self.db.get_lower_than_or_equal_to(
@@ -215,7 +215,7 @@ impl FacetsUpdateIncrementalInner {
 /// See documentation of `insert_in_level`
 fn modify_in_level_0(
 &self,
-txn: &mut RwTxn,
+txn: &mut RwTxn<'_>,
 field_id: u16,
 facet_value: &[u8],
 add_docids: Option<&RoaringBitmap>,
@@ -277,7 +277,7 @@ impl FacetsUpdateIncrementalInner {
 /// Returns `ModificationResult::Insert` if the split is successful.
 fn split_group(
 &self,
-txn: &mut RwTxn,
+txn: &mut RwTxn<'_>,
 field_id: u16,
 level: u8,
 insertion_key: FacetGroupKey<Vec<u8>>,
@@ -346,7 +346,7 @@ impl FacetsUpdateIncrementalInner {
 /// This process is needed to avoid removing docids from a group node where the docid is present in several sub-nodes.
 fn trim_del_docids<'a>(
 &self,
-txn: &mut RwTxn,
+txn: &mut RwTxn<'_>,
 field_id: u16,
 level: u8,
 insertion_key: &FacetGroupKey<Vec<u8>>,
@@ -383,7 +383,7 @@ impl FacetsUpdateIncrementalInner {
 ///
 fn modify_in_level(
 &self,
-txn: &mut RwTxn,
+txn: &mut RwTxn<'_>,
 field_id: u16,
 level: u8,
 facet_value: &[u8],
@@ -523,7 +523,7 @@ impl FacetsUpdateIncrementalInner {
 /// Otherwise returns `false` if the tree-nodes have been modified in place.
 pub fn modify(
 &self,
-txn: &mut RwTxn,
+txn: &mut RwTxn<'_>,
 field_id: u16,
 facet_value: &[u8],
 add_docids: Option<&RoaringBitmap>,
@@ -558,7 +558,7 @@ impl FacetsUpdateIncrementalInner {
 /// If it has, we must build an addition level above it.
 /// Then check whether the highest level is under `min_level_size`.
 /// If it has, we must remove the complete level.
-pub(crate) fn add_or_delete_level(&self, txn: &mut RwTxn, field_id: u16) -> Result<()> {
+pub(crate) fn add_or_delete_level(&self, txn: &mut RwTxn<'_>, field_id: u16) -> Result<()> {
 let highest_level = get_highest_level(txn, self.db, field_id)?;
 let mut highest_level_prefix = vec![];
 highest_level_prefix.extend_from_slice(&field_id.to_be_bytes());
@@ -577,7 +577,7 @@ impl FacetsUpdateIncrementalInner {
 }

 /// Delete a level.
-fn delete_level(&self, txn: &mut RwTxn, highest_level_prefix: &[u8]) -> Result<()> {
+fn delete_level(&self, txn: &mut RwTxn<'_>, highest_level_prefix: &[u8]) -> Result<()> {
 let mut to_delete = vec![];
 let mut iter =
 self.db.remap_types::<Bytes, Bytes>().prefix_iter(txn, highest_level_prefix)?;
@@ -599,7 +599,7 @@ impl FacetsUpdateIncrementalInner {
 /// Build an additional level for the field id.
 fn add_level(
 &self,
-txn: &mut RwTxn,
+txn: &mut RwTxn<'_>,
 field_id: u16,
 highest_level: u8,
 highest_level_prefix: &[u8],
@@ -141,7 +141,7 @@ impl<'i> FacetsUpdate<'i> {
 }
 }

-pub fn execute(self, wtxn: &mut heed::RwTxn) -> Result<()> {
+pub fn execute(self, wtxn: &mut heed::RwTxn<'_>) -> Result<()> {
 if self.data_size == 0 {
 return Ok(());
 }
@@ -181,7 +181,7 @@ impl<'i> FacetsUpdate<'i> {
 }

 fn index_facet_search(
-wtxn: &mut heed::RwTxn,
+wtxn: &mut heed::RwTxn<'_>,
 normalized_delta_data: Merger<BufReader<File>, MergeFn>,
 index: &Index,
 ) -> Result<()> {
@@ -417,7 +417,7 @@ pub(crate) mod test_helpers {

 pub fn insert<'a>(
 &self,
-wtxn: &'a mut RwTxn,
+wtxn: &'a mut RwTxn<'_>,
 field_id: u16,
 key: &'a <BoundCodec as BytesEncode<'a>>::EItem,
 docids: &RoaringBitmap,
@@ -434,7 +434,7 @@ pub(crate) mod test_helpers {
 }
 pub fn delete_single_docid<'a>(
 &self,
-wtxn: &'a mut RwTxn,
+wtxn: &'a mut RwTxn<'_>,
 field_id: u16,
 key: &'a <BoundCodec as BytesEncode<'a>>::EItem,
 docid: u32,
@@ -444,7 +444,7 @@ pub(crate) mod test_helpers {

 pub fn delete<'a>(
 &self,
-wtxn: &'a mut RwTxn,
+wtxn: &'a mut RwTxn<'_>,
 field_id: u16,
 key: &'a <BoundCodec as BytesEncode<'a>>::EItem,
 docids: &RoaringBitmap,
@@ -462,7 +462,7 @@ pub(crate) mod test_helpers {

 pub fn bulk_insert<'a, 'b>(
 &self,
-wtxn: &'a mut RwTxn,
+wtxn: &'a mut RwTxn<'_>,
 field_ids: &[u16],
 els: impl IntoIterator<
 Item = &'a ((u16, <BoundCodec as BytesEncode<'a>>::EItem), RoaringBitmap),
@@ -498,7 +498,7 @@ pub(crate) mod test_helpers {
 update.update(wtxn, field_ids).unwrap();
 }

-pub fn verify_structure_validity(&self, txn: &RoTxn, field_id: u16) {
+pub fn verify_structure_validity(&self, txn: &RoTxn<'_>, field_id: u16) {
 let mut field_id_prefix = vec![];
 field_id_prefix.extend_from_slice(&field_id.to_be_bytes());

@ -24,7 +24,7 @@ use crate::{FieldId, Index, Result};
|
|||||||
/// - if reader.is_empty(), this function may panic in some cases
|
/// - if reader.is_empty(), this function may panic in some cases
|
||||||
#[tracing::instrument(level = "trace", skip_all, target = "indexing::documents")]
|
#[tracing::instrument(level = "trace", skip_all, target = "indexing::documents")]
|
||||||
pub fn enrich_documents_batch<R: Read + Seek>(
|
pub fn enrich_documents_batch<R: Read + Seek>(
|
||||||
rtxn: &heed::RoTxn,
|
rtxn: &heed::RoTxn<'_>,
|
||||||
index: &Index,
|
index: &Index,
|
||||||
autogenerate_docids: bool,
|
autogenerate_docids: bool,
|
||||||
reader: DocumentsBatchReader<R>,
|
reader: DocumentsBatchReader<R>,
|
||||||
@ -145,9 +145,9 @@ pub fn enrich_documents_batch<R: Read + Seek>(
|
|||||||
#[tracing::instrument(level = "trace", skip(uuid_buffer, documents_batch_index, document)
|
#[tracing::instrument(level = "trace", skip(uuid_buffer, documents_batch_index, document)
|
||||||
target = "indexing::documents")]
|
target = "indexing::documents")]
|
||||||
fn fetch_or_generate_document_id(
|
fn fetch_or_generate_document_id(
|
||||||
document: &obkv::KvReader<FieldId>,
|
document: &obkv::KvReader<'_, FieldId>,
|
||||||
documents_batch_index: &DocumentsBatchIndex,
|
documents_batch_index: &DocumentsBatchIndex,
|
||||||
primary_key: PrimaryKey,
|
primary_key: PrimaryKey<'_>,
|
||||||
autogenerate_docids: bool,
|
autogenerate_docids: bool,
|
||||||
uuid_buffer: &mut [u8; uuid::fmt::Hyphenated::LENGTH],
|
uuid_buffer: &mut [u8; uuid::fmt::Hyphenated::LENGTH],
|
||||||
count: u32,
|
count: u32,
|
||||||
|
@ -179,7 +179,7 @@ pub fn extract_docid_word_positions<R: io::Read + io::Seek>(
|
|||||||
|
|
||||||
/// Check if any searchable fields of a document changed.
|
/// Check if any searchable fields of a document changed.
|
||||||
fn searchable_fields_changed(
|
fn searchable_fields_changed(
|
||||||
obkv: &KvReader<FieldId>,
|
obkv: &KvReader<'_, FieldId>,
|
||||||
settings_diff: &InnerIndexSettingsDiff,
|
settings_diff: &InnerIndexSettingsDiff,
|
||||||
) -> bool {
|
) -> bool {
|
||||||
let searchable_fields = &settings_diff.new.searchable_fields_ids;
|
let searchable_fields = &settings_diff.new.searchable_fields_ids;
|
||||||
@ -228,9 +228,9 @@ fn tokenizer_builder<'a>(
|
|||||||
/// Extract words mapped with their positions of a document,
|
/// Extract words mapped with their positions of a document,
|
||||||
/// ensuring no Language detection mistakes was made.
|
/// ensuring no Language detection mistakes was made.
|
||||||
fn lang_safe_tokens_from_document<'a>(
|
fn lang_safe_tokens_from_document<'a>(
|
||||||
obkv: &KvReader<FieldId>,
|
obkv: &KvReader<'_, FieldId>,
|
||||||
settings: &InnerIndexSettings,
|
settings: &InnerIndexSettings,
|
||||||
tokenizer: &Tokenizer,
|
tokenizer: &Tokenizer<'_>,
|
||||||
max_positions_per_attributes: u32,
|
max_positions_per_attributes: u32,
|
||||||
del_add: DelAdd,
|
del_add: DelAdd,
|
||||||
buffers: &'a mut Buffers,
|
buffers: &'a mut Buffers,
|
||||||
@ -295,9 +295,9 @@ fn lang_safe_tokens_from_document<'a>(
|
|||||||
|
|
||||||
/// Extract words mapped with their positions of a document.
|
/// Extract words mapped with their positions of a document.
|
||||||
fn tokens_from_document<'a>(
|
fn tokens_from_document<'a>(
|
||||||
obkv: &KvReader<FieldId>,
|
obkv: &KvReader<'a, FieldId>,
|
||||||
searchable_fields: &[FieldId],
|
searchable_fields: &[FieldId],
|
||||||
tokenizer: &Tokenizer,
|
tokenizer: &Tokenizer<'_>,
|
||||||
max_positions_per_attributes: u32,
|
max_positions_per_attributes: u32,
|
||||||
del_add: DelAdd,
|
del_add: DelAdd,
|
||||||
buffers: &'a mut Buffers,
|
buffers: &'a mut Buffers,
|
||||||
|
@ -68,7 +68,7 @@ pub fn extract_geo_points<R: io::Read + io::Seek>(
|
|||||||
|
|
||||||
/// Extract the finite floats lat and lng from two bytes slices.
|
/// Extract the finite floats lat and lng from two bytes slices.
|
||||||
fn extract_lat_lng(
|
fn extract_lat_lng(
|
||||||
document: &obkv::KvReader<FieldId>,
|
document: &obkv::KvReader<'_, FieldId>,
|
||||||
settings: &InnerIndexSettings,
|
settings: &InnerIndexSettings,
|
||||||
deladd: DelAdd,
|
deladd: DelAdd,
|
||||||
document_id: impl Fn() -> Value,
|
document_id: impl Fn() -> Value,
|
||||||
|
@ -172,7 +172,7 @@ pub fn grenad_obkv_into_chunks<R: io::Read + io::Seek>(
|
|||||||
pub fn write_sorter_into_database<K, V, FS, FM>(
|
pub fn write_sorter_into_database<K, V, FS, FM>(
|
||||||
sorter: Sorter<MergeFn>,
|
sorter: Sorter<MergeFn>,
|
||||||
database: &heed::Database<K, V>,
|
database: &heed::Database<K, V>,
|
||||||
wtxn: &mut heed::RwTxn,
|
wtxn: &mut heed::RwTxn<'_>,
|
||||||
index_is_empty: bool,
|
index_is_empty: bool,
|
||||||
serialize_value: FS,
|
serialize_value: FS,
|
||||||
merge_values: FM,
|
merge_values: FM,
|
||||||
|
@ -45,8 +45,8 @@ pub fn keep_latest_obkv<'a>(_key: &[u8], obkvs: &[Cow<'a, [u8]>]) -> Result<Cow<
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn merge_two_del_add_obkvs(
|
pub fn merge_two_del_add_obkvs(
|
||||||
base: obkv::KvReaderU16,
|
base: obkv::KvReaderU16<'_>,
|
||||||
update: obkv::KvReaderU16,
|
update: obkv::KvReaderU16<'_>,
|
||||||
merge_additions: bool,
|
merge_additions: bool,
|
||||||
buffer: &mut Vec<u8>,
|
buffer: &mut Vec<u8>,
|
||||||
) {
|
) {
|
||||||
|
@ -758,7 +758,7 @@ where
|
|||||||
name = "index_documents_word_prefix_docids"
|
name = "index_documents_word_prefix_docids"
|
||||||
)]
|
)]
|
||||||
fn execute_word_prefix_docids(
|
fn execute_word_prefix_docids(
|
||||||
txn: &mut heed::RwTxn,
|
txn: &mut heed::RwTxn<'_>,
|
||||||
merger: Merger<CursorClonableMmap, MergeFn>,
|
merger: Merger<CursorClonableMmap, MergeFn>,
|
||||||
word_docids_db: Database<Str, CboRoaringBitmapCodec>,
|
word_docids_db: Database<Str, CboRoaringBitmapCodec>,
|
||||||
word_prefix_docids_db: Database<Str, CboRoaringBitmapCodec>,
|
word_prefix_docids_db: Database<Str, CboRoaringBitmapCodec>,
|
||||||
|
@ -102,7 +102,7 @@ fn create_fields_mapping(
|
|||||||
|
|
||||||
impl<'a, 'i> Transform<'a, 'i> {
|
impl<'a, 'i> Transform<'a, 'i> {
|
||||||
pub fn new(
|
pub fn new(
|
||||||
wtxn: &mut heed::RwTxn,
|
wtxn: &mut heed::RwTxn<'_>,
|
||||||
index: &'i Index,
|
index: &'i Index,
|
||||||
indexer_settings: &'a IndexerConfig,
|
indexer_settings: &'a IndexerConfig,
|
||||||
index_documents_method: IndexDocumentsMethod,
|
index_documents_method: IndexDocumentsMethod,
|
||||||
@ -155,7 +155,7 @@ impl<'a, 'i> Transform<'a, 'i> {
|
|||||||
pub fn read_documents<R, FP, FA>(
|
pub fn read_documents<R, FP, FA>(
|
||||||
&mut self,
|
&mut self,
|
||||||
reader: EnrichedDocumentsBatchReader<R>,
|
reader: EnrichedDocumentsBatchReader<R>,
|
||||||
wtxn: &mut heed::RwTxn,
|
wtxn: &mut heed::RwTxn<'_>,
|
||||||
progress_callback: FP,
|
progress_callback: FP,
|
||||||
should_abort: FA,
|
should_abort: FA,
|
||||||
) -> Result<usize>
|
) -> Result<usize>
|
||||||
@ -177,7 +177,7 @@ impl<'a, 'i> Transform<'a, 'i> {
|
|||||||
let mut document_sorter_key_buffer = Vec::new();
|
let mut document_sorter_key_buffer = Vec::new();
|
||||||
let mut documents_count = 0;
|
let mut documents_count = 0;
|
||||||
let mut docid_buffer: Vec<u8> = Vec::new();
|
let mut docid_buffer: Vec<u8> = Vec::new();
|
||||||
let mut field_buffer: Vec<(u16, Cow<[u8]>)> = Vec::new();
|
let mut field_buffer: Vec<(u16, Cow<'_, [u8]>)> = Vec::new();
|
||||||
while let Some(enriched_document) = cursor.next_enriched_document()? {
|
while let Some(enriched_document) = cursor.next_enriched_document()? {
|
||||||
let EnrichedDocument { document, document_id } = enriched_document;
|
let EnrichedDocument { document, document_id } = enriched_document;
|
||||||
|
|
||||||
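The `field_buffer` line just above applies the same rule to a local type annotation rather than a signature. A short sketch of both spellings, assuming nothing beyond the standard library (`collect_borrowed` is an illustrative name, not a milli API):

use std::borrow::Cow;

// In a return type, `'_` elides to the single input lifetime: the
// returned cows borrow from `raw`.
fn collect_borrowed(raw: &[(u16, Vec<u8>)]) -> Vec<(u16, Cow<'_, [u8]>)> {
    raw.iter().map(|(id, bytes)| (*id, Cow::Borrowed(bytes.as_slice()))).collect()
}

fn main() {
    let raw = vec![(0u16, b"hello".to_vec())];
    // In a let binding, `'_` simply asks the compiler to infer the lifetime.
    let mut field_buffer: Vec<(u16, Cow<'_, [u8]>)> = Vec::new();
    field_buffer.extend(collect_borrowed(&raw));
    assert_eq!(field_buffer.len(), 1);
}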
@ -370,7 +370,7 @@ impl<'a, 'i> Transform<'a, 'i> {
pub fn remove_documents<FA>(
&mut self,
mut to_remove: Vec<String>,
wtxn: &mut heed::RwTxn,
wtxn: &mut heed::RwTxn<'_>,
should_abort: FA,
) -> Result<usize>
where
@ -459,7 +459,7 @@ impl<'a, 'i> Transform<'a, 'i> {
pub fn remove_documents_from_db_no_batch<FA>(
&mut self,
to_remove: &RoaringBitmap,
wtxn: &mut heed::RwTxn,
wtxn: &mut heed::RwTxn<'_>,
should_abort: FA,
) -> Result<usize>
where
@ -493,7 +493,7 @@ impl<'a, 'i> Transform<'a, 'i> {
&mut self,
internal_docid: u32,
external_docid: String,
txn: &heed::RoTxn,
txn: &heed::RoTxn<'_>,
document_sorter_key_buffer: &mut Vec<u8>,
document_sorter_value_buffer: &mut Vec<u8>,
) -> Result<()> {
@ -552,7 +552,7 @@ impl<'a, 'i> Transform<'a, 'i> {
target = "indexing::transform"
)]
fn flatten_from_fields_ids_map(
obkv: &KvReader<FieldId>,
obkv: &KvReader<'_, FieldId>,
fields_ids_map: &mut FieldsIdsMap,
) -> Result<Option<Vec<u8>>> {
if obkv
@ -566,7 +566,7 @@ impl<'a, 'i> Transform<'a, 'i> {
// We first extract all the key+value out of the obkv. If a value is not nested
// we keep a reference on its value. If the value is nested we'll get its value
// as an owned `Vec<u8>` after flattening it.
let mut key_value: Vec<(FieldId, Cow<[u8]>)> = Vec::new();
let mut key_value: Vec<(FieldId, Cow<'_, [u8]>)> = Vec::new();

// the object we're going to use to store the fields that need to be flattened.
let mut doc = serde_json::Map::new();
@ -609,7 +609,7 @@ impl<'a, 'i> Transform<'a, 'i> {

/// Generate an obkv from a slice of key / value sorted by key.
fn create_obkv_from_key_value(
key_value: &mut [(FieldId, Cow<[u8]>)],
key_value: &mut [(FieldId, Cow<'_, [u8]>)],
output_buffer: &mut Vec<u8>,
) -> Result<()> {
debug_assert!(
@ -677,7 +677,7 @@ impl<'a, 'i> Transform<'a, 'i> {
#[tracing::instrument(level = "trace", skip_all, target = "indexing::transform")]
pub(crate) fn output_from_sorter<F>(
self,
wtxn: &mut heed::RwTxn,
wtxn: &mut heed::RwTxn<'_>,
progress_callback: F,
) -> Result<TransformOutput>
where
@ -837,7 +837,7 @@ impl<'a, 'i> Transform<'a, 'i> {
/// then fill the provided buffers with delta documents using KvWritterDelAdd.
#[allow(clippy::too_many_arguments)] // need the vectors + fid, feel free to create a struct xo xo
fn rebind_existing_document(
old_obkv: KvReader<FieldId>,
old_obkv: KvReader<'_, FieldId>,
settings_diff: &InnerIndexSettingsDiff,
modified_faceted_fields: &HashSet<String>,
mut injected_vectors: serde_json::Map<String, serde_json::Value>,
@ -990,7 +990,7 @@ impl<'a, 'i> Transform<'a, 'i> {
};

let readers: Result<
BTreeMap<&str, (Vec<arroy::Reader<arroy::distances::Angular>>, &RoaringBitmap)>,
BTreeMap<&str, (Vec<arroy::Reader<'_, arroy::distances::Angular>>, &RoaringBitmap)>,
> = settings_diff
.embedding_config_updates
.iter()
@ -128,7 +128,7 @@ impl TypedChunk {
/// Return new documents seen.
#[tracing::instrument(level = "trace", skip_all, target = "indexing::write_db")]
pub(crate) fn write_typed_chunk_into_index(
wtxn: &mut RwTxn,
wtxn: &mut RwTxn<'_>,
index: &Index,
settings_diff: &InnerIndexSettingsDiff,
typed_chunks: Vec<TypedChunk>,
@ -165,7 +165,7 @@ pub(crate) fn write_typed_chunk_into_index(
let mut vectors_buffer = Vec::new();
while let Some((key, reader)) = iter.next()? {
let mut writer: KvWriter<_, FieldId> = KvWriter::memory();
let reader: KvReader<FieldId> = KvReader::new(reader);
let reader: KvReader<'_, FieldId> = KvReader::new(reader);

let (document_id_bytes, external_id_bytes) = try_split_array_at(key)
.ok_or(SerializationError::Decoding { db_name: Some(DOCUMENTS) })?;
@ -835,7 +835,7 @@ fn merge_word_docids_reader_into_fst(
fn write_entries_into_database<R, K, V, FS, FM>(
merger: Merger<R, MergeFn>,
database: &heed::Database<K, V>,
wtxn: &mut RwTxn,
wtxn: &mut RwTxn<'_>,
serialize_value: FS,
merge_values: FM,
) -> Result<()>
@ -872,7 +872,7 @@ where
fn write_proximity_entries_into_database_additional_searchables<R>(
merger: Merger<R, MergeFn>,
database: &heed::Database<U8StrStrCodec, CboRoaringBitmapCodec>,
wtxn: &mut RwTxn,
wtxn: &mut RwTxn<'_>,
) -> Result<()>
where
R: io::Read + io::Seek,
@ -44,7 +44,7 @@ where
{
fn deserialize_from_value<V: deserr::IntoValue>(
value: deserr::Value<V>,
location: deserr::ValuePointerRef,
location: deserr::ValuePointerRef<'_>,
) -> std::result::Result<Self, E> {
match value {
deserr::Value::Null => Ok(Setting::Reset),
@ -617,7 +617,7 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
fn update_synonyms(&mut self) -> Result<bool> {
match self.synonyms {
Setting::Set(ref user_synonyms) => {
fn normalize(tokenizer: &Tokenizer, text: &str) -> Vec<String> {
fn normalize(tokenizer: &Tokenizer<'_>, text: &str) -> Vec<String> {
tokenizer
.tokenize(text)
.filter_map(|token| {
@ -838,7 +838,7 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
fn update_exact_words(&mut self) -> Result<()> {
match self.exact_words {
Setting::Set(ref mut words) => {
fn normalize(tokenizer: &Tokenizer, text: &str) -> String {
fn normalize(tokenizer: &Tokenizer<'_>, text: &str) -> String {
tokenizer.tokenize(text).map(|token| token.lemma().to_string()).collect()
}

@ -1344,7 +1344,7 @@ pub(crate) struct InnerIndexSettings {
}

impl InnerIndexSettings {
pub fn from_index(index: &Index, rtxn: &heed::RoTxn) -> Result<Self> {
pub fn from_index(index: &Index, rtxn: &heed::RoTxn<'_>) -> Result<Self> {
let stop_words = index.stop_words(rtxn)?;
let stop_words = stop_words.map(|sw| sw.map_data(Vec::from).unwrap());
let allowed_separators = index.allowed_separators(rtxn)?;
@ -1407,7 +1407,7 @@ impl InnerIndexSettings {
}

// find and insert the new field ids
pub fn recompute_facets(&mut self, wtxn: &mut heed::RwTxn, index: &Index) -> Result<()> {
pub fn recompute_facets(&mut self, wtxn: &mut heed::RwTxn<'_>, index: &Index) -> Result<()> {
let new_facets = self
.fields_ids_map
.iter()
@ -1422,7 +1422,11 @@ impl InnerIndexSettings {
}

// find and insert the new field ids
pub fn recompute_searchables(&mut self, wtxn: &mut heed::RwTxn, index: &Index) -> Result<()> {
pub fn recompute_searchables(
&mut self,
wtxn: &mut heed::RwTxn<'_>,
index: &Index,
) -> Result<()> {
let searchable_fields = self
.user_defined_searchable_fields
.as_ref()
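This is the one hunk where the line count grows (`-1422,7 +1422,11`): adding `<'_>` presumably pushed the single-line signature of `recompute_searchables` past the formatter's line width, so `cargo fmt` rewrapped it. For context, a hedged sketch of how such `RwTxn<'_>` helpers are typically driven (assuming a heed-style environment; `run_settings_update` is an illustrative name):

// The write transaction borrows from the environment, which is exactly
// what the `'_` in `RwTxn<'_>` makes explicit in the signatures above.
fn run_settings_update(env: &heed::Env) -> heed::Result<()> {
    let mut wtxn = env.write_txn()?; // yields an RwTxn<'_> tied to `env`
    // ... call helpers such as `recompute_facets(&mut wtxn, index)?` here ...
    wtxn.commit()
}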
@ -345,7 +345,7 @@ where
{
fn deserialize_from_value<V: deserr::IntoValue>(
value: deserr::Value<V>,
location: deserr::ValuePointerRef,
location: deserr::ValuePointerRef<'_>,
) -> Result<Self, E> {
let value = DistributionShiftSerializable::deserialize_from_value(value, location)?;
if value.mean < 0. || value.mean > 1. {
@ -21,7 +21,7 @@ pub enum Vectors {
impl<E: DeserializeError> Deserr<E> for Vectors {
fn deserialize_from_value<V: deserr::IntoValue>(
value: deserr::Value<V>,
location: deserr::ValuePointerRef,
location: deserr::ValuePointerRef<'_>,
) -> Result<Self, E> {
match value {
deserr::Value::Sequence(_) | deserr::Value::Null => {
@ -186,7 +186,7 @@ pub struct ParsedVectors(pub BTreeMap<String, Vectors>);
impl<E: DeserializeError> Deserr<E> for ParsedVectors {
fn deserialize_from_value<V: deserr::IntoValue>(
value: deserr::Value<V>,
location: deserr::ValuePointerRef,
location: deserr::ValuePointerRef<'_>,
) -> Result<Self, E> {
let value = <BTreeMap<String, Vectors>>::deserialize_from_value(value, location)?;
Ok(ParsedVectors(value))
@ -230,7 +230,7 @@ impl Error {
}

fn to_vector_map(
obkv: KvReaderDelAdd,
obkv: KvReaderDelAdd<'_>,
side: DelAdd,
) -> Result<Option<BTreeMap<String, Vectors>>, Error> {
Ok(if let Some(value) = obkv.get(side) {
@ -252,7 +252,7 @@ pub struct VectorOrArrayOfVectors {
impl<E: DeserializeError> Deserr<E> for VectorOrArrayOfVectors {
fn deserialize_from_value<V: deserr::IntoValue>(
value: deserr::Value<V>,
location: deserr::ValuePointerRef,
location: deserr::ValuePointerRef<'_>,
) -> Result<Self, E> {
match value {
deserr::Value::Null => Ok(VectorOrArrayOfVectors { inner: None }),