use std::borrow::Cow;
use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
use std::fs::File;
use std::path::Path;

use charabia::{Language, Script};
use heed::flags::Flags;
use heed::types::*;
use heed::{CompactionOption, Database, PolyDatabase, RoTxn, RwTxn};
use roaring::RoaringBitmap;
use rstar::RTree;
use time::OffsetDateTime;

use crate::distance::NDotProductPoint;
use crate::error::{InternalError, UserError};
use crate::fields_ids_map::FieldsIdsMap;
use crate::heed_codec::facet::{
    FacetGroupKeyCodec, FacetGroupValueCodec, FieldDocIdFacetF64Codec, FieldDocIdFacetStringCodec,
    FieldIdCodec, OrderedF64Codec,
};
use crate::heed_codec::{
    BEU16StrCodec, FstSetCodec, ScriptLanguageCodec, StrBEU16Codec, StrRefCodec,
};
use crate::readable_slices::ReadableSlices;
use crate::{
    default_criteria, CboRoaringBitmapCodec, Criterion, DocumentId, ExternalDocumentsIds,
    FacetDistribution, FieldDistribution, FieldId, FieldIdWordCountCodec, GeoPoint, ObkvCodec,
    OrderBy, Result, RoaringBitmapCodec, RoaringBitmapLenCodec, Search, U8StrStrCodec, BEU16,
    BEU32,
};

/// The HNSW data-structure that we serialize, fill and search in.
pub type Hnsw = instant_distance::Hnsw<NDotProductPoint>;

pub const DEFAULT_MIN_WORD_LEN_ONE_TYPO: u8 = 5;
pub const DEFAULT_MIN_WORD_LEN_TWO_TYPOS: u8 = 9;

pub mod main_key {
    pub const CRITERIA_KEY: &str = "criteria";
    pub const DISPLAYED_FIELDS_KEY: &str = "displayed-fields";
    pub const DISTINCT_FIELD_KEY: &str = "distinct-field-key";
    pub const DOCUMENTS_IDS_KEY: &str = "documents-ids";
    pub const HIDDEN_FACETED_FIELDS_KEY: &str = "hidden-faceted-fields";
    pub const FILTERABLE_FIELDS_KEY: &str = "filterable-fields";
    pub const SORTABLE_FIELDS_KEY: &str = "sortable-fields";
    pub const FIELD_DISTRIBUTION_KEY: &str = "fields-distribution";
    pub const FIELDS_IDS_MAP_KEY: &str = "fields-ids-map";
    pub const GEO_FACETED_DOCUMENTS_IDS_KEY: &str = "geo-faceted-documents-ids";
    pub const GEO_RTREE_KEY: &str = "geo-rtree";
    /// The prefix of the key that is used to store the potentially big HNSW structure.
    /// It is concatenated with a big-endian encoded number (non-human readable).
    /// e.g. vector-hnsw0x0032.
    pub const VECTOR_HNSW_KEY_PREFIX: &str = "vector-hnsw";
    pub const EXTERNAL_DOCUMENTS_IDS_KEY: &str = "external-documents-ids";
    pub const PRIMARY_KEY_KEY: &str = "primary-key";
    pub const SEARCHABLE_FIELDS_KEY: &str = "searchable-fields";
    pub const USER_DEFINED_SEARCHABLE_FIELDS_KEY: &str = "user-defined-searchable-fields";
    pub const STOP_WORDS_KEY: &str = "stop-words";
    pub const NON_SEPARATOR_TOKENS_KEY: &str = "non-separator-tokens";
    pub const SEPARATOR_TOKENS_KEY: &str = "separator-tokens";
    pub const DICTIONARY_KEY: &str = "dictionary";
    pub const SYNONYMS_KEY: &str = "synonyms";
    pub const USER_DEFINED_SYNONYMS_KEY: &str = "user-defined-synonyms";
    pub const WORDS_FST_KEY: &str = "words-fst";
    pub const WORDS_PREFIXES_FST_KEY: &str = "words-prefixes-fst";
    pub const CREATED_AT_KEY: &str = "created-at";
    pub const UPDATED_AT_KEY: &str = "updated-at";
    pub const AUTHORIZE_TYPOS: &str = "authorize-typos";
    pub const ONE_TYPO_WORD_LEN: &str = "one-typo-word-len";
    pub const TWO_TYPOS_WORD_LEN: &str = "two-typos-word-len";
    pub const EXACT_WORDS: &str = "exact-words";
    pub const EXACT_ATTRIBUTES: &str = "exact-attributes";
    pub const MAX_VALUES_PER_FACET: &str = "max-values-per-facet";
    pub const SORT_FACET_VALUES_BY: &str = "sort-facet-values-by";
    pub const PAGINATION_MAX_TOTAL_HITS: &str = "pagination-max-total-hits";
}

pub mod db_name {
    pub const MAIN: &str = "main";
    pub const WORD_DOCIDS: &str = "word-docids";
    pub const EXACT_WORD_DOCIDS: &str = "exact-word-docids";
    pub const WORD_PREFIX_DOCIDS: &str = "word-prefix-docids";
    pub const EXACT_WORD_PREFIX_DOCIDS: &str = "exact-word-prefix-docids";
    pub const DOCID_WORD_POSITIONS: &str = "docid-word-positions";
    pub const WORD_PAIR_PROXIMITY_DOCIDS: &str = "word-pair-proximity-docids";
    pub const WORD_PREFIX_PAIR_PROXIMITY_DOCIDS: &str = "word-prefix-pair-proximity-docids";
    pub const PREFIX_WORD_PAIR_PROXIMITY_DOCIDS: &str = "prefix-word-pair-proximity-docids";
    pub const WORD_POSITION_DOCIDS: &str = "word-position-docids";
    pub const WORD_FIELD_ID_DOCIDS: &str = "word-field-id-docids";
    pub const WORD_PREFIX_POSITION_DOCIDS: &str = "word-prefix-position-docids";
    pub const WORD_PREFIX_FIELD_ID_DOCIDS: &str = "word-prefix-field-id-docids";
    pub const FIELD_ID_WORD_COUNT_DOCIDS: &str = "field-id-word-count-docids";
    pub const FACET_ID_F64_DOCIDS: &str = "facet-id-f64-docids";
    pub const FACET_ID_EXISTS_DOCIDS: &str = "facet-id-exists-docids";
    pub const FACET_ID_IS_NULL_DOCIDS: &str = "facet-id-is-null-docids";
    pub const FACET_ID_IS_EMPTY_DOCIDS: &str = "facet-id-is-empty-docids";
    pub const FACET_ID_STRING_DOCIDS: &str = "facet-id-string-docids";
    pub const FACET_ID_NORMALIZED_STRING_STRINGS: &str = "facet-id-normalized-string-strings";
    pub const FACET_ID_STRING_FST: &str = "facet-id-string-fst";
    pub const FIELD_ID_DOCID_FACET_F64S: &str = "field-id-docid-facet-f64s";
    pub const FIELD_ID_DOCID_FACET_STRINGS: &str = "field-id-docid-facet-strings";
    pub const VECTOR_ID_DOCID: &str = "vector-id-docids";
    pub const DOCUMENTS: &str = "documents";
    pub const SCRIPT_LANGUAGE_DOCIDS: &str = "script_language_docids";
}

#[derive(Clone)]
pub struct Index {
    /// The LMDB environment which this index is associated with.
    pub(crate) env: heed::Env,

    /// Contains many different types (e.g. the fields ids map).
    pub(crate) main: PolyDatabase,

    /// A word and all the documents ids containing the word.
    pub word_docids: Database<Str, CboRoaringBitmapCodec>,

    /// A word and all the documents ids containing the word, from attributes for which typos are not allowed.
    pub exact_word_docids: Database<Str, CboRoaringBitmapCodec>,

    /// A prefix of word and all the documents ids containing this prefix.
    pub word_prefix_docids: Database<Str, CboRoaringBitmapCodec>,

    /// A prefix of word and all the documents ids containing this prefix, from attributes for which typos are not allowed.
    pub exact_word_prefix_docids: Database<Str, CboRoaringBitmapCodec>,

    /// Maps the proximity between a pair of words with all the docids where this relation appears.
    pub word_pair_proximity_docids: Database<U8StrStrCodec, CboRoaringBitmapCodec>,
    /// Maps the proximity between a pair of word and prefix with all the docids where this relation appears.
    pub word_prefix_pair_proximity_docids: Database<U8StrStrCodec, CboRoaringBitmapCodec>,
    /// Maps the proximity between a pair of prefix and word with all the docids where this relation appears.
    pub prefix_word_pair_proximity_docids: Database<U8StrStrCodec, CboRoaringBitmapCodec>,

    /// Maps the word and the position with the docids that correspond to it.
    pub word_position_docids: Database<StrBEU16Codec, CboRoaringBitmapCodec>,
    /// Maps the word and the field id with the docids that correspond to it.
    pub word_fid_docids: Database<StrBEU16Codec, CboRoaringBitmapCodec>,
    /// Maps the field id and the word count with the docids that correspond to it.
    pub field_id_word_count_docids: Database<FieldIdWordCountCodec, CboRoaringBitmapCodec>,
    /// Maps the word prefix and a position with all the docids where the prefix appears at the position.
    pub word_prefix_position_docids: Database<StrBEU16Codec, CboRoaringBitmapCodec>,
    /// Maps the word prefix and a field id with all the docids where the prefix appears inside the field.
    pub word_prefix_fid_docids: Database<StrBEU16Codec, CboRoaringBitmapCodec>,

    /// Maps the script and language with all the docids that correspond to it.
    pub script_language_docids: Database<ScriptLanguageCodec, RoaringBitmapCodec>,

    /// Maps the facet field id and the docids for which this field exists.
    pub facet_id_exists_docids: Database<FieldIdCodec, CboRoaringBitmapCodec>,
    /// Maps the facet field id and the docids for which this field is set as null.
    pub facet_id_is_null_docids: Database<FieldIdCodec, CboRoaringBitmapCodec>,
    /// Maps the facet field id and the docids for which this field is considered empty.
    pub facet_id_is_empty_docids: Database<FieldIdCodec, CboRoaringBitmapCodec>,

    /// Maps the facet field id and ranges of numbers with the docids that correspond to them.
    pub facet_id_f64_docids: Database<FacetGroupKeyCodec<OrderedF64Codec>, FacetGroupValueCodec>,
    /// Maps the facet field id and ranges of strings with the docids that correspond to them.
    pub facet_id_string_docids: Database<FacetGroupKeyCodec<StrRefCodec>, FacetGroupValueCodec>,
    /// Maps the facet field id of the normalized-for-search string facets with their original versions.
    pub facet_id_normalized_string_strings: Database<BEU16StrCodec, SerdeJson<BTreeSet<String>>>,
    /// Maps the facet field id of the string facets with an FST containing all the facet values.
    pub facet_id_string_fst: Database<OwnedType<BEU16>, FstSetCodec>,

    /// Maps the document id, the facet field id and the numbers.
    pub field_id_docid_facet_f64s: Database<FieldDocIdFacetF64Codec, Unit>,
    /// Maps the document id, the facet field id and the strings.
    pub field_id_docid_facet_strings: Database<FieldDocIdFacetStringCodec, Str>,

    /// Maps a vector id to the document id that has it.
    pub vector_id_docid: Database<OwnedType<BEU32>, OwnedType<BEU32>>,

    /// Maps the document id to the document as an obkv store.
    pub(crate) documents: Database<OwnedType<BEU32>, ObkvCodec>,
}

impl Index {
    pub fn new_with_creation_dates<P: AsRef<Path>>(
        mut options: heed::EnvOpenOptions,
        path: P,
        created_at: OffsetDateTime,
        updated_at: OffsetDateTime,
    ) -> Result<Index> {
        use db_name::*;

        options.max_dbs(25);
        unsafe { options.flag(Flags::MdbAlwaysFreePages) };

        let env = options.open(path)?;
        let mut wtxn = env.write_txn()?;
        let main = env.create_poly_database(&mut wtxn, Some(MAIN))?;
        let word_docids = env.create_database(&mut wtxn, Some(WORD_DOCIDS))?;
        let exact_word_docids = env.create_database(&mut wtxn, Some(EXACT_WORD_DOCIDS))?;
        let word_prefix_docids = env.create_database(&mut wtxn, Some(WORD_PREFIX_DOCIDS))?;
        let exact_word_prefix_docids =
            env.create_database(&mut wtxn, Some(EXACT_WORD_PREFIX_DOCIDS))?;
        let word_pair_proximity_docids =
            env.create_database(&mut wtxn, Some(WORD_PAIR_PROXIMITY_DOCIDS))?;
        let script_language_docids =
            env.create_database(&mut wtxn, Some(SCRIPT_LANGUAGE_DOCIDS))?;
        let word_prefix_pair_proximity_docids =
            env.create_database(&mut wtxn, Some(WORD_PREFIX_PAIR_PROXIMITY_DOCIDS))?;
        let prefix_word_pair_proximity_docids =
            env.create_database(&mut wtxn, Some(PREFIX_WORD_PAIR_PROXIMITY_DOCIDS))?;
        let word_position_docids = env.create_database(&mut wtxn, Some(WORD_POSITION_DOCIDS))?;
        let word_fid_docids = env.create_database(&mut wtxn, Some(WORD_FIELD_ID_DOCIDS))?;
        let field_id_word_count_docids =
            env.create_database(&mut wtxn, Some(FIELD_ID_WORD_COUNT_DOCIDS))?;
        let word_prefix_position_docids =
            env.create_database(&mut wtxn, Some(WORD_PREFIX_POSITION_DOCIDS))?;
        let word_prefix_fid_docids =
            env.create_database(&mut wtxn, Some(WORD_PREFIX_FIELD_ID_DOCIDS))?;
        let facet_id_f64_docids = env.create_database(&mut wtxn, Some(FACET_ID_F64_DOCIDS))?;
        let facet_id_string_docids =
            env.create_database(&mut wtxn, Some(FACET_ID_STRING_DOCIDS))?;
        let facet_id_normalized_string_strings =
            env.create_database(&mut wtxn, Some(FACET_ID_NORMALIZED_STRING_STRINGS))?;
        let facet_id_string_fst = env.create_database(&mut wtxn, Some(FACET_ID_STRING_FST))?;
        let facet_id_exists_docids =
            env.create_database(&mut wtxn, Some(FACET_ID_EXISTS_DOCIDS))?;
        let facet_id_is_null_docids =
            env.create_database(&mut wtxn, Some(FACET_ID_IS_NULL_DOCIDS))?;
        let facet_id_is_empty_docids =
            env.create_database(&mut wtxn, Some(FACET_ID_IS_EMPTY_DOCIDS))?;
        let field_id_docid_facet_f64s =
            env.create_database(&mut wtxn, Some(FIELD_ID_DOCID_FACET_F64S))?;
        let field_id_docid_facet_strings =
            env.create_database(&mut wtxn, Some(FIELD_ID_DOCID_FACET_STRINGS))?;
        let vector_id_docid = env.create_database(&mut wtxn, Some(VECTOR_ID_DOCID))?;
        let documents = env.create_database(&mut wtxn, Some(DOCUMENTS))?;
        wtxn.commit()?;

        Index::set_creation_dates(&env, main, created_at, updated_at)?;

        Ok(Index {
            env,
            main,
            word_docids,
            exact_word_docids,
            word_prefix_docids,
            exact_word_prefix_docids,
            word_pair_proximity_docids,
            script_language_docids,
            word_prefix_pair_proximity_docids,
            prefix_word_pair_proximity_docids,
            word_position_docids,
            word_fid_docids,
            word_prefix_position_docids,
            word_prefix_fid_docids,
            field_id_word_count_docids,
            facet_id_f64_docids,
            facet_id_string_docids,
            facet_id_normalized_string_strings,
            facet_id_string_fst,
            facet_id_exists_docids,
            facet_id_is_null_docids,
            facet_id_is_empty_docids,
            field_id_docid_facet_f64s,
            field_id_docid_facet_strings,
            vector_id_docid,
            documents,
        })
    }

    pub fn new<P: AsRef<Path>>(options: heed::EnvOpenOptions, path: P) -> Result<Index> {
        let now = OffsetDateTime::now_utc();
        Self::new_with_creation_dates(options, path, now, now)
    }
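
    // Illustrative usage (a minimal sketch, not part of the original file): opening an `Index`
    // only requires a configured `heed::EnvOpenOptions` and an existing directory. The map size
    // and path below are assumptions for the sketch, not recommended values.
    //
    //     let mut options = heed::EnvOpenOptions::new();
    //     options.map_size(100 * 1024 * 1024); // 100 MiB, hypothetical
    //     let index = Index::new(options, "./my-index.mdb")?;
    //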

    fn set_creation_dates(
        env: &heed::Env,
        main: PolyDatabase,
        created_at: OffsetDateTime,
        updated_at: OffsetDateTime,
    ) -> heed::Result<()> {
        let mut txn = env.write_txn()?;
        // The db was just created, we update its metadata with the relevant information.
        if main.get::<_, Str, SerdeJson<OffsetDateTime>>(&txn, main_key::CREATED_AT_KEY)?.is_none()
        {
            main.put::<_, Str, SerdeJson<OffsetDateTime>>(
                &mut txn,
                main_key::UPDATED_AT_KEY,
                &updated_at,
            )?;
            main.put::<_, Str, SerdeJson<OffsetDateTime>>(
                &mut txn,
                main_key::CREATED_AT_KEY,
                &created_at,
            )?;
            txn.commit()?;
        }
        Ok(())
    }

    /// Create a write transaction to be able to write into the index.
    pub fn write_txn(&self) -> heed::Result<RwTxn> {
        self.env.write_txn()
    }

    /// Create a read transaction to be able to read the index.
    pub fn read_txn(&self) -> heed::Result<RoTxn> {
        self.env.read_txn()
    }

    /// Returns the canonicalized path where the heed `Env` of this `Index` lives.
    pub fn path(&self) -> &Path {
        self.env.path()
    }

    /// Returns the size used by the index without the cached pages.
    pub fn used_size(&self) -> Result<u64> {
        Ok(self.env.non_free_pages_size()?)
    }

    /// Returns the real size used by the index.
    pub fn on_disk_size(&self) -> Result<u64> {
        Ok(self.env.real_disk_size()?)
    }

    /// Returns the map size the underlying environment was opened with, in bytes.
    ///
    /// This value does not represent the current on-disk size of the index.
    ///
    /// This value is the maximum between the map size passed during the opening of the index
    /// and the on-disk size of the index at the time of opening.
    pub fn map_size(&self) -> Result<usize> {
        Ok(self.env.map_size()?)
    }

    pub fn copy_to_path<P: AsRef<Path>>(&self, path: P, option: CompactionOption) -> Result<File> {
        self.env.copy_to_path(path, option).map_err(Into::into)
    }

    /// Returns an `EnvClosingEvent` that can be used to wait for the closing event;
    /// multiple threads can wait on this event.
    ///
    /// Make sure that you drop all the copies of `Index`es you have, env closing is triggered
    /// when all references are dropped, the last one will eventually close the environment.
    pub fn prepare_for_closing(self) -> heed::EnvClosingEvent {
        self.env.prepare_for_closing()
    }

    /* documents ids */

    /// Writes the documents ids that correspond to the user-ids-documents-ids FST.
    pub(crate) fn put_documents_ids(
        &self,
        wtxn: &mut RwTxn,
        docids: &RoaringBitmap,
    ) -> heed::Result<()> {
        self.main.put::<_, Str, RoaringBitmapCodec>(wtxn, main_key::DOCUMENTS_IDS_KEY, docids)
    }

    /// Returns the internal documents ids.
    pub fn documents_ids(&self, rtxn: &RoTxn) -> heed::Result<RoaringBitmap> {
        Ok(self
            .main
            .get::<_, Str, RoaringBitmapCodec>(rtxn, main_key::DOCUMENTS_IDS_KEY)?
            .unwrap_or_default())
    }

    /// Returns the number of documents indexed in the database.
    pub fn number_of_documents(&self, rtxn: &RoTxn) -> Result<u64> {
        let count =
            self.main.get::<_, Str, RoaringBitmapLenCodec>(rtxn, main_key::DOCUMENTS_IDS_KEY)?;
        Ok(count.unwrap_or_default())
    }
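
    // Illustrative usage (a minimal sketch, not part of the original file): counting documents
    // only decodes the length of the stored roaring bitmap thanks to `RoaringBitmapLenCodec`,
    // so it is cheaper than materializing the whole bitmap.
    //
    //     let rtxn = index.read_txn()?;
    //     let count = index.number_of_documents(&rtxn)?;
    //     let ids = index.documents_ids(&rtxn)?;
    //     assert_eq!(count, ids.len());
    //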

    /* primary key */

    /// Writes the documents primary key, this is the field name that is used to store the id.
    pub(crate) fn put_primary_key(&self, wtxn: &mut RwTxn, primary_key: &str) -> heed::Result<()> {
        self.set_updated_at(wtxn, &OffsetDateTime::now_utc())?;
        self.main.put::<_, Str, Str>(wtxn, main_key::PRIMARY_KEY_KEY, primary_key)
    }

    /// Deletes the primary key of the documents, this can be done to reset index settings.
    pub(crate) fn delete_primary_key(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
        self.main.delete::<_, Str>(wtxn, main_key::PRIMARY_KEY_KEY)
    }

    /// Returns the documents primary key, `None` if it hasn't been defined.
    pub fn primary_key<'t>(&self, rtxn: &'t RoTxn) -> heed::Result<Option<&'t str>> {
        self.main.get::<_, Str, Str>(rtxn, main_key::PRIMARY_KEY_KEY)
    }

    /* external documents ids */

    /// Writes the external documents ids and internal ids (i.e. `u32`).
    pub(crate) fn put_external_documents_ids(
        &self,
        wtxn: &mut RwTxn,
        external_documents_ids: &ExternalDocumentsIds<'_>,
    ) -> heed::Result<()> {
        self.main.put::<_, Str, ByteSlice>(
            wtxn,
            main_key::EXTERNAL_DOCUMENTS_IDS_KEY,
            external_documents_ids.as_bytes(),
        )?;
        Ok(())
    }

    /// Returns the external documents ids map which associates the external ids
    /// with the internal ids (i.e. `u32`).
    pub fn external_documents_ids<'t>(&self, rtxn: &'t RoTxn) -> Result<ExternalDocumentsIds<'t>> {
        let fst = self.main.get::<_, Str, ByteSlice>(rtxn, main_key::EXTERNAL_DOCUMENTS_IDS_KEY)?;
        let fst = match fst {
            Some(fst) => fst::Map::new(fst)?.map_data(Cow::Borrowed)?,
            None => fst::Map::default().map_data(Cow::Owned)?,
        };
        Ok(ExternalDocumentsIds::new(fst))
    }

    /* fields ids map */

    /// Writes the fields ids map which associates the documents keys with an internal field id
    /// (i.e. `u8`), this field id is used to identify fields in the obkv documents.
    pub(crate) fn put_fields_ids_map(
        &self,
        wtxn: &mut RwTxn,
        map: &FieldsIdsMap,
    ) -> heed::Result<()> {
        self.main.put::<_, Str, SerdeJson<FieldsIdsMap>>(wtxn, main_key::FIELDS_IDS_MAP_KEY, map)
    }

    /// Returns the fields ids map which associates the documents keys with an internal field id
    /// (i.e. `u8`), this field id is used to identify fields in the obkv documents.
    pub fn fields_ids_map(&self, rtxn: &RoTxn) -> heed::Result<FieldsIdsMap> {
        Ok(self
            .main
            .get::<_, Str, SerdeJson<FieldsIdsMap>>(rtxn, main_key::FIELDS_IDS_MAP_KEY)?
            .unwrap_or_default())
    }

    /* geo rtree */

    /// Writes the provided `rtree` which associates coordinates to documents ids.
    pub(crate) fn put_geo_rtree(
        &self,
        wtxn: &mut RwTxn,
        rtree: &RTree<GeoPoint>,
    ) -> heed::Result<()> {
        self.main.put::<_, Str, SerdeBincode<RTree<GeoPoint>>>(wtxn, main_key::GEO_RTREE_KEY, rtree)
    }

    /// Delete the `rtree` which associates coordinates to documents ids.
    pub(crate) fn delete_geo_rtree(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
        self.main.delete::<_, Str>(wtxn, main_key::GEO_RTREE_KEY)
    }

    /// Returns the `rtree` which associates coordinates to documents ids.
    pub fn geo_rtree(&self, rtxn: &RoTxn) -> Result<Option<RTree<GeoPoint>>> {
        match self
            .main
            .get::<_, Str, SerdeBincode<RTree<GeoPoint>>>(rtxn, main_key::GEO_RTREE_KEY)?
        {
            Some(rtree) => Ok(Some(rtree)),
            None => Ok(None),
        }
    }

    /* geo faceted */

    /// Writes the documents ids that are faceted with a _geo field.
    pub(crate) fn put_geo_faceted_documents_ids(
        &self,
        wtxn: &mut RwTxn,
        docids: &RoaringBitmap,
    ) -> heed::Result<()> {
        self.main.put::<_, Str, RoaringBitmapCodec>(
            wtxn,
            main_key::GEO_FACETED_DOCUMENTS_IDS_KEY,
            docids,
        )
    }

    /// Delete the documents ids that are faceted with a _geo field.
    pub(crate) fn delete_geo_faceted_documents_ids(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
        self.main.delete::<_, Str>(wtxn, main_key::GEO_FACETED_DOCUMENTS_IDS_KEY)
    }

    /// Retrieve all the documents ids that are faceted with a _geo field.
    pub fn geo_faceted_documents_ids(&self, rtxn: &RoTxn) -> heed::Result<RoaringBitmap> {
        match self
            .main
            .get::<_, Str, RoaringBitmapCodec>(rtxn, main_key::GEO_FACETED_DOCUMENTS_IDS_KEY)?
        {
            Some(docids) => Ok(docids),
            None => Ok(RoaringBitmap::new()),
        }
    }

    /* vector HNSW */

    /// Writes the provided `hnsw`.
    pub(crate) fn put_vector_hnsw(&self, wtxn: &mut RwTxn, hnsw: &Hnsw) -> heed::Result<()> {
        // We must delete all the chunks before we write the new HNSW chunks.
        self.delete_vector_hnsw(wtxn)?;

        let chunk_size = 1024 * 1024 * (1024 + 512); // 1.5 GiB
        let bytes = bincode::serialize(hnsw).map_err(|_| heed::Error::Encoding)?;
        for (i, chunk) in bytes.chunks(chunk_size).enumerate() {
            let i = i as u32;
            let mut key = main_key::VECTOR_HNSW_KEY_PREFIX.as_bytes().to_vec();
            key.extend_from_slice(&i.to_be_bytes());
            self.main.put::<_, ByteSlice, ByteSlice>(wtxn, &key, chunk)?;
        }
        Ok(())
    }
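
    // For clarity (not part of the original file), the chunked layout written above looks like
    // this in the `main` database, assuming the bincode-serialized HNSW spans three chunks:
    //
    //     "vector-hnsw" + 0x00000000 -> first 1.5 GiB of the serialized HNSW
    //     "vector-hnsw" + 0x00000001 -> next 1.5 GiB
    //     "vector-hnsw" + 0x00000002 -> remaining bytes
    //
    // `vector_hnsw` below reads the chunks back in key order and glues them together with
    // `ReadableSlices` before deserializing.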

    /// Delete the `hnsw`.
    pub(crate) fn delete_vector_hnsw(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
        let mut iter = self.main.prefix_iter_mut::<_, ByteSlice, DecodeIgnore>(
            wtxn,
            main_key::VECTOR_HNSW_KEY_PREFIX.as_bytes(),
        )?;
        let mut deleted = false;
        while iter.next().transpose()?.is_some() {
            // We do not keep a reference to the key or the value.
            unsafe { deleted |= iter.del_current()? };
        }
        Ok(deleted)
    }

    /// Returns the `hnsw`.
    pub fn vector_hnsw(&self, rtxn: &RoTxn) -> Result<Option<Hnsw>> {
        let mut slices = Vec::new();
        for result in
            self.main.prefix_iter::<_, Str, ByteSlice>(rtxn, main_key::VECTOR_HNSW_KEY_PREFIX)?
        {
            let (_, slice) = result?;
            slices.push(slice);
        }

        if slices.is_empty() {
            Ok(None)
        } else {
            let readable_slices: ReadableSlices<_> = slices.into_iter().collect();
            Ok(Some(bincode::deserialize_from(readable_slices).map_err(|_| heed::Error::Decoding)?))
        }
    }

    /* field distribution */

    /// Writes the field distribution which associates every field name with
    /// the number of times it occurs in the documents.
    pub(crate) fn put_field_distribution(
        &self,
        wtxn: &mut RwTxn,
        distribution: &FieldDistribution,
    ) -> heed::Result<()> {
        self.main.put::<_, Str, SerdeJson<FieldDistribution>>(
            wtxn,
            main_key::FIELD_DISTRIBUTION_KEY,
            distribution,
        )
    }

    /// Returns the field distribution which associates every field name with
    /// the number of times it occurs in the documents.
    pub fn field_distribution(&self, rtxn: &RoTxn) -> heed::Result<FieldDistribution> {
        Ok(self
            .main
            .get::<_, Str, SerdeJson<FieldDistribution>>(rtxn, main_key::FIELD_DISTRIBUTION_KEY)?
            .unwrap_or_default())
    }

    /* displayed fields */

    /// Writes the fields that must be displayed in the defined order.
    /// There must not be any duplicate field id.
    pub(crate) fn put_displayed_fields(
        &self,
        wtxn: &mut RwTxn,
        fields: &[&str],
    ) -> heed::Result<()> {
        self.main.put::<_, Str, SerdeBincode<&[&str]>>(
            wtxn,
            main_key::DISPLAYED_FIELDS_KEY,
            &fields,
        )
    }

    /// Deletes the displayed fields ids, this will make the engine display
    /// all the documents attributes in the order of the `FieldsIdsMap`.
    pub(crate) fn delete_displayed_fields(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
        self.main.delete::<_, Str>(wtxn, main_key::DISPLAYED_FIELDS_KEY)
    }

    /// Returns the displayed fields in the order they were set by the user. If it returns
    /// `None` it means that all the attributes are set as displayed in the order of the `FieldsIdsMap`.
    pub fn displayed_fields<'t>(&self, rtxn: &'t RoTxn) -> heed::Result<Option<Vec<&'t str>>> {
        self.main.get::<_, Str, SerdeBincode<Vec<&'t str>>>(rtxn, main_key::DISPLAYED_FIELDS_KEY)
    }

    /// Identical to `displayed_fields`, but returns the ids instead.
    pub fn displayed_fields_ids(&self, rtxn: &RoTxn) -> Result<Option<Vec<FieldId>>> {
        match self.displayed_fields(rtxn)? {
            Some(fields) => {
                let fields_ids_map = self.fields_ids_map(rtxn)?;
                let mut fields_ids = Vec::new();
                for name in fields.into_iter() {
                    if let Some(field_id) = fields_ids_map.id(name) {
                        fields_ids.push(field_id);
                    }
                }
                Ok(Some(fields_ids))
            }
            None => Ok(None),
        }
    }

    /* remove hidden fields */

    pub fn remove_hidden_fields(
        &self,
        rtxn: &RoTxn,
        fields: impl IntoIterator<Item = impl AsRef<str>>,
    ) -> Result<(BTreeSet<String>, bool)> {
        let mut valid_fields =
            fields.into_iter().map(|f| f.as_ref().to_string()).collect::<BTreeSet<String>>();

        let fields_len = valid_fields.len();

        if let Some(dn) = self.displayed_fields(rtxn)? {
            let displayable_names = dn.iter().map(|s| s.to_string()).collect();
            valid_fields = &valid_fields & &displayable_names;
        }

        let hidden_fields = fields_len > valid_fields.len();
        Ok((valid_fields, hidden_fields))
    }
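
    // Illustrative behaviour (not part of the original file), assuming the displayed fields are
    // `["title", "overview"]`:
    //
    //     let (valid, hidden) = index.remove_hidden_fields(&rtxn, ["title", "release_date"])?;
    //     // `valid` only contains "title"; `hidden` is true because "release_date" was dropped.
    //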

    /* searchable fields */

    /// Write the user defined searchable fields and generate the real searchable fields from the specified fields ids map.
    pub(crate) fn put_all_searchable_fields_from_fields_ids_map(
        &self,
        wtxn: &mut RwTxn,
        user_fields: &[&str],
        fields_ids_map: &FieldsIdsMap,
    ) -> heed::Result<()> {
        // We can write the user defined searchable fields as-is.
        self.put_user_defined_searchable_fields(wtxn, user_fields)?;

        // Now we generate the real searchable fields:
        // 1. Take the user defined searchable fields as-is to keep the priority defined by the attributes criterion.
        // 2. Iterate over the user defined searchable fields.
        // 3. If a user defined field is a subset of a field defined in the fields_ids_map
        //    (ie doggo.name is a subset of doggo) then we push it at the end of the fields.
        let mut real_fields = user_fields.to_vec();

        for field_from_map in fields_ids_map.names() {
            for user_field in user_fields {
                if crate::is_faceted_by(field_from_map, user_field)
                    && !user_fields.contains(&field_from_map)
                {
                    real_fields.push(field_from_map);
                }
            }
        }

        self.put_searchable_fields(wtxn, &real_fields)
    }
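
    // Worked example (not part of the original file): with user-defined searchable fields
    // `["doggo", "title"]` and a fields ids map containing `doggo`, `doggo.name`, `doggo.age`
    // and `title`, the generated searchable fields are
    // `["doggo", "title", "doggo.name", "doggo.age"]`: the user order is kept first and the
    // nested fields of `doggo` are appended at the end.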

    pub(crate) fn delete_all_searchable_fields(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
        let did_delete_searchable = self.delete_searchable_fields(wtxn)?;
        let did_delete_user_defined = self.delete_user_defined_searchable_fields(wtxn)?;
        Ok(did_delete_searchable || did_delete_user_defined)
    }

    /// Writes the searchable fields, when this list is specified, only these are indexed.
    fn put_searchable_fields(&self, wtxn: &mut RwTxn, fields: &[&str]) -> heed::Result<()> {
        self.main.put::<_, Str, SerdeBincode<&[&str]>>(
            wtxn,
            main_key::SEARCHABLE_FIELDS_KEY,
            &fields,
        )
    }

    /// Deletes the searchable fields, when no fields are specified, all fields are indexed.
    fn delete_searchable_fields(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
        self.main.delete::<_, Str>(wtxn, main_key::SEARCHABLE_FIELDS_KEY)
    }

    /// Returns the searchable fields, those are the fields that are indexed,
    /// if the searchable fields aren't there it means that **all** the fields are indexed.
    pub fn searchable_fields<'t>(&self, rtxn: &'t RoTxn) -> heed::Result<Option<Vec<&'t str>>> {
        self.main.get::<_, Str, SerdeBincode<Vec<&'t str>>>(rtxn, main_key::SEARCHABLE_FIELDS_KEY)
    }

    /// Identical to `searchable_fields`, but returns the ids instead.
    pub fn searchable_fields_ids(&self, rtxn: &RoTxn) -> Result<Option<Vec<FieldId>>> {
        match self.searchable_fields(rtxn)? {
            Some(fields) => {
                let fields_ids_map = self.fields_ids_map(rtxn)?;
                let mut fields_ids = Vec::new();
                for name in fields {
                    if let Some(field_id) = fields_ids_map.id(name) {
                        fields_ids.push(field_id);
                    }
                }
                Ok(Some(fields_ids))
            }
            None => Ok(None),
        }
    }

    /// Writes the user defined searchable fields, when this list is specified, only these are indexed.
    pub(crate) fn put_user_defined_searchable_fields(
        &self,
        wtxn: &mut RwTxn,
        fields: &[&str],
    ) -> heed::Result<()> {
        self.main.put::<_, Str, SerdeBincode<_>>(
            wtxn,
            main_key::USER_DEFINED_SEARCHABLE_FIELDS_KEY,
            &fields,
        )
    }

    /// Deletes the user defined searchable fields, when no fields are specified, all fields are indexed.
    pub(crate) fn delete_user_defined_searchable_fields(
        &self,
        wtxn: &mut RwTxn,
    ) -> heed::Result<bool> {
        self.main.delete::<_, Str>(wtxn, main_key::USER_DEFINED_SEARCHABLE_FIELDS_KEY)
    }

    /// Returns the user defined searchable fields.
    pub fn user_defined_searchable_fields<'t>(
        &self,
        rtxn: &'t RoTxn,
    ) -> heed::Result<Option<Vec<&'t str>>> {
        self.main
            .get::<_, Str, SerdeBincode<Vec<_>>>(rtxn, main_key::USER_DEFINED_SEARCHABLE_FIELDS_KEY)
    }

    /* filterable fields */

    /// Writes the filterable fields names in the database.
    pub(crate) fn put_filterable_fields(
        &self,
        wtxn: &mut RwTxn,
        fields: &HashSet<String>,
    ) -> heed::Result<()> {
        self.main.put::<_, Str, SerdeJson<_>>(wtxn, main_key::FILTERABLE_FIELDS_KEY, fields)
    }

    /// Deletes the filterable fields ids in the database.
    pub(crate) fn delete_filterable_fields(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
        self.main.delete::<_, Str>(wtxn, main_key::FILTERABLE_FIELDS_KEY)
    }

    /// Returns the filterable fields names.
    pub fn filterable_fields(&self, rtxn: &RoTxn) -> heed::Result<HashSet<String>> {
        Ok(self
            .main
            .get::<_, Str, SerdeJson<_>>(rtxn, main_key::FILTERABLE_FIELDS_KEY)?
            .unwrap_or_default())
    }

    /// Identical to `filterable_fields`, but returns ids instead.
    pub fn filterable_fields_ids(&self, rtxn: &RoTxn) -> Result<HashSet<FieldId>> {
        let fields = self.filterable_fields(rtxn)?;
        let fields_ids_map = self.fields_ids_map(rtxn)?;

        let mut fields_ids = HashSet::new();
        for name in fields {
            if let Some(field_id) = fields_ids_map.id(&name) {
                fields_ids.insert(field_id);
            }
        }

        Ok(fields_ids)
    }

    /* sortable fields */

    /// Writes the sortable fields names in the database.
    pub(crate) fn put_sortable_fields(
        &self,
        wtxn: &mut RwTxn,
        fields: &HashSet<String>,
    ) -> heed::Result<()> {
        self.main.put::<_, Str, SerdeJson<_>>(wtxn, main_key::SORTABLE_FIELDS_KEY, fields)
    }

    /// Deletes the sortable fields ids in the database.
    pub(crate) fn delete_sortable_fields(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
        self.main.delete::<_, Str>(wtxn, main_key::SORTABLE_FIELDS_KEY)
    }

    /// Returns the sortable fields names.
    pub fn sortable_fields(&self, rtxn: &RoTxn) -> heed::Result<HashSet<String>> {
        Ok(self
            .main
            .get::<_, Str, SerdeJson<_>>(rtxn, main_key::SORTABLE_FIELDS_KEY)?
            .unwrap_or_default())
    }

    /// Identical to `sortable_fields`, but returns ids instead.
    pub fn sortable_fields_ids(&self, rtxn: &RoTxn) -> Result<HashSet<FieldId>> {
        let fields = self.sortable_fields(rtxn)?;
        let fields_ids_map = self.fields_ids_map(rtxn)?;
        Ok(fields.into_iter().filter_map(|name| fields_ids_map.id(&name)).collect())
    }

    /* faceted fields */

    /// Writes the faceted fields in the database.
    pub(crate) fn put_faceted_fields(
        &self,
        wtxn: &mut RwTxn,
        fields: &HashSet<String>,
    ) -> heed::Result<()> {
        self.main.put::<_, Str, SerdeJson<_>>(wtxn, main_key::HIDDEN_FACETED_FIELDS_KEY, fields)
    }

    /// Returns the faceted fields names.
    pub fn faceted_fields(&self, rtxn: &RoTxn) -> heed::Result<HashSet<String>> {
        Ok(self
            .main
            .get::<_, Str, SerdeJson<_>>(rtxn, main_key::HIDDEN_FACETED_FIELDS_KEY)?
            .unwrap_or_default())
    }

    /// Identical to `faceted_fields`, but returns ids instead.
    pub fn faceted_fields_ids(&self, rtxn: &RoTxn) -> Result<HashSet<FieldId>> {
        let fields = self.faceted_fields(rtxn)?;
        let fields_ids_map = self.fields_ids_map(rtxn)?;

        let mut fields_ids = HashSet::new();
        for name in fields {
            if let Some(field_id) = fields_ids_map.id(&name) {
                fields_ids.insert(field_id);
            }
        }

        Ok(fields_ids)
    }

    /// Returns the user defined faceted fields names.
    ///
    /// The user faceted fields are the union of all the filterable, sortable, distinct, and Asc/Desc fields.
    pub fn user_defined_faceted_fields(&self, rtxn: &RoTxn) -> Result<HashSet<String>> {
        let filterable_fields = self.filterable_fields(rtxn)?;
        let sortable_fields = self.sortable_fields(rtxn)?;
        let distinct_field = self.distinct_field(rtxn)?;
        let asc_desc_fields =
            self.criteria(rtxn)?.into_iter().filter_map(|criterion| match criterion {
                Criterion::Asc(field) | Criterion::Desc(field) => Some(field),
                _otherwise => None,
            });

        let mut faceted_fields = filterable_fields;
        faceted_fields.extend(sortable_fields);
        faceted_fields.extend(asc_desc_fields);
        if let Some(field) = distinct_field {
            faceted_fields.insert(field.to_owned());
        }

        Ok(faceted_fields)
    }
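
    // Worked example (not part of the original file): with filterable fields `{"genres"}`,
    // sortable fields `{"release_date"}`, a distinct field `isbn` and an `Asc(price)` criterion,
    // the user defined faceted fields are `{"genres", "release_date", "price", "isbn"}`.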

    /// Identical to `user_defined_faceted_fields`, but returns ids instead.
    pub fn user_defined_faceted_fields_ids(&self, rtxn: &RoTxn) -> Result<HashSet<FieldId>> {
        let fields = self.faceted_fields(rtxn)?;
        let fields_ids_map = self.fields_ids_map(rtxn)?;

        let mut fields_ids = HashSet::new();
        for name in fields.into_iter() {
            if let Some(field_id) = fields_ids_map.id(&name) {
                fields_ids.insert(field_id);
            }
        }

        Ok(fields_ids)
    }

    /* faceted documents ids */

    /// Retrieve all the documents which contain this field id set as null.
    pub fn null_faceted_documents_ids(
        &self,
        rtxn: &RoTxn,
        field_id: FieldId,
    ) -> heed::Result<RoaringBitmap> {
        match self.facet_id_is_null_docids.get(rtxn, &BEU16::new(field_id))? {
            Some(docids) => Ok(docids),
            None => Ok(RoaringBitmap::new()),
        }
    }

    /// Retrieve all the documents which contain this field id and that are considered empty.
    pub fn empty_faceted_documents_ids(
        &self,
        rtxn: &RoTxn,
        field_id: FieldId,
    ) -> heed::Result<RoaringBitmap> {
        match self.facet_id_is_empty_docids.get(rtxn, &BEU16::new(field_id))? {
            Some(docids) => Ok(docids),
            None => Ok(RoaringBitmap::new()),
        }
    }

    /// Retrieve all the documents which contain this field id.
    pub fn exists_faceted_documents_ids(
        &self,
        rtxn: &RoTxn,
        field_id: FieldId,
    ) -> heed::Result<RoaringBitmap> {
        match self.facet_id_exists_docids.get(rtxn, &BEU16::new(field_id))? {
            Some(docids) => Ok(docids),
            None => Ok(RoaringBitmap::new()),
        }
    }

    /* distinct field */

    pub(crate) fn put_distinct_field(
        &self,
        wtxn: &mut RwTxn,
        distinct_field: &str,
    ) -> heed::Result<()> {
        self.main.put::<_, Str, Str>(wtxn, main_key::DISTINCT_FIELD_KEY, distinct_field)
    }

    pub fn distinct_field<'a>(&self, rtxn: &'a RoTxn) -> heed::Result<Option<&'a str>> {
        self.main.get::<_, Str, Str>(rtxn, main_key::DISTINCT_FIELD_KEY)
    }

    pub(crate) fn delete_distinct_field(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
        self.main.delete::<_, Str>(wtxn, main_key::DISTINCT_FIELD_KEY)
    }

    /* criteria */

    pub(crate) fn put_criteria(
        &self,
        wtxn: &mut RwTxn,
        criteria: &[Criterion],
    ) -> heed::Result<()> {
        self.main.put::<_, Str, SerdeJson<&[Criterion]>>(wtxn, main_key::CRITERIA_KEY, &criteria)
    }

    pub(crate) fn delete_criteria(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
        self.main.delete::<_, Str>(wtxn, main_key::CRITERIA_KEY)
    }

    pub fn criteria(&self, rtxn: &RoTxn) -> heed::Result<Vec<Criterion>> {
        match self.main.get::<_, Str, SerdeJson<Vec<Criterion>>>(rtxn, main_key::CRITERIA_KEY)? {
            Some(criteria) => Ok(criteria),
            None => Ok(default_criteria()),
        }
    }

    /* words fst */

    /// Writes the FST which is the words dictionary of the engine.
    pub(crate) fn put_words_fst<A: AsRef<[u8]>>(
        &self,
        wtxn: &mut RwTxn,
        fst: &fst::Set<A>,
    ) -> heed::Result<()> {
        self.main.put::<_, Str, ByteSlice>(wtxn, main_key::WORDS_FST_KEY, fst.as_fst().as_bytes())
    }

    /// Returns the FST which is the words dictionary of the engine.
    pub fn words_fst<'t>(&self, rtxn: &'t RoTxn) -> Result<fst::Set<Cow<'t, [u8]>>> {
        match self.main.get::<_, Str, ByteSlice>(rtxn, main_key::WORDS_FST_KEY)? {
            Some(bytes) => Ok(fst::Set::new(bytes)?.map_data(Cow::Borrowed)?),
            None => Ok(fst::Set::default().map_data(Cow::Owned)?),
        }
    }

    /* stop words */

    pub(crate) fn put_stop_words<A: AsRef<[u8]>>(
        &self,
        wtxn: &mut RwTxn,
        fst: &fst::Set<A>,
    ) -> heed::Result<()> {
        self.main.put::<_, Str, ByteSlice>(wtxn, main_key::STOP_WORDS_KEY, fst.as_fst().as_bytes())
    }

    pub(crate) fn delete_stop_words(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
        self.main.delete::<_, Str>(wtxn, main_key::STOP_WORDS_KEY)
    }

    pub fn stop_words<'t>(&self, rtxn: &'t RoTxn) -> Result<Option<fst::Set<&'t [u8]>>> {
        match self.main.get::<_, Str, ByteSlice>(rtxn, main_key::STOP_WORDS_KEY)? {
            Some(bytes) => Ok(Some(fst::Set::new(bytes)?)),
            None => Ok(None),
        }
    }

    /* non separator tokens */

    pub(crate) fn put_non_separator_tokens(
        &self,
        wtxn: &mut RwTxn,
        set: &BTreeSet<String>,
    ) -> heed::Result<()> {
        self.main.put::<_, Str, SerdeBincode<_>>(wtxn, main_key::NON_SEPARATOR_TOKENS_KEY, set)
    }

    pub(crate) fn delete_non_separator_tokens(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
        self.main.delete::<_, Str>(wtxn, main_key::NON_SEPARATOR_TOKENS_KEY)
    }

    pub fn non_separator_tokens(&self, rtxn: &RoTxn) -> Result<Option<BTreeSet<String>>> {
        Ok(self.main.get::<_, Str, SerdeBincode<BTreeSet<String>>>(
            rtxn,
            main_key::NON_SEPARATOR_TOKENS_KEY,
        )?)
    }

    /* separator tokens */

    pub(crate) fn put_separator_tokens(
        &self,
        wtxn: &mut RwTxn,
        set: &BTreeSet<String>,
    ) -> heed::Result<()> {
        self.main.put::<_, Str, SerdeBincode<_>>(wtxn, main_key::SEPARATOR_TOKENS_KEY, set)
    }

    pub(crate) fn delete_separator_tokens(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
        self.main.delete::<_, Str>(wtxn, main_key::SEPARATOR_TOKENS_KEY)
    }

    pub fn separator_tokens(&self, rtxn: &RoTxn) -> Result<Option<BTreeSet<String>>> {
        Ok(self
            .main
            .get::<_, Str, SerdeBincode<BTreeSet<String>>>(rtxn, main_key::SEPARATOR_TOKENS_KEY)?)
    }

    /* separators easing method */

    pub fn allowed_separators(&self, rtxn: &RoTxn) -> Result<Option<BTreeSet<String>>> {
        let default_separators =
            charabia::separators::DEFAULT_SEPARATORS.iter().map(|s| s.to_string());
        let mut separators: Option<BTreeSet<_>> = None;
        if let Some(mut separator_tokens) = self.separator_tokens(rtxn)? {
            separator_tokens.extend(default_separators.clone());
            separators = Some(separator_tokens);
        }

        if let Some(non_separator_tokens) = self.non_separator_tokens(rtxn)? {
            separators = separators
                .or_else(|| Some(default_separators.collect()))
                .map(|separators| &separators - &non_separator_tokens);
        }

        Ok(separators)
    }
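
    // Illustrative behaviour (not part of the original file): if the user registered `"#"` as an
    // extra separator and `"-"` as a non-separator, the result is the charabia default
    // separators, plus `"#"`, minus `"-"`. When neither list is set, `None` is returned and the
    // tokenizer falls back to its own defaults.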

    /* dictionary */

    pub(crate) fn put_dictionary(
        &self,
        wtxn: &mut RwTxn,
        set: &BTreeSet<String>,
    ) -> heed::Result<()> {
        self.main.put::<_, Str, SerdeBincode<_>>(wtxn, main_key::DICTIONARY_KEY, set)
    }

    pub(crate) fn delete_dictionary(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
        self.main.delete::<_, Str>(wtxn, main_key::DICTIONARY_KEY)
    }

    pub fn dictionary(&self, rtxn: &RoTxn) -> Result<Option<BTreeSet<String>>> {
        Ok(self
            .main
            .get::<_, Str, SerdeBincode<BTreeSet<String>>>(rtxn, main_key::DICTIONARY_KEY)?)
    }

    /* synonyms */

    pub(crate) fn put_synonyms(
        &self,
        wtxn: &mut RwTxn,
        synonyms: &HashMap<Vec<String>, Vec<Vec<String>>>,
        user_defined_synonyms: &BTreeMap<String, Vec<String>>,
    ) -> heed::Result<()> {
        self.main.put::<_, Str, SerdeBincode<_>>(wtxn, main_key::SYNONYMS_KEY, synonyms)?;
        self.main.put::<_, Str, SerdeBincode<_>>(
            wtxn,
            main_key::USER_DEFINED_SYNONYMS_KEY,
            user_defined_synonyms,
        )
    }

    pub(crate) fn delete_synonyms(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
        self.main.delete::<_, Str>(wtxn, main_key::SYNONYMS_KEY)?;
        self.main.delete::<_, Str>(wtxn, main_key::USER_DEFINED_SYNONYMS_KEY)
    }

    pub fn user_defined_synonyms(
        &self,
        rtxn: &RoTxn,
    ) -> heed::Result<BTreeMap<String, Vec<String>>> {
        Ok(self
            .main
            .get::<_, Str, SerdeBincode<_>>(rtxn, main_key::USER_DEFINED_SYNONYMS_KEY)?
            .unwrap_or_default())
    }

    pub fn synonyms(&self, rtxn: &RoTxn) -> heed::Result<HashMap<Vec<String>, Vec<Vec<String>>>> {
        Ok(self
            .main
            .get::<_, Str, SerdeBincode<_>>(rtxn, main_key::SYNONYMS_KEY)?
            .unwrap_or_default())
    }

    pub fn words_synonyms<S: AsRef<str>>(
        &self,
        rtxn: &RoTxn,
        words: &[S],
    ) -> heed::Result<Option<Vec<Vec<String>>>> {
        let words: Vec<_> = words.iter().map(|s| s.as_ref().to_owned()).collect();
        Ok(self.synonyms(rtxn)?.remove(&words))
    }
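
    // Illustrative usage (not part of the original file): `synonyms` is keyed by word sequences,
    // so looking up the alternatives of a multi-word phrase goes through `words_synonyms`:
    //
    //     let alternatives = index.words_synonyms(&rtxn, &["new", "york"])?;
    //     // Some(...) when a synonym was registered for ["new", "york"], None otherwise.
    //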
2021-02-03 17:30:33 +08:00
/* words prefixes fst */
/// Writes the FST which is the words prefixes dictionnary of the engine.
2021-06-17 00:33:33 +08:00
pub ( crate ) fn put_words_prefixes_fst < A : AsRef < [ u8 ] > > (
& self ,
wtxn : & mut RwTxn ,
fst : & fst ::Set < A > ,
) -> heed ::Result < ( ) > {
self . main . put ::< _ , Str , ByteSlice > (
wtxn ,
main_key ::WORDS_PREFIXES_FST_KEY ,
fst . as_fst ( ) . as_bytes ( ) ,
)
2021-02-03 17:30:33 +08:00
}
/// Returns the FST which is the words prefixes dictionnary of the engine.
2021-06-14 22:46:19 +08:00
pub fn words_prefixes_fst < ' t > ( & self , rtxn : & ' t RoTxn ) -> Result < fst ::Set < Cow < ' t , [ u8 ] > > > {
2021-06-15 17:06:42 +08:00
match self . main . get ::< _ , Str , ByteSlice > ( rtxn , main_key ::WORDS_PREFIXES_FST_KEY ) ? {
2021-02-03 17:30:33 +08:00
Some ( bytes ) = > Ok ( fst ::Set ::new ( bytes ) ? . map_data ( Cow ::Borrowed ) ? ) ,
None = > Ok ( fst ::Set ::default ( ) . map_data ( Cow ::Owned ) ? ) ,
}
}
2021-02-18 21:35:14 +08:00
/* word documents count */
/// Returns the number of documents ids associated with the given word,
/// it is much faster than deserializing the bitmap and getting the length of it.
2021-02-18 21:59:37 +08:00
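    ///
    /// A rough usage sketch (not compiled as a doctest; `index` and `rtxn` are
    /// assumed to be an opened `Index` and a read transaction):
    ///
    /// ```ignore
    /// // Count the documents containing "hello" without decoding the whole bitmap.
    /// let count = index.word_documents_count(&rtxn, "hello")?.unwrap_or(0);
    /// ```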
    pub fn word_documents_count(&self, rtxn: &RoTxn, word: &str) -> heed::Result<Option<u64>> {
        self.word_docids.remap_data_type::<RoaringBitmapLenCodec>().get(rtxn, word)
    }

    /* documents */

    /// Returns an iterator over the requested documents. The next item will be an error if a document is missing.
    pub fn iter_documents<'a, 't: 'a>(
        &'a self,
        rtxn: &'t RoTxn,
        ids: impl IntoIterator<Item = DocumentId> + 'a,
    ) -> Result<impl Iterator<Item = Result<(DocumentId, obkv::KvReaderU16<'t>)>> + 'a> {
        Ok(ids.into_iter().map(move |id| {
            let kv = self
                .documents
                .get(rtxn, &BEU32::new(id))?
                .ok_or(UserError::UnknownInternalDocumentId { document_id: id })?;
            Ok((id, kv))
        }))
    }

    /// Returns a [`Vec`] of the requested documents. Returns an error if a document is missing.
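    ///
    /// A short sketch of the intended call pattern (not compiled as a doctest;
    /// the internal document ids `0` and `1` are made up for the example):
    ///
    /// ```ignore
    /// let rtxn = index.read_txn()?;
    /// for (docid, obkv) in index.documents(&rtxn, [0, 1])? {
    ///     // Each `obkv` maps field ids to raw field values.
    ///     println!("document {} has {} fields", docid, obkv.iter().count());
    /// }
    /// ```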
    pub fn documents<'t>(
        &self,
        rtxn: &'t RoTxn,
        ids: impl IntoIterator<Item = DocumentId>,
    ) -> Result<Vec<(DocumentId, obkv::KvReaderU16<'t>)>> {
        self.iter_documents(rtxn, ids)?.collect()
    }

    /// Returns an iterator over all the documents in the index.
    pub fn all_documents<'a, 't: 'a>(
        &'a self,
        rtxn: &'t RoTxn,
    ) -> Result<impl Iterator<Item = Result<(DocumentId, obkv::KvReaderU16<'t>)>> + 'a> {
        self.iter_documents(rtxn, self.documents_ids(rtxn)?)
    }

    pub fn facets_distribution<'a>(&'a self, rtxn: &'a RoTxn) -> FacetDistribution<'a> {
        FacetDistribution::new(rtxn, self)
    }

    pub fn search<'a>(&'a self, rtxn: &'a RoTxn) -> Search<'a> {
        Search::new(rtxn, self)
    }

    /// Returns the index creation time.
    pub fn created_at(&self, rtxn: &RoTxn) -> Result<OffsetDateTime> {
        Ok(self
            .main
            .get::<_, Str, SerdeJson<OffsetDateTime>>(rtxn, main_key::CREATED_AT_KEY)?
            .ok_or(InternalError::DatabaseMissingEntry {
                db_name: db_name::MAIN,
                key: Some(main_key::CREATED_AT_KEY),
            })?)
    }

    /// Returns the index last updated time.
    pub fn updated_at(&self, rtxn: &RoTxn) -> Result<OffsetDateTime> {
        Ok(self
            .main
            .get::<_, Str, SerdeJson<OffsetDateTime>>(rtxn, main_key::UPDATED_AT_KEY)?
            .ok_or(InternalError::DatabaseMissingEntry {
                db_name: db_name::MAIN,
                key: Some(main_key::UPDATED_AT_KEY),
            })?)
    }

    pub(crate) fn set_updated_at(
        &self,
        wtxn: &mut RwTxn,
        time: &OffsetDateTime,
    ) -> heed::Result<()> {
        self.main.put::<_, Str, SerdeJson<OffsetDateTime>>(wtxn, main_key::UPDATED_AT_KEY, time)
    }

    pub fn authorize_typos(&self, txn: &RoTxn) -> heed::Result<bool> {
        // It is not possible to put a bool in heed with OwnedType, so we put a u8 instead. We
        // identify 0 as being false, and anything else as true. The absence of a value is true,
        // because by default, we authorize typos.
        match self.main.get::<_, Str, OwnedType<u8>>(txn, main_key::AUTHORIZE_TYPOS)? {
            Some(0) => Ok(false),
            _ => Ok(true),
        }
    }

    pub(crate) fn put_authorize_typos(&self, txn: &mut RwTxn, flag: bool) -> heed::Result<()> {
        // It is not possible to put a bool in heed with OwnedType, so we put a u8 instead. We
        // identify 0 as being false, and anything else as true. The absence of a value is true,
        // because by default, we authorize typos.
        self.main.put::<_, Str, OwnedType<u8>>(txn, main_key::AUTHORIZE_TYPOS, &(flag as u8))?;
        Ok(())
    }
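
    /// A small sketch of how the typo-length thresholds are meant to be read and
    /// written (not compiled as a doctest; `index` is an assumed opened `Index`
    /// and `put_min_word_len_one_typo` is crate-private):
    ///
    /// ```ignore
    /// let mut wtxn = index.write_txn()?;
    /// // With this setting, only words of at least 6 characters accept a single typo.
    /// index.put_min_word_len_one_typo(&mut wtxn, 6)?;
    /// wtxn.commit()?;
    ///
    /// let rtxn = index.read_txn()?;
    /// assert_eq!(index.min_word_len_one_typo(&rtxn)?, 6);
    /// ```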
    pub fn min_word_len_one_typo(&self, txn: &RoTxn) -> heed::Result<u8> {
        // The absence of a value means that the default minimum word length applies.
        Ok(self
            .main
            .get::<_, Str, OwnedType<u8>>(txn, main_key::ONE_TYPO_WORD_LEN)?
            .unwrap_or(DEFAULT_MIN_WORD_LEN_ONE_TYPO))
    }

    pub(crate) fn put_min_word_len_one_typo(&self, txn: &mut RwTxn, val: u8) -> heed::Result<()> {
        self.main.put::<_, Str, OwnedType<u8>>(txn, main_key::ONE_TYPO_WORD_LEN, &val)?;
        Ok(())
    }

    pub fn min_word_len_two_typos(&self, txn: &RoTxn) -> heed::Result<u8> {
        // The absence of a value means that the default minimum word length applies.
        Ok(self
            .main
            .get::<_, Str, OwnedType<u8>>(txn, main_key::TWO_TYPOS_WORD_LEN)?
            .unwrap_or(DEFAULT_MIN_WORD_LEN_TWO_TYPOS))
    }

    pub(crate) fn put_min_word_len_two_typos(&self, txn: &mut RwTxn, val: u8) -> heed::Result<()> {
        self.main.put::<_, Str, OwnedType<u8>>(txn, main_key::TWO_TYPOS_WORD_LEN, &val)?;
        Ok(())
    }

    /// List the words on which typos are not allowed.
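    ///
    /// A usage sketch (not compiled as a doctest; `index` and `rtxn` are assumed):
    ///
    /// ```ignore
    /// if let Some(exact_words) = index.exact_words(&rtxn)? {
    ///     // The set is an FST, so membership tests do not allocate.
    ///     let is_exact = exact_words.contains("shoe");
    /// }
    /// ```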
    pub fn exact_words<'t>(&self, txn: &'t RoTxn) -> Result<Option<fst::Set<Cow<'t, [u8]>>>> {
        match self.main.get::<_, Str, ByteSlice>(txn, main_key::EXACT_WORDS)? {
            Some(bytes) => Ok(Some(fst::Set::new(bytes)?.map_data(Cow::Borrowed)?)),
            None => Ok(None),
        }
    }

    pub(crate) fn put_exact_words<A: AsRef<[u8]>>(
        &self,
        txn: &mut RwTxn,
        words: &fst::Set<A>,
    ) -> Result<()> {
        self.main.put::<_, Str, ByteSlice>(
            txn,
            main_key::EXACT_WORDS,
            words.as_fst().as_bytes(),
        )?;
        Ok(())
    }

    /// Returns the exact attributes: attributes for which typos are disallowed.
    pub fn exact_attributes<'t>(&self, txn: &'t RoTxn) -> Result<Vec<&'t str>> {
        Ok(self
            .main
            .get::<_, Str, SerdeBincode<Vec<&str>>>(txn, main_key::EXACT_ATTRIBUTES)?
            .unwrap_or_default())
    }

    /// Returns the list of exact attributes field ids.
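    ///
    /// A sketch of the expected relationship with [`Self::exact_attributes`]
    /// (not compiled as a doctest; `index` and `rtxn` are assumed):
    ///
    /// ```ignore
    /// let names = index.exact_attributes(&rtxn)?;
    /// let ids = index.exact_attributes_ids(&rtxn)?;
    /// // Attributes not yet present in the fields ids map are skipped,
    /// // so `ids` can be smaller than `names`.
    /// assert!(ids.len() <= names.len());
    /// ```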
    pub fn exact_attributes_ids(&self, txn: &RoTxn) -> Result<HashSet<FieldId>> {
        let attrs = self.exact_attributes(txn)?;
        let fid_map = self.fields_ids_map(txn)?;
        Ok(attrs.iter().filter_map(|attr| fid_map.id(attr)).collect())
    }

    /// Writes the exact attributes to the database.
    pub(crate) fn put_exact_attributes(&self, txn: &mut RwTxn, attrs: &[&str]) -> Result<()> {
        self.main.put::<_, Str, SerdeBincode<&[&str]>>(txn, main_key::EXACT_ATTRIBUTES, &attrs)?;
        Ok(())
    }

    /// Clears the exact attributes from the store.
    pub(crate) fn delete_exact_attributes(&self, txn: &mut RwTxn) -> heed::Result<bool> {
        self.main.delete::<_, Str>(txn, main_key::EXACT_ATTRIBUTES)
    }

    pub fn max_values_per_facet(&self, txn: &RoTxn) -> heed::Result<Option<usize>> {
        self.main.get::<_, Str, OwnedType<usize>>(txn, main_key::MAX_VALUES_PER_FACET)
    }

    pub(crate) fn put_max_values_per_facet(&self, txn: &mut RwTxn, val: usize) -> heed::Result<()> {
        self.main.put::<_, Str, OwnedType<usize>>(txn, main_key::MAX_VALUES_PER_FACET, &val)
    }

    pub(crate) fn delete_max_values_per_facet(&self, txn: &mut RwTxn) -> heed::Result<bool> {
        self.main.delete::<_, Str>(txn, main_key::MAX_VALUES_PER_FACET)
    }
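
    /// A short sketch of reading the facet-value ordering (not compiled as a
    /// doctest; `index` and `rtxn` are assumed):
    ///
    /// ```ignore
    /// let orders = index.sort_facet_values_by(&rtxn)?;
    /// // The `*` entry always exists and is the fallback for unlisted facets.
    /// assert!(orders.contains_key("*"));
    /// ```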
    pub fn sort_facet_values_by(&self, txn: &RoTxn) -> heed::Result<HashMap<String, OrderBy>> {
        let mut orders = self
            .main
            .get::<_, Str, SerdeJson<HashMap<String, OrderBy>>>(
                txn,
                main_key::SORT_FACET_VALUES_BY,
            )?
            .unwrap_or_default();
        // Insert the default ordering if it is not already overwritten by the user.
        orders.entry("*".to_string()).or_insert(OrderBy::Lexicographic);
        Ok(orders)
    }

    pub(crate) fn put_sort_facet_values_by(
        &self,
        txn: &mut RwTxn,
        val: &HashMap<String, OrderBy>,
    ) -> heed::Result<()> {
        self.main.put::<_, Str, SerdeJson<_>>(txn, main_key::SORT_FACET_VALUES_BY, &val)
    }

    pub(crate) fn delete_sort_facet_values_by(&self, txn: &mut RwTxn) -> heed::Result<bool> {
        self.main.delete::<_, Str>(txn, main_key::SORT_FACET_VALUES_BY)
    }

    pub fn pagination_max_total_hits(&self, txn: &RoTxn) -> heed::Result<Option<usize>> {
        self.main.get::<_, Str, OwnedType<usize>>(txn, main_key::PAGINATION_MAX_TOTAL_HITS)
    }

    pub(crate) fn put_pagination_max_total_hits(
        &self,
        txn: &mut RwTxn,
        val: usize,
    ) -> heed::Result<()> {
        self.main.put::<_, Str, OwnedType<usize>>(txn, main_key::PAGINATION_MAX_TOTAL_HITS, &val)
    }

    pub(crate) fn delete_pagination_max_total_hits(&self, txn: &mut RwTxn) -> heed::Result<bool> {
        self.main.delete::<_, Str>(txn, main_key::PAGINATION_MAX_TOTAL_HITS)
    }

    /* script language docids */

    /// Retrieves all the document ids that correspond to the given (Script, Language) key,
    /// or `None` if no document was indexed under that key.
    pub fn script_language_documents_ids(
        &self,
        rtxn: &RoTxn,
        key: &(Script, Language),
    ) -> heed::Result<Option<RoaringBitmap>> {
        Ok(self.script_language_docids.get(rtxn, key)?)
    }
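
    /// A sketch of how the per-script language distribution can be consumed
    /// (not compiled as a doctest; `index` and `rtxn` are assumed):
    ///
    /// ```ignore
    /// // Only languages covering more than roughly 5% of the indexed documents are kept.
    /// for (script, languages) in index.script_language(&rtxn)? {
    ///     println!("{script:?} -> {languages:?}");
    /// }
    /// ```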
    pub fn script_language(&self, rtxn: &RoTxn) -> heed::Result<HashMap<Script, Vec<Language>>> {
        let mut script_language: HashMap<Script, Vec<Language>> = HashMap::new();
        let mut script_language_doc_count: Vec<(Script, Language, u64)> = Vec::new();
        let mut total = 0;
        for sl in self.script_language_docids.iter(rtxn)? {
            let ((script, language), docids) = sl?;

            // keep only the languages that contain at least one document.
            let remaining_documents_count = docids.len();
            total += remaining_documents_count;
            if remaining_documents_count > 0 {
                script_language_doc_count.push((script, language, remaining_documents_count));
            }
        }

        let threshold = total / 20; // 5% (arbitrary)
        for (script, language, count) in script_language_doc_count {
            if count > threshold {
                if let Some(languages) = script_language.get_mut(&script) {
                    (*languages).push(language);
                } else {
                    script_language.insert(script, vec![language]);
                }
            }
        }

        Ok(script_language)
    }
}
#[ cfg(test) ]
pub ( crate ) mod tests {
use std ::collections ::HashSet ;
use std ::ops ::Deref ;
use big_s ::S ;
use heed ::{ EnvOpenOptions , RwTxn } ;
use maplit ::hashset ;
use tempfile ::TempDir ;
use crate ::documents ::DocumentsBatchReader ;
use crate ::error ::{ Error , InternalError } ;
use crate ::index ::{ DEFAULT_MIN_WORD_LEN_ONE_TYPO , DEFAULT_MIN_WORD_LEN_TWO_TYPOS } ;
use crate ::update ::{
self , IndexDocuments , IndexDocumentsConfig , IndexDocumentsMethod , IndexerConfig , Settings ,
} ;
use crate ::{ db_snap , obkv_to_json , Filter , Index , Search , SearchResult } ;
pub ( crate ) struct TempIndex {
pub inner : Index ,
pub indexer_config : IndexerConfig ,
pub index_documents_config : IndexDocumentsConfig ,
_tempdir : TempDir ,
}
impl Deref for TempIndex {
type Target = Index ;
fn deref ( & self ) -> & Self ::Target {
& self . inner
}
}
impl TempIndex {
/// Creates a temporary index
pub fn new_with_map_size ( size : usize ) -> Self {
let mut options = EnvOpenOptions ::new ( ) ;
options . map_size ( size ) ;
let _tempdir = TempDir ::new_in ( " . " ) . unwrap ( ) ;
let inner = Index ::new ( options , _tempdir . path ( ) ) . unwrap ( ) ;
let indexer_config = IndexerConfig ::default ( ) ;
let index_documents_config = IndexDocumentsConfig ::default ( ) ;
Self { inner , indexer_config , index_documents_config , _tempdir }
}
/// Creates a temporary index, with a default `4096 * 2000` size. This should be enough for
/// most tests.
pub fn new ( ) -> Self {
Self ::new_with_map_size ( 4096 * 2000 )
}
pub fn add_documents_using_wtxn < ' t , R > (
& ' t self ,
wtxn : & mut RwTxn < ' t , '_ > ,
documents : DocumentsBatchReader < R > ,
) -> Result < ( ) , crate ::error ::Error >
where
R : std ::io ::Read + std ::io ::Seek ,
{
let builder = IndexDocuments ::new (
wtxn ,
self ,
& self . indexer_config ,
self . index_documents_config . clone ( ) ,
| _ | ( ) ,
| | false ,
)
. unwrap ( ) ;
let ( builder , user_error ) = builder . add_documents ( documents ) . unwrap ( ) ;
user_error ? ;
builder . execute ( ) ? ;
Ok ( ( ) )
}
pub fn add_documents < R > (
& self ,
documents : DocumentsBatchReader < R > ,
) -> Result < ( ) , crate ::error ::Error >
where
R : std ::io ::Read + std ::io ::Seek ,
{
let mut wtxn = self . write_txn ( ) . unwrap ( ) ;
self . add_documents_using_wtxn ( & mut wtxn , documents ) ? ;
wtxn . commit ( ) . unwrap ( ) ;
Ok ( ( ) )
}
pub fn update_settings (
& self ,
update : impl Fn ( & mut Settings ) ,
) -> Result < ( ) , crate ::error ::Error > {
let mut wtxn = self . write_txn ( ) . unwrap ( ) ;
self . update_settings_using_wtxn ( & mut wtxn , update ) ? ;
wtxn . commit ( ) . unwrap ( ) ;
Ok ( ( ) )
}
pub fn update_settings_using_wtxn < ' t > (
& ' t self ,
wtxn : & mut RwTxn < ' t , '_ > ,
update : impl Fn ( & mut Settings ) ,
) -> Result < ( ) , crate ::error ::Error > {
let mut builder = update ::Settings ::new ( wtxn , & self . inner , & self . indexer_config ) ;
update ( & mut builder ) ;
builder . execute ( drop , | | false ) ? ;
Ok ( ( ) )
}
pub fn delete_documents ( & self , external_document_ids : Vec < String > ) {
let mut wtxn = self . write_txn ( ) . unwrap ( ) ;
let builder = IndexDocuments ::new (
& mut wtxn ,
self ,
& self . indexer_config ,
self . index_documents_config . clone ( ) ,
| _ | ( ) ,
| | false ,
)
. unwrap ( ) ;
let ( builder , user_error ) = builder . remove_documents ( external_document_ids ) . unwrap ( ) ;
user_error . unwrap ( ) ;
builder . execute ( ) . unwrap ( ) ;
wtxn . commit ( ) . unwrap ( ) ;
}
pub fn delete_document ( & self , external_document_id : & str ) {
self . delete_documents ( vec! [ external_document_id . to_string ( ) ] )
}
}
#[ test ]
fn aborting_indexation ( ) {
use std ::sync ::atomic ::AtomicBool ;
use std ::sync ::atomic ::Ordering ::Relaxed ;
let index = TempIndex ::new ( ) ;
let mut wtxn = index . inner . write_txn ( ) . unwrap ( ) ;
let should_abort = AtomicBool ::new ( false ) ;
let builder = IndexDocuments ::new (
& mut wtxn ,
& index . inner ,
& index . indexer_config ,
index . index_documents_config . clone ( ) ,
| _ | ( ) ,
| | should_abort . load ( Relaxed ) ,
)
. unwrap ( ) ;
let ( builder , user_error ) = builder
. add_documents ( documents! ( [
{ " id " : 1 , " name " : " kevin " } ,
{ " id " : 2 , " name " : " bob " , " age " : 20 } ,
{ " id " : 2 , " name " : " bob " , " age " : 20 } ,
] ) )
. unwrap ( ) ;
user_error . unwrap ( ) ;
should_abort . store ( true , Relaxed ) ;
let err = builder . execute ( ) . unwrap_err ( ) ;
assert! ( matches! ( err , Error ::InternalError ( InternalError ::AbortedIndexation ) ) ) ;
}
#[ test ]
fn initial_field_distribution ( ) {
let index = TempIndex ::new ( ) ;
index
. add_documents ( documents! ( [
{ " id " : 1 , " name " : " kevin " } ,
{ " id " : 2 , " name " : " bob " , " age " : 20 } ,
{ " id " : 2 , " name " : " bob " , " age " : 20 } ,
] ) )
. unwrap ( ) ;
db_snap! ( index , field_distribution , 1 ) ;
db_snap! ( index , word_docids ,
@ r ###"
1 [ 0 , ]
2 [ 1 , ]
20 [ 1 , ]
bob [ 1 , ]
kevin [ 0 , ]
" ###
) ;
db_snap! ( index , field_distribution ) ;
db_snap! ( index , field_distribution ,
@ r ###"
age 1 |
id 2 |
name 2 |
" ###
) ;
// snapshot_index!(&index, "1", include: "^field_distribution$");
// we add all the documents a second time. we are supposed to get the same
// field_distribution in the end
index
. add_documents ( documents! ( [
{ " id " : 1 , " name " : " kevin " } ,
{ " id " : 2 , " name " : " bob " , " age " : 20 } ,
{ " id " : 2 , " name " : " bob " , " age " : 20 } ,
] ) )
. unwrap ( ) ;
db_snap! ( index , field_distribution ,
@ r ###"
age 1 |
id 2 |
name 2 |
" ###
) ;
// then we update a document by removing one field and another by adding one field
index
. add_documents ( documents! ( [
{ " id " : 1 , " name " : " kevin " , " has_dog " : true } ,
{ " id " : 2 , " name " : " bob " }
] ) )
. unwrap ( ) ;
db_snap! ( index , field_distribution ,
@ r ###"
has_dog 1 |
id 2 |
name 2 |
" ###
) ;
}
#[ test ]
fn put_and_retrieve_disable_typo ( ) {
let index = TempIndex ::new ( ) ;
let mut txn = index . write_txn ( ) . unwrap ( ) ;
// default value is true
assert! ( index . authorize_typos ( & txn ) . unwrap ( ) ) ;
// set to false
index . put_authorize_typos ( & mut txn , false ) . unwrap ( ) ;
txn . commit ( ) . unwrap ( ) ;
let txn = index . read_txn ( ) . unwrap ( ) ;
assert! ( ! index . authorize_typos ( & txn ) . unwrap ( ) ) ;
}
#[ test ]
fn set_min_word_len_for_typos ( ) {
let index = TempIndex ::new ( ) ;
let mut txn = index . write_txn ( ) . unwrap ( ) ;
assert_eq! ( index . min_word_len_one_typo ( & txn ) . unwrap ( ) , DEFAULT_MIN_WORD_LEN_ONE_TYPO ) ;
assert_eq! ( index . min_word_len_two_typos ( & txn ) . unwrap ( ) , DEFAULT_MIN_WORD_LEN_TWO_TYPOS ) ;
index . put_min_word_len_one_typo ( & mut txn , 3 ) . unwrap ( ) ;
index . put_min_word_len_two_typos ( & mut txn , 15 ) . unwrap ( ) ;
txn . commit ( ) . unwrap ( ) ;
let txn = index . read_txn ( ) . unwrap ( ) ;
assert_eq! ( index . min_word_len_one_typo ( & txn ) . unwrap ( ) , 3 ) ;
assert_eq! ( index . min_word_len_two_typos ( & txn ) . unwrap ( ) , 15 ) ;
}
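
    // An additional illustrative round-trip for the exact-attributes accessors
    // defined above. It only relies on the `TempIndex` helper from this module
    // and on `put_exact_attributes`/`exact_attributes`; treat it as a sketch
    // rather than part of the original suite, and the test name is made up.
    #[test]
    fn put_and_retrieve_exact_attributes() {
        let index = TempIndex::new();

        let mut txn = index.write_txn().unwrap();
        // No exact attributes are set by default.
        assert!(index.exact_attributes(&txn).unwrap().is_empty());

        index.put_exact_attributes(&mut txn, &["title", "overview"]).unwrap();
        txn.commit().unwrap();

        let txn = index.read_txn().unwrap();
        assert_eq!(index.exact_attributes(&txn).unwrap(), vec!["title", "overview"]);
    }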
#[ test ]
fn add_documents_and_set_searchable_fields ( ) {
let index = TempIndex ::new ( ) ;
index
. add_documents ( documents! ( [
{ " id " : 1 , " doggo " : " kevin " } ,
{ " id " : 2 , " doggo " : { " name " : " bob " , " age " : 20 } } ,
{ " id " : 3 , " name " : " jean " , " age " : 25 } ,
] ) )
. unwrap ( ) ;
index
. update_settings ( | settings | {
settings . set_searchable_fields ( vec! [ S ( " doggo " ) , S ( " name " ) ] ) ;
} )
. unwrap ( ) ;
// ensure we get the right real searchable fields + user defined searchable fields
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let real = index . searchable_fields ( & rtxn ) . unwrap ( ) . unwrap ( ) ;
assert_eq! ( real , & [ " doggo " , " name " , " doggo.name " , " doggo.age " ] ) ;
let user_defined = index . user_defined_searchable_fields ( & rtxn ) . unwrap ( ) . unwrap ( ) ;
assert_eq! ( user_defined , & [ " doggo " , " name " ] ) ;
}
#[ test ]
fn set_searchable_fields_and_add_documents ( ) {
let index = TempIndex ::new ( ) ;
index
. update_settings ( | settings | {
settings . set_searchable_fields ( vec! [ S ( " doggo " ) , S ( " name " ) ] ) ;
} )
. unwrap ( ) ;
// ensure we get the right real searchable fields + user defined searchable fields
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let real = index . searchable_fields ( & rtxn ) . unwrap ( ) . unwrap ( ) ;
assert_eq! ( real , & [ " doggo " , " name " ] ) ;
let user_defined = index . user_defined_searchable_fields ( & rtxn ) . unwrap ( ) . unwrap ( ) ;
assert_eq! ( user_defined , & [ " doggo " , " name " ] ) ;
index
. add_documents ( documents! ( [
{ " id " : 1 , " doggo " : " kevin " } ,
{ " id " : 2 , " doggo " : { " name " : " bob " , " age " : 20 } } ,
{ " id " : 3 , " name " : " jean " , " age " : 25 } ,
] ) )
. unwrap ( ) ;
// ensure we get the right real searchable fields + user defined searchable fields
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let real = index . searchable_fields ( & rtxn ) . unwrap ( ) . unwrap ( ) ;
assert_eq! ( real , & [ " doggo " , " name " , " doggo.name " , " doggo.age " ] ) ;
let user_defined = index . user_defined_searchable_fields ( & rtxn ) . unwrap ( ) . unwrap ( ) ;
assert_eq! ( user_defined , & [ " doggo " , " name " ] ) ;
}
#[ test ]
fn test_basic_geo_bounding_box ( ) {
let index = TempIndex ::new ( ) ;
index
. update_settings ( | settings | {
settings . set_filterable_fields ( hashset! { S ( " _geo " ) } ) ;
} )
. unwrap ( ) ;
index
. add_documents ( documents! ( [
{ " id " : 0 , " _geo " : { " lat " : " 0 " , " lng " : " 0 " } } ,
{ " id " : 1 , " _geo " : { " lat " : 0 , " lng " : " -175 " } } ,
{ " id " : 2 , " _geo " : { " lat " : " 0 " , " lng " : 175 } } ,
{ " id " : 3 , " _geo " : { " lat " : 85 , " lng " : 0 } } ,
{ " id " : 4 , " _geo " : { " lat " : " -85 " , " lng " : " 0 " } } ,
] ) )
. unwrap ( ) ;
// now run a series of `_geoBoundingBox` filters against these documents
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let mut search = index . search ( & rtxn ) ;
// exact match a document
let search_result = search
. filter ( Filter ::from_str ( " _geoBoundingBox([0, 0], [0, 0]) " ) . unwrap ( ) . unwrap ( ) )
. execute ( )
. unwrap ( ) ;
insta ::assert_debug_snapshot! ( search_result . candidates , @ " RoaringBitmap<[0]> " ) ;
// match a document in the middle of the rectangle
let search_result = search
. filter ( Filter ::from_str ( " _geoBoundingBox([10, 10], [-10, -10]) " ) . unwrap ( ) . unwrap ( ) )
. execute ( )
. unwrap ( ) ;
insta ::assert_debug_snapshot! ( search_result . candidates , @ " RoaringBitmap<[0]> " ) ;
// select everything
let search_result = search
. filter ( Filter ::from_str ( " _geoBoundingBox([90, 180], [-90, -180]) " ) . unwrap ( ) . unwrap ( ) )
. execute ( )
. unwrap ( ) ;
insta ::assert_debug_snapshot! ( search_result . candidates , @ " RoaringBitmap<[0, 1, 2, 3, 4]> " ) ;
// go on the edge of the longitude
let search_result = search
. filter ( Filter ::from_str ( " _geoBoundingBox([0, -170], [0, 180]) " ) . unwrap ( ) . unwrap ( ) )
. execute ( )
. unwrap ( ) ;
insta ::assert_debug_snapshot! ( search_result . candidates , @ " RoaringBitmap<[1]> " ) ;
// go on the other edge of the longitude
let search_result = search
. filter ( Filter ::from_str ( " _geoBoundingBox([0, -180], [0, 170]) " ) . unwrap ( ) . unwrap ( ) )
. execute ( )
. unwrap ( ) ;
insta ::assert_debug_snapshot! ( search_result . candidates , @ " RoaringBitmap<[2]> " ) ;
// wrap around the longitude
let search_result = search
. filter ( Filter ::from_str ( " _geoBoundingBox([0, -170], [0, 170]) " ) . unwrap ( ) . unwrap ( ) )
. execute ( )
. unwrap ( ) ;
insta ::assert_debug_snapshot! ( search_result . candidates , @ " RoaringBitmap<[1, 2]> " ) ;
// go on the edge of the latitude
let search_result = search
. filter ( Filter ::from_str ( " _geoBoundingBox([90, 0], [80, 0]) " ) . unwrap ( ) . unwrap ( ) )
. execute ( )
. unwrap ( ) ;
insta ::assert_debug_snapshot! ( search_result . candidates , @ " RoaringBitmap<[3]> " ) ;
// go on the edge of the latitude
let search_result = search
. filter ( Filter ::from_str ( " _geoBoundingBox([-80, 0], [-90, 0]) " ) . unwrap ( ) . unwrap ( ) )
. execute ( )
. unwrap ( ) ;
insta ::assert_debug_snapshot! ( search_result . candidates , @ " RoaringBitmap<[4]> " ) ;
// the requests that don't make sense
// try to wrap around the latitude
let error = search
. filter ( Filter ::from_str ( " _geoBoundingBox([-80, 0], [80, 0]) " ) . unwrap ( ) . unwrap ( ) )
. execute ( )
. unwrap_err ( ) ;
insta ::assert_display_snapshot! (
error ,
@ r ###"
The top latitude `-80` is below the bottom latitude `80`.
32:33 _geoBoundingBox([-80, 0], [80, 0])
" ###
) ;
// send a top latitude lower than the bottom latitude
let error = search
. filter ( Filter ::from_str ( " _geoBoundingBox([-10, 0], [10, 0]) " ) . unwrap ( ) . unwrap ( ) )
. execute ( )
. unwrap_err ( ) ;
insta ::assert_display_snapshot! (
error ,
@ r ###"
The top latitude `-10` is below the bottom latitude `10`.
32:33 _geoBoundingBox([-10, 0], [10, 0])
" ###
) ;
}
#[ test ]
fn replace_documents_external_ids_and_soft_deletion_check ( ) {
use big_s ::S ;
use maplit ::hashset ;
let index = TempIndex ::new ( ) ;
2022-12-05 17:26:53 +08:00
index
. update_settings ( | settings | {
settings . set_primary_key ( " id " . to_owned ( ) ) ;
settings . set_filterable_fields ( hashset! { S ( " doggo " ) } ) ;
} )
. unwrap ( ) ;
let mut docs = vec! [ ] ;
for i in 0 .. 4 {
docs . push ( serde_json ::json! (
{ " id " : i , " doggo " : i }
) ) ;
}
index . add_documents ( documents! ( docs ) ) . unwrap ( ) ;
db_snap! ( index , documents_ids , @ " [0, 1, 2, 3, ] " ) ;
db_snap! ( index , external_documents_ids , 1 , @ r ###"
soft :
hard :
0 0
1 1
2 2
3 3
" ###);
db_snap! ( index , facet_id_f64_docids , 1 , @ r ###"
1 0 0 1 [ 0 , ]
1 0 1 1 [ 1 , ]
1 0 2 1 [ 2 , ]
1 0 3 1 [ 3 , ]
" ###);
let mut docs = vec! [ ] ;
for i in 0 .. 3 {
docs . push ( serde_json ::json! (
{ " id " : i , " doggo " : i + 1 }
) ) ;
}
index . add_documents ( documents! ( docs ) ) . unwrap ( ) ;
db_snap! ( index , documents_ids , @ " [3, 4, 5, 6, ] " ) ;
db_snap! ( index , external_documents_ids , 2 , @ r ###"
soft :
hard :
0 4
1 5
2 6
3 3
" ###);
db_snap! ( index , facet_id_f64_docids , 2 , @ r ###"
1 0 0 1 [ 0 , ]
1 0 1 1 [ 1 , 4 , ]
1 0 2 1 [ 2 , 5 , ]
1 0 3 1 [ 3 , 6 , ]
" ###);
index
. add_documents ( documents! ( [ { " id " : 3 , " doggo " : 4 } , { " id " : 3 , " doggo " : 5 } , { " id " : 3 , " doggo " : 4 } ] ) )
. unwrap ( ) ;
db_snap! ( index , documents_ids , @ " [4, 5, 6, 7, ] " ) ;
db_snap! ( index , external_documents_ids , 3 , @ r ###"
soft :
3 7
hard :
0 4
1 5
2 6
3 3
" ###);
db_snap! ( index , facet_id_f64_docids , 3 , @ r ###"
1 0 0 1 [ 0 , ]
1 0 1 1 [ 1 , 4 , ]
1 0 2 1 [ 2 , 5 , ]
1 0 3 1 [ 3 , 6 , ]
1 0 4 1 [ 7 , ]
" ###);
index
. update_settings ( | settings | {
settings . set_distinct_field ( " id " . to_owned ( ) ) ;
} )
. unwrap ( ) ;
db_snap! ( index , documents_ids , @ " [4, 5, 6, 7, ] " ) ;
db_snap! ( index , external_documents_ids , 3 , @ r ###"
soft :
hard :
0 4
1 5
2 6
3 7
" ###);
db_snap! ( index , facet_id_f64_docids , 3 , @ r ###"
0 0 0 1 [ 4 , ]
0 0 1 1 [ 5 , ]
0 0 2 1 [ 6 , ]
0 0 3 1 [ 7 , ]
1 0 1 1 [ 4 , ]
1 0 2 1 [ 5 , ]
1 0 3 1 [ 6 , ]
1 0 4 1 [ 7 , ]
" ###);
}
#[ test ]
fn replace_documents_in_batches_external_ids_and_soft_deletion_check ( ) {
use big_s ::S ;
use maplit ::hashset ;
let index = TempIndex ::new ( ) ;
index
. update_settings ( | settings | {
settings . set_primary_key ( " id " . to_owned ( ) ) ;
settings . set_filterable_fields ( hashset! { S ( " doggo " ) } ) ;
} )
. unwrap ( ) ;
let add_documents = | index : & TempIndex , docs : Vec < Vec < serde_json ::Value > > | {
let mut wtxn = index . write_txn ( ) . unwrap ( ) ;
let mut builder = IndexDocuments ::new (
& mut wtxn ,
index ,
& index . indexer_config ,
index . index_documents_config . clone ( ) ,
| _ | ( ) ,
| | false ,
)
. unwrap ( ) ;
for docs in docs {
( builder , _ ) = builder . add_documents ( documents! ( docs ) ) . unwrap ( ) ;
}
builder . execute ( ) . unwrap ( ) ;
wtxn . commit ( ) . unwrap ( ) ;
} ;
// First Batch
{
let mut docs1 = vec! [ ] ;
for i in 0 .. 4 {
docs1 . push ( serde_json ::json! (
{ " id " : i , " doggo " : i }
) ) ;
}
add_documents ( & index , vec! [ docs1 ] ) ;
db_snap! ( index , documents_ids , @ " [0, 1, 2, 3, ] " ) ;
db_snap! ( index , external_documents_ids , 1 , @ r ###"
soft :
hard :
0 0
1 1
2 2
3 3
" ###);
db_snap! ( index , facet_id_f64_docids , 1 , @ r ###"
1 0 0 1 [ 0 , ]
1 0 1 1 [ 1 , ]
1 0 2 1 [ 2 , ]
1 0 3 1 [ 3 , ]
" ###);
}
// Second Batch: replace the documents with soft-deletion
{
let mut docs1 = vec! [ ] ;
for i in 0 .. 3 {
docs1 . push ( serde_json ::json! (
{ " id " : i , " doggo " : i + 1 }
) ) ;
}
let mut docs2 = vec! [ ] ;
for i in 0 .. 3 {
docs2 . push ( serde_json ::json! (
{ " id " : i , " doggo " : i }
) ) ;
}
add_documents ( & index , vec! [ docs1 , docs2 ] ) ;
db_snap! ( index , documents_ids , @ " [3, 4, 5, 6, ] " ) ;
db_snap! ( index , external_documents_ids , 1 , @ r ###"
soft :
hard :
0 4
1 5
2 6
3 3
" ###);
db_snap! ( index , facet_id_f64_docids , 1 , @ r ###"
1 0 0 1 [ 0 , 4 , ]
1 0 1 1 [ 1 , 5 , ]
1 0 2 1 [ 2 , 6 , ]
1 0 3 1 [ 3 , ]
" ###);
}
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let ( _docid , obkv ) = index . documents ( & rtxn , [ 3 ] ) . unwrap ( ) [ 0 ] ;
let json = obkv_to_json ( & [ 0 , 1 ] , & index . fields_ids_map ( & rtxn ) . unwrap ( ) , obkv ) . unwrap ( ) ;
insta ::assert_debug_snapshot! ( json , @ r ###"
{
" id " : Number ( 3 ) ,
" doggo " : Number ( 3 ) ,
}
" ###);
let ( _docid , obkv ) = index . documents ( & rtxn , [ 4 ] ) . unwrap ( ) [ 0 ] ;
let json = obkv_to_json ( & [ 0 , 1 ] , & index . fields_ids_map ( & rtxn ) . unwrap ( ) , obkv ) . unwrap ( ) ;
insta ::assert_debug_snapshot! ( json , @ r ###"
{
" id " : Number ( 0 ) ,
" doggo " : Number ( 0 ) ,
}
" ###);
let ( _docid , obkv ) = index . documents ( & rtxn , [ 5 ] ) . unwrap ( ) [ 0 ] ;
let json = obkv_to_json ( & [ 0 , 1 ] , & index . fields_ids_map ( & rtxn ) . unwrap ( ) , obkv ) . unwrap ( ) ;
insta ::assert_debug_snapshot! ( json , @ r ###"
{
" id " : Number ( 1 ) ,
" doggo " : Number ( 1 ) ,
}
" ###);
let ( _docid , obkv ) = index . documents ( & rtxn , [ 6 ] ) . unwrap ( ) [ 0 ] ;
let json = obkv_to_json ( & [ 0 , 1 ] , & index . fields_ids_map ( & rtxn ) . unwrap ( ) , obkv ) . unwrap ( ) ;
insta ::assert_debug_snapshot! ( json , @ r ###"
{
" id " : Number ( 2 ) ,
" doggo " : Number ( 2 ) ,
}
" ###);
drop ( rtxn ) ;
// Third Batch: replace the documents with soft-deletion again
{
let mut docs1 = vec! [ ] ;
for i in 0 .. 3 {
docs1 . push ( serde_json ::json! (
{ " id " : i , " doggo " : i + 1 }
) ) ;
}
let mut docs2 = vec! [ ] ;
for i in 0 .. 4 {
docs2 . push ( serde_json ::json! (
{ " id " : i , " doggo " : i }
) ) ;
}
add_documents ( & index , vec! [ docs1 , docs2 ] ) ;
db_snap! ( index , documents_ids , @ " [3, 7, 8, 9, ] " ) ;
db_snap! ( index , external_documents_ids , 1 , @ r ###"
soft :
hard :
0 7
1 8
2 9
3 3
" ###);
db_snap! ( index , facet_id_f64_docids , 1 , @ r ###"
1 0 0 1 [ 0 , 4 , 7 , ]
1 0 1 1 [ 1 , 5 , 8 , ]
1 0 2 1 [ 2 , 6 , 9 , ]
1 0 3 1 [ 3 , ]
" ###);
}
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let ( _docid , obkv ) = index . documents ( & rtxn , [ 3 ] ) . unwrap ( ) [ 0 ] ;
let json = obkv_to_json ( & [ 0 , 1 ] , & index . fields_ids_map ( & rtxn ) . unwrap ( ) , obkv ) . unwrap ( ) ;
insta ::assert_debug_snapshot! ( json , @ r ###"
{
" id " : Number ( 3 ) ,
" doggo " : Number ( 3 ) ,
}
" ###);
let ( _docid , obkv ) = index . documents ( & rtxn , [ 7 ] ) . unwrap ( ) [ 0 ] ;
let json = obkv_to_json ( & [ 0 , 1 ] , & index . fields_ids_map ( & rtxn ) . unwrap ( ) , obkv ) . unwrap ( ) ;
insta ::assert_debug_snapshot! ( json , @ r ###"
{
" id " : Number ( 0 ) ,
" doggo " : Number ( 0 ) ,
}
" ###);
let ( _docid , obkv ) = index . documents ( & rtxn , [ 8 ] ) . unwrap ( ) [ 0 ] ;
let json = obkv_to_json ( & [ 0 , 1 ] , & index . fields_ids_map ( & rtxn ) . unwrap ( ) , obkv ) . unwrap ( ) ;
insta ::assert_debug_snapshot! ( json , @ r ###"
{
" id " : Number ( 1 ) ,
" doggo " : Number ( 1 ) ,
}
" ###);
let ( _docid , obkv ) = index . documents ( & rtxn , [ 9 ] ) . unwrap ( ) [ 0 ] ;
let json = obkv_to_json ( & [ 0 , 1 ] , & index . fields_ids_map ( & rtxn ) . unwrap ( ) , obkv ) . unwrap ( ) ;
insta ::assert_debug_snapshot! ( json , @ r ###"
{
" id " : Number ( 2 ) ,
" doggo " : Number ( 2 ) ,
}
" ###);
drop ( rtxn ) ;
// Fourth Batch: replace the documents without soft-deletion
{
let mut docs1 = vec! [ ] ;
for i in 0 .. 3 {
docs1 . push ( serde_json ::json! (
{ " id " : i , " doggo " : i + 2 }
) ) ;
}
let mut docs2 = vec! [ ] ;
for i in 0 .. 1 {
docs2 . push ( serde_json ::json! (
{ " id " : i , " doggo " : i }
) ) ;
}
add_documents ( & index , vec! [ docs1 , docs2 ] ) ;
db_snap! ( index , documents_ids , @ " [3, 10, 11, 12, ] " ) ;
db_snap! ( index , external_documents_ids , 1 , @ r ###"
soft :
hard :
0 10
1 11
2 12
3 3
" ###);
db_snap! ( index , facet_id_f64_docids , 1 , @ r ###"
1 0 0 1 [ 10 , ]
1 0 3 1 [ 3 , 11 , ]
1 0 4 1 [ 12 , ]
" ###);
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let ( _docid , obkv ) = index . documents ( & rtxn , [ 3 ] ) . unwrap ( ) [ 0 ] ;
let json = obkv_to_json ( & [ 0 , 1 ] , & index . fields_ids_map ( & rtxn ) . unwrap ( ) , obkv ) . unwrap ( ) ;
insta ::assert_debug_snapshot! ( json , @ r ###"
{
" id " : Number ( 3 ) ,
" doggo " : Number ( 3 ) ,
}
" ###);
let ( _docid , obkv ) = index . documents ( & rtxn , [ 10 ] ) . unwrap ( ) [ 0 ] ;
let json = obkv_to_json ( & [ 0 , 1 ] , & index . fields_ids_map ( & rtxn ) . unwrap ( ) , obkv ) . unwrap ( ) ;
insta ::assert_debug_snapshot! ( json , @ r ###"
{
" id " : Number ( 0 ) ,
" doggo " : Number ( 0 ) ,
}
" ###);
let ( _docid , obkv ) = index . documents ( & rtxn , [ 11 ] ) . unwrap ( ) [ 0 ] ;
let json = obkv_to_json ( & [ 0 , 1 ] , & index . fields_ids_map ( & rtxn ) . unwrap ( ) , obkv ) . unwrap ( ) ;
insta ::assert_debug_snapshot! ( json , @ r ###"
{
" id " : Number ( 1 ) ,
" doggo " : Number ( 3 ) ,
}
" ###);
let ( _docid , obkv ) = index . documents ( & rtxn , [ 12 ] ) . unwrap ( ) [ 0 ] ;
let json = obkv_to_json ( & [ 0 , 1 ] , & index . fields_ids_map ( & rtxn ) . unwrap ( ) , obkv ) . unwrap ( ) ;
insta ::assert_debug_snapshot! ( json , @ r ###"
{
" id " : Number ( 2 ) ,
" doggo " : Number ( 4 ) ,
}
" ###);
drop ( rtxn ) ;
}
}
#[ test ]
fn bug_3021_first ( ) {
// https://github.com/meilisearch/meilisearch/issues/3021
let mut index = TempIndex ::new ( ) ;
index . index_documents_config . update_method = IndexDocumentsMethod ::ReplaceDocuments ;
index
. update_settings ( | settings | {
settings . set_primary_key ( " primary_key " . to_owned ( ) ) ;
} )
. unwrap ( ) ;
index
. add_documents ( documents! ( [
{ " primary_key " : 38 } ,
{ " primary_key " : 34 }
] ) )
. unwrap ( ) ;
db_snap! ( index , documents_ids , @ " [0, 1, ] " ) ;
db_snap! ( index , external_documents_ids , 1 , @ r ###"
soft :
hard :
34 1
38 0
" ###);
index . delete_document ( " 34 " ) ;
db_snap! ( index , documents_ids , @ " [0, ] " ) ;
db_snap! ( index , external_documents_ids , 2 , @ r ###"
soft :
hard :
34 1
38 0
" ###);
index
. update_settings ( | s | {
s . set_searchable_fields ( vec! [ ] ) ;
} )
. unwrap ( ) ;
// The key point of the test is to verify that the external documents ids
// do not contain any entry for previously soft-deleted document ids
db_snap! ( index , documents_ids , @ " [0, ] " ) ;
db_snap! ( index , external_documents_ids , 3 , @ r ###"
soft :
hard :
38 0
" ###);
// So that this document addition works correctly now.
// It would be wrongly interpreted as a replacement before
index . add_documents ( documents! ( { " primary_key " : 34 } ) ) . unwrap ( ) ;
db_snap! ( index , documents_ids , @ " [0, 1, ] " ) ;
db_snap! ( index , external_documents_ids , 4 , @ r ###"
soft :
hard :
34 1
38 0
" ###);
// We do the test again, but deleting the document with id 0 instead of id 1 now
index . delete_document ( " 38 " ) ;
db_snap! ( index , documents_ids , @ " [1, ] " ) ;
db_snap! ( index , external_documents_ids , 5 , @ r ###"
soft :
hard :
34 1
38 0
" ###);
index
. update_settings ( | s | {
s . set_searchable_fields ( vec! [ " primary_key " . to_owned ( ) ] ) ;
} )
. unwrap ( ) ;
db_snap! ( index , documents_ids , @ " [1, ] " ) ;
db_snap! ( index , external_documents_ids , 6 , @ r ###"
soft :
hard :
34 1
" ###);
// And adding lots of documents afterwards instead of just one.
// These extra subtests don't add much, but it's better than nothing.
index . add_documents ( documents! ( [ { " primary_key " : 38 } , { " primary_key " : 39 } , { " primary_key " : 41 } , { " primary_key " : 40 } , { " primary_key " : 41 } , { " primary_key " : 42 } ] ) ) . unwrap ( ) ;
db_snap! ( index , documents_ids , @ " [0, 1, 2, 3, 4, 5, ] " ) ;
db_snap! ( index , external_documents_ids , 7 , @ r ###"
soft :
hard :
34 1
38 0
39 2
40 4
41 3
42 5
" ###);
}
#[ test ]
fn bug_3021_second ( ) {
// https://github.com/meilisearch/meilisearch/issues/3021
let mut index = TempIndex ::new ( ) ;
index . index_documents_config . update_method = IndexDocumentsMethod ::UpdateDocuments ;
index
. update_settings ( | settings | {
settings . set_primary_key ( " primary_key " . to_owned ( ) ) ;
} )
. unwrap ( ) ;
index
. add_documents ( documents! ( [
{ " primary_key " : 30 } ,
{ " primary_key " : 34 }
] ) )
. unwrap ( ) ;
db_snap! ( index , documents_ids , @ " [0, 1, ] " ) ;
db_snap! ( index , external_documents_ids , 1 , @ r ###"
soft :
hard :
30 0
34 1
" ###);
index . delete_document ( " 34 " ) ;
db_snap! ( index , documents_ids , @ " [0, ] " ) ;
db_snap! ( index , external_documents_ids , 2 , @ r ###"
soft :
hard :
30 0
34 1
" ###);
index
. update_settings ( | s | {
s . set_searchable_fields ( vec! [ ] ) ;
} )
. unwrap ( ) ;
// The key point of the test is to verify that the external documents ids
// do not contain any entry for previously soft-deleted document ids
db_snap! ( index , documents_ids , @ " [0, ] " ) ;
db_snap! ( index , external_documents_ids , 3 , @ r ###"
soft :
hard :
30 0
" ###);
// So that when we add a new document
index . add_documents ( documents! ( { " primary_key " : 35 , " b " : 2 } ) ) . unwrap ( ) ;
db_snap! ( index , documents_ids , @ " [0, 1, ] " ) ;
// The external documents ids don't have several external ids pointing to the same
// internal document id
db_snap! ( index , external_documents_ids , 4 , @ r ###"
soft :
hard :
30 0
35 1
" ###);
// And when we add 34 again, we don't replace document 35
index . add_documents ( documents! ( { " primary_key " : 34 , " a " : 1 } ) ) . unwrap ( ) ;
// And document 35 still exists, is not deleted
db_snap! ( index , documents_ids , @ " [0, 1, 2, ] " ) ;
db_snap! ( index , external_documents_ids , 5 , @ r ###"
soft :
hard :
30 0
34 2
35 1
" ###);
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let ( _docid , obkv ) = index . documents ( & rtxn , [ 0 ] ) . unwrap ( ) [ 0 ] ;
let json = obkv_to_json ( & [ 0 , 1 , 2 ] , & index . fields_ids_map ( & rtxn ) . unwrap ( ) , obkv ) . unwrap ( ) ;
insta ::assert_debug_snapshot! ( json , @ r ###"
{
" primary_key " : Number ( 30 ) ,
}
" ###);
// Furthermore, when we retrieve document 34, it is not the result of merging 35 with 34
let ( _docid , obkv ) = index . documents ( & rtxn , [ 2 ] ) . unwrap ( ) [ 0 ] ;
let json = obkv_to_json ( & [ 0 , 1 , 2 ] , & index . fields_ids_map ( & rtxn ) . unwrap ( ) , obkv ) . unwrap ( ) ;
insta ::assert_debug_snapshot! ( json , @ r ###"
{
" primary_key " : Number ( 34 ) ,
" a " : Number ( 1 ) ,
}
" ###);
drop ( rtxn ) ;
// Add new documents again
index
. add_documents (
documents! ( [ { " primary_key " : 37 } , { " primary_key " : 38 } , { " primary_key " : 39 } ] ) ,
)
. unwrap ( ) ;
db_snap! ( index , documents_ids , @ " [0, 1, 2, 3, 4, 5, ] " ) ;
db_snap! ( index , external_documents_ids , 6 , @ r ###"
soft :
hard :
30 0
34 2
35 1
37 3
38 4
39 5
" ###);
}
#[ test ]
fn bug_3021_third ( ) {
// https://github.com/meilisearch/meilisearch/issues/3021
let mut index = TempIndex ::new ( ) ;
index . index_documents_config . update_method = IndexDocumentsMethod ::UpdateDocuments ;
index
. update_settings ( | settings | {
settings . set_primary_key ( " primary_key " . to_owned ( ) ) ;
} )
. unwrap ( ) ;
index
. add_documents ( documents! ( [
{ " primary_key " : 3 } ,
{ " primary_key " : 4 } ,
{ " primary_key " : 5 }
] ) )
. unwrap ( ) ;
db_snap! ( index , documents_ids , @ " [0, 1, 2, ] " ) ;
db_snap! ( index , external_documents_ids , 1 , @ r ###"
soft :
hard :
3 0
4 1
5 2
" ###);
index . delete_document ( " 3 " ) ;
db_snap! ( index , documents_ids , @ " [1, 2, ] " ) ;
db_snap! ( index , external_documents_ids , 2 , @ r ###"
soft :
hard :
3 0
4 1
5 2
" ###);
index . add_documents ( documents! ( [ { " primary_key " : " 4 " , " a " : 2 } ] ) ) . unwrap ( ) ;
db_snap! ( index , documents_ids , @ " [2, 3, ] " ) ;
db_snap! ( index , external_documents_ids , 2 , @ r ###"
soft :
hard :
4 3
5 2
" ###);
index
. add_documents ( documents! ( [
{ " primary_key " : " 3 " } ,
] ) )
. unwrap ( ) ;
db_snap! ( index , documents_ids , @ " [0, 2, 3, ] " ) ;
db_snap! ( index , external_documents_ids , 2 , @ r ###"
soft :
hard :
3 0
4 3
5 2
" ###);
}
#[ test ]
fn bug_3021_fourth ( ) {
// https://github.com/meilisearch/meilisearch/issues/3021
let mut index = TempIndex ::new ( ) ;
index . index_documents_config . update_method = IndexDocumentsMethod ::UpdateDocuments ;
index
. update_settings ( | settings | {
settings . set_primary_key ( " primary_key " . to_owned ( ) ) ;
} )
. unwrap ( ) ;
index
. add_documents ( documents! ( [
{ " primary_key " : 11 } ,
{ " primary_key " : 4 } ,
] ) )
. unwrap ( ) ;
db_snap! ( index , documents_ids , @ " [0, 1, ] " ) ;
db_snap! ( index , external_documents_ids , @ r ###"
soft :
hard :
11 0
4 1
" ###);
index
. add_documents ( documents! ( [
{ " primary_key " : 4 , " a " : 0 } ,
{ " primary_key " : 1 } ,
] ) )
. unwrap ( ) ;
db_snap! ( index , documents_ids , @ " [0, 2, 3, ] " ) ;
db_snap! ( index , external_documents_ids , @ r ###"
soft :
hard :
1 3
11 0
4 2
" ###);
index . delete_documents ( Default ::default ( ) ) ;
db_snap! ( index , documents_ids , @ " [0, 2, 3, ] " ) ;
db_snap! ( index , external_documents_ids , @ r ###"
soft :
hard :
1 3
11 0
4 2
" ###);
index
. add_documents ( documents! ( [
{ " primary_key " : 4 , " a " : 1 } ,
{ " primary_key " : 1 , " a " : 0 } ,
] ) )
. unwrap ( ) ;
db_snap! ( index , documents_ids , @ " [0, 1, 4, ] " ) ;
db_snap! ( index , external_documents_ids , @ r ###"
soft :
hard :
1 4
11 0
4 1
" ###);
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let search = Search ::new ( & rtxn , & index ) ;
let SearchResult {
matching_words : _ ,
candidates : _ ,
document_scores : _ ,
mut documents_ids ,
} = search . execute ( ) . unwrap ( ) ;
let primary_key_id = index . fields_ids_map ( & rtxn ) . unwrap ( ) . id ( " primary_key " ) . unwrap ( ) ;
documents_ids . sort_unstable ( ) ;
let docs = index . documents ( & rtxn , documents_ids ) . unwrap ( ) ;
let mut all_ids = HashSet ::new ( ) ;
for ( _docid , obkv ) in docs {
let id = obkv . get ( primary_key_id ) . unwrap ( ) ;
assert! ( all_ids . insert ( id ) ) ;
}
}
#[ test ]
fn bug_3007 ( ) {
// https://github.com/meilisearch/meilisearch/issues/3007
use crate ::error ::{ GeoError , UserError } ;
let index = TempIndex ::new ( ) ;
// The index has a geo field that is NOT contained in the sortable_fields of the settings
index
. update_settings ( | settings | {
settings . set_primary_key ( " id " . to_string ( ) ) ;
settings . set_filterable_fields ( HashSet ::from ( [ " _geo " . to_string ( ) ] ) ) ;
} )
. unwrap ( ) ;
// happy path
index . add_documents ( documents! ( { " id " : 5 , " _geo " : { " lat " : 12.0 , " lng " : 11.0 } } ) ) . unwrap ( ) ;
db_snap! ( index , geo_faceted_documents_ids ) ;
// both values are unparseable, so we expect GeoError::BadLatitudeAndLongitude
let err1 = index
. add_documents (
documents! ( { " id " : 6 , " _geo " : { " lat " : " unparseable " , " lng " : " unparseable " } } ) ,
)
. unwrap_err ( ) ;
assert! ( matches! (
err1 ,
Error ::UserError ( UserError ::InvalidGeoField ( GeoError ::BadLatitudeAndLongitude { .. } ) )
) ) ;
db_snap! ( index , geo_faceted_documents_ids ) ; // ensure that no more document was inserted
}
#[ test ]
fn unexpected_extra_fields_in_geo_field ( ) {
let index = TempIndex ::new ( ) ;
index
. update_settings ( | settings | {
settings . set_primary_key ( " id " . to_string ( ) ) ;
settings . set_filterable_fields ( HashSet ::from ( [ " _geo " . to_string ( ) ] ) ) ;
} )
. unwrap ( ) ;
let err = index
. add_documents (
documents! ( { " id " : " doggo " , " _geo " : { " lat " : 1 , " lng " : 2 , " doggo " : " are the best " } } ) ,
)
. unwrap_err ( ) ;
insta ::assert_display_snapshot! ( err , @ r ### "The `_geo` field in the document with the id: `"\"doggo\""` contains the following unexpected fields: `{"doggo":"are the best"}`."### ) ;
db_snap! ( index , geo_faceted_documents_ids ) ; // ensure that no documents were inserted
// multiple fields and complex values
let err = index
. add_documents (
documents! ( { " id " : " doggo " , " _geo " : { " lat " : 1 , " lng " : 2 , " doggo " : " are the best " , " and " : { " all " : [ " cats " , { " are " : " beautiful " } ] } } } ) ,
)
. unwrap_err ( ) ;
insta ::assert_display_snapshot! ( err , @ r ### "The `_geo` field in the document with the id: `"\"doggo\""` contains the following unexpected fields: `{"and":{"all":["cats",{"are":"beautiful"}]},"doggo":"are the best"}`."### ) ;
db_snap! ( index , geo_faceted_documents_ids ) ; // ensure that no documents were inserted
}
}