diff --git a/Cargo.lock b/Cargo.lock index f2fe02366..904d1c225 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1736,6 +1736,9 @@ name = "hashbrown" version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" +dependencies = [ + "ahash 0.7.6", +] [[package]] name = "hashbrown" @@ -1746,12 +1749,6 @@ dependencies = [ "ahash 0.7.6", ] -[[package]] -name = "header-vec" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda7e66d32131841c4264e34a32c934df0dedb08d737f861326d616d4338f06f" - [[package]] name = "heapless" version = "0.7.16" @@ -1835,19 +1832,6 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" -[[package]] -name = "hgg" -version = "0.4.2-alpha.0" -source = "git+https://github.com/rust-cv/hgg#6d1eacde635158163fb663d9327a2d6f612dd435" -dependencies = [ - "ahash 0.7.6", - "hashbrown 0.11.2", - "header-vec", - "num-traits", - "serde", - "space", -] - [[package]] name = "hmac" version = "0.12.1" @@ -1857,6 +1841,22 @@ dependencies = [ "digest", ] +[[package]] +name = "hnsw" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b9740ebf8769ec4ad6762cc951ba18f39bba6dfbc2fbbe46285f7539af79752" +dependencies = [ + "ahash 0.7.6", + "hashbrown 0.11.2", + "libm", + "num-traits", + "rand_core", + "serde", + "smallvec", + "space", +] + [[package]] name = "http" version = "0.2.9" @@ -2729,7 +2729,7 @@ dependencies = [ "geoutils", "grenad", "heed", - "hgg", + "hnsw", "insta", "itertools", "json-depth-checker", @@ -2744,6 +2744,7 @@ dependencies = [ "once_cell", "ordered-float", "rand", + "rand_pcg", "rayon", "roaring", "rstar", @@ -3306,6 +3307,16 @@ dependencies = [ "getrandom", ] +[[package]] +name = "rand_pcg" +version = "0.3.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "59cad018caf63deb318e5a4586d99a24424a364f40f1e5778c29aca23f4fc73e" +dependencies = [ + "rand_core", + "serde", +] + [[package]] name = "rayon" version = "1.7.0" @@ -3765,6 +3776,9 @@ name = "smallvec" version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" +dependencies = [ + "serde", +] [[package]] name = "smartstring" diff --git a/milli/Cargo.toml b/milli/Cargo.toml index c17d100f5..08f0c2645 100644 --- a/milli/Cargo.toml +++ b/milli/Cargo.toml @@ -33,13 +33,14 @@ heed = { git = "https://github.com/meilisearch/heed", tag = "v0.12.6", default-f "lmdb", "sync-read-txn", ] } -hgg = { git = "https://github.com/rust-cv/hgg", features = ["serde"] } +hnsw = { version = "0.11.0", features = ["serde1"] } json-depth-checker = { path = "../json-depth-checker" } levenshtein_automata = { version = "0.2.1", features = ["fst_automaton"] } memmap2 = "0.5.10" obkv = "0.2.0" once_cell = "1.17.1" ordered-float = "3.6.0" +rand_pcg = { version = "0.3.1", features = ["serde1"] } rayon = "1.7.0" roaring = "0.10.1" rstar = { version = "0.10.0", features = ["serde"] } diff --git a/milli/src/distance.rs b/milli/src/dot_product.rs similarity index 100% rename from milli/src/distance.rs rename to milli/src/dot_product.rs diff --git a/milli/src/index.rs b/milli/src/index.rs index e29c6da22..4cdfb010c 100644 --- a/milli/src/index.rs +++ b/milli/src/index.rs @@ -8,11 +8,12 @@ use charabia::{Language, Script}; use heed::flags::Flags; use heed::types::*; use heed::{CompactionOption, Database, PolyDatabase, RoTxn, RwTxn}; +use rand_pcg::Pcg32; use roaring::RoaringBitmap; use rstar::RTree; use time::OffsetDateTime; -use crate::distance::Euclidean; +use crate::dot_product::DotProduct; use crate::error::{InternalError, UserError}; use crate::facet::FacetType; use crate::fields_ids_map::FieldsIdsMap; @@ -27,8 +28,8 @@ use 
crate::{ Result, RoaringBitmapCodec, RoaringBitmapLenCodec, Search, U8StrStrCodec, BEU16, BEU32, }; -/// The HGG data-structure that we serialize, fill and search in. -pub type Hgg = hgg::Hgg<Euclidean, Vec<f32>, DocumentId>; +/// The HNSW data-structure that we serialize, fill and search in. +pub type Hnsw = hnsw::Hnsw<DotProduct, Vec<f32>, Pcg32, 12, 24>; pub const DEFAULT_MIN_WORD_LEN_ONE_TYPO: u8 = 5; pub const DEFAULT_MIN_WORD_LEN_TWO_TYPOS: u8 = 9; @@ -46,7 +47,7 @@ pub mod main_key { pub const FIELDS_IDS_MAP_KEY: &str = "fields-ids-map"; pub const GEO_FACETED_DOCUMENTS_IDS_KEY: &str = "geo-faceted-documents-ids"; pub const GEO_RTREE_KEY: &str = "geo-rtree"; - pub const VECTOR_HGG_KEY: &str = "vector-hgg"; + pub const VECTOR_HNSW_KEY: &str = "vector-hnsw"; pub const HARD_EXTERNAL_DOCUMENTS_IDS_KEY: &str = "hard-external-documents-ids"; pub const NUMBER_FACETED_DOCUMENTS_IDS_PREFIX: &str = "number-faceted-documents-ids"; pub const PRIMARY_KEY_KEY: &str = "primary-key"; @@ -91,6 +92,7 @@ pub mod db_name { pub const FACET_ID_STRING_DOCIDS: &str = "facet-id-string-docids"; pub const FIELD_ID_DOCID_FACET_F64S: &str = "field-id-docid-facet-f64s"; pub const FIELD_ID_DOCID_FACET_STRINGS: &str = "field-id-docid-facet-strings"; + pub const VECTOR_ID_DOCID: &str = "vector-id-docids"; pub const DOCUMENTS: &str = "documents"; pub const SCRIPT_LANGUAGE_DOCIDS: &str = "script_language_docids"; } @@ -154,6 +156,9 @@ pub struct Index { /// Maps the document id, the facet field id and the strings. pub field_id_docid_facet_strings: Database<FieldDocIdFacetStringCodec, Str>, + /// Maps a vector id to the document id that have it. + pub vector_id_docid: Database<OwnedType<BEU32>, OwnedType<BEU32>>, + /// Maps the document id to the document as an obkv store. 
pub(crate) documents: Database<OwnedType<BEU32>, ObkvCodec>, } @@ -167,7 +172,7 @@ impl Index { ) -> Result<Index> { use db_name::*; - options.max_dbs(23); + options.max_dbs(24); unsafe { options.flag(Flags::MdbAlwaysFreePages) }; let env = options.open(path)?; @@ -207,6 +212,7 @@ impl Index { env.create_database(&mut wtxn, Some(FIELD_ID_DOCID_FACET_F64S))?; let field_id_docid_facet_strings = env.create_database(&mut wtxn, Some(FIELD_ID_DOCID_FACET_STRINGS))?; + let vector_id_docid = env.create_database(&mut wtxn, Some(VECTOR_ID_DOCID))?; let documents = env.create_database(&mut wtxn, Some(DOCUMENTS))?; wtxn.commit()?; @@ -235,6 +241,7 @@ impl Index { facet_id_is_empty_docids, field_id_docid_facet_f64s, field_id_docid_facet_strings, + vector_id_docid, documents, }) } @@ -506,22 +513,22 @@ impl Index { } } - /* vector HGG */ + /* vector HNSW */ - /// Writes the provided `hgg`. - pub(crate) fn put_vector_hgg(&self, wtxn: &mut RwTxn, hgg: &Hgg) -> heed::Result<()> { - self.main.put::<_, Str, SerdeBincode<Hgg>>(wtxn, main_key::VECTOR_HGG_KEY, hgg) + /// Writes the provided `hnsw`. + pub(crate) fn put_vector_hnsw(&self, wtxn: &mut RwTxn, hnsw: &Hnsw) -> heed::Result<()> { + self.main.put::<_, Str, SerdeBincode<Hnsw>>(wtxn, main_key::VECTOR_HNSW_KEY, hnsw) } - /// Delete the `hgg`. - pub(crate) fn delete_vector_hgg(&self, wtxn: &mut RwTxn) -> heed::Result<bool> { - self.main.delete::<_, Str>(wtxn, main_key::VECTOR_HGG_KEY) + /// Delete the `hnsw`. + pub(crate) fn delete_vector_hnsw(&self, wtxn: &mut RwTxn) -> heed::Result<bool> { + self.main.delete::<_, Str>(wtxn, main_key::VECTOR_HNSW_KEY) } - /// Returns the `hgg`. - pub fn vector_hgg(&self, rtxn: &RoTxn) -> Result<Option<Hgg>> { - match self.main.get::<_, Str, SerdeBincode<Hgg>>(rtxn, main_key::VECTOR_HGG_KEY)? { - Some(hgg) => Ok(Some(hgg)), + /// Returns the `hnsw`. + pub fn vector_hnsw(&self, rtxn: &RoTxn) -> Result<Option<Hnsw>> { + match self.main.get::<_, Str, SerdeBincode<Hnsw>>(rtxn, main_key::VECTOR_HNSW_KEY)? 
{ + Some(hnsw) => Ok(Some(hnsw)), None => Ok(None), } } diff --git a/milli/src/lib.rs b/milli/src/lib.rs index 4c7428fa8..a1dc6ca4f 100644 --- a/milli/src/lib.rs +++ b/milli/src/lib.rs @@ -10,7 +10,7 @@ pub mod documents; mod asc_desc; mod criterion; -mod distance; +pub mod dot_product; mod error; mod external_documents_ids; pub mod facet; diff --git a/milli/src/search/new/mod.rs b/milli/src/search/new/mod.rs index f1aa21484..948a2fa21 100644 --- a/milli/src/search/new/mod.rs +++ b/milli/src/search/new/mod.rs @@ -28,6 +29,7 @@ use db_cache::DatabaseCache; use exact_attribute::ExactAttribute; use graph_based_ranking_rule::{Exactness, Fid, Position, Proximity, Typo}; use heed::RoTxn; +use hnsw::Searcher; use interner::{DedupInterner, Interner}; pub use logger::visual::VisualSearchLogger; pub use logger::{DefaultSearchLogger, SearchLogger}; @@ -39,7 +40,7 @@ use ranking_rules::{ use resolve_query_graph::{compute_query_graph_docids, PhraseDocIdsCache}; use roaring::RoaringBitmap; use sort::Sort; -use space::{KnnMap, Neighbor}; +use space::Neighbor; use self::geo_sort::GeoSort; pub use self::geo_sort::Strategy as GeoSortStrategy; @@ -47,7 +48,9 @@ use self::graph_based_ranking_rule::Words; use self::interner::Interned; use crate::score_details::{ScoreDetails, ScoringStrategy}; use crate::search::new::distinct::apply_distinct_rule; -use crate::{AscDesc, DocumentId, Filter, Index, Member, Result, TermsMatchingStrategy, UserError}; +use crate::{ + AscDesc, DocumentId, Filter, Index, Member, Result, TermsMatchingStrategy, UserError, BEU32, +}; /// A structure used throughout the execution of a search query. pub struct SearchContext<'ctx> { @@ -447,15 +450,26 @@ pub fn execute_search( let docids = match vector { Some(vector) => { // return the nearest documents that are also part of the candidates. 
- let hgg = ctx.index.vector_hgg(ctx.txn)?.unwrap_or_default(); - hgg.knn_values(&vector, 100) - .into_iter() - .filter(|(Neighbor { distance, .. }, docid)| { - dbg!(distance, f32::from_bits(*distance)); - universe.contains(**docid) - }) - .map(|(_, docid)| *docid) - .collect() + let mut searcher = Searcher::new(); + let hnsw = ctx.index.vector_hnsw(ctx.txn)?.unwrap_or_default(); + let ef = hnsw.len().min(100); + let mut dest = vec![Neighbor { index: 0, distance: 0 }; ef]; + let neighbors = hnsw.nearest(&vector, ef, &mut searcher, &mut dest[..]); + + let mut docids = Vec::new(); + for Neighbor { index, distance } in neighbors.iter() { + let index = BEU32::new(*index as u32); + let docid = ctx.index.vector_id_docid.get(ctx.txn, &index)?.unwrap().get(); + dbg!(distance, f32::from_bits(*distance)); + if universe.contains(docid) { + docids.push(docid); + if docids.len() == length { + break; + } + } + } + + docids } // return the search docids if the vector field is not specified None => docids, diff --git a/milli/src/update/clear_documents.rs b/milli/src/update/clear_documents.rs index e5e7f5491..f4a2d43fe 100644 --- a/milli/src/update/clear_documents.rs +++ b/milli/src/update/clear_documents.rs @@ -39,6 +39,7 @@ impl<'t, 'u, 'i> ClearDocuments<'t, 'u, 'i> { facet_id_is_empty_docids, field_id_docid_facet_f64s, field_id_docid_facet_strings, + vector_id_docid, documents, } = self.index; @@ -57,7 +58,7 @@ impl<'t, 'u, 'i> ClearDocuments<'t, 'u, 'i> { self.index.put_field_distribution(self.wtxn, &FieldDistribution::default())?; self.index.delete_geo_rtree(self.wtxn)?; self.index.delete_geo_faceted_documents_ids(self.wtxn)?; - self.index.delete_vector_hgg(self.wtxn)?; + self.index.delete_vector_hnsw(self.wtxn)?; // We clean all the faceted documents ids. 
for field_id in faceted_fields { @@ -96,6 +97,7 @@ impl<'t, 'u, 'i> ClearDocuments<'t, 'u, 'i> { facet_id_string_docids.clear(self.wtxn)?; field_id_docid_facet_f64s.clear(self.wtxn)?; field_id_docid_facet_strings.clear(self.wtxn)?; + vector_id_docid.clear(self.wtxn)?; documents.clear(self.wtxn)?; Ok(number_of_documents) diff --git a/milli/src/update/delete_documents.rs b/milli/src/update/delete_documents.rs index 890c2b329..73af66a95 100644 --- a/milli/src/update/delete_documents.rs +++ b/milli/src/update/delete_documents.rs @@ -240,6 +240,7 @@ impl<'t, 'u, 'i> DeleteDocuments<'t, 'u, 'i> { facet_id_exists_docids, facet_id_is_null_docids, facet_id_is_empty_docids, + vector_id_docid, documents, } = self.index; // Remove from the documents database @@ -274,8 +275,6 @@ impl<'t, 'u, 'i> DeleteDocuments<'t, 'u, 'i> { &mut words_to_delete, )?; - todo!("delete the documents from the Hgg datastructure"); - // We construct an FST set that contains the words to delete from the words FST. let words_to_delete = fst::Set::from_iter(words_to_delete.difference(&words_to_keep))?; diff --git a/milli/src/update/index_documents/typed_chunk.rs b/milli/src/update/index_documents/typed_chunk.rs index 122484a6d..e136dc139 100644 --- a/milli/src/update/index_documents/typed_chunk.rs +++ b/milli/src/update/index_documents/typed_chunk.rs @@ -9,8 +9,8 @@ use charabia::{Language, Script}; use grenad::MergerBuilder; use heed::types::ByteSlice; use heed::RwTxn; +use hnsw::Searcher; use roaring::RoaringBitmap; -use space::KnnInsert; use super::helpers::{ self, merge_ignore_values, serialize_roaring_bitmap, valid_lmdb_key, CursorClonableMmap, @@ -19,7 +19,7 @@ use super::{ClonableMmap, MergeFn}; use crate::facet::FacetType; use crate::update::facet::FacetsUpdate; use crate::update::index_documents::helpers::as_cloneable_grenad; -use crate::{lat_lng_to_xyz, CboRoaringBitmapCodec, DocumentId, GeoPoint, Index, Result}; +use crate::{lat_lng_to_xyz, CboRoaringBitmapCodec, DocumentId, GeoPoint, Index, 
Result, BEU32}; pub(crate) enum TypedChunk { FieldIdDocidFacetStrings(grenad::Reader<CursorClonableMmap>), @@ -225,17 +225,20 @@ pub(crate) fn write_typed_chunk_into_index( index.put_geo_faceted_documents_ids(wtxn, &geo_faceted_docids)?; } TypedChunk::VectorPoints(vector_points) => { - let mut hgg = index.vector_hgg(wtxn)?.unwrap_or_default(); + let mut hnsw = index.vector_hnsw(wtxn)?.unwrap_or_default(); + let mut searcher = Searcher::new(); + let mut cursor = vector_points.into_cursor()?; while let Some((key, value)) = cursor.move_on_next()? { // convert the key back to a u32 (4 bytes) let docid = key.try_into().map(DocumentId::from_be_bytes).unwrap(); - // convert the vector back to a Vec<f32> and insert it. - // TODO enable again when the library is fixed - hgg.insert(pod_collect_to_vec(value), docid); + // convert the vector back to a Vec<f32> + let vector: Vec<f32> = pod_collect_to_vec(value); + let vector_id = hnsw.insert(vector, &mut searcher) as u32; + index.vector_id_docid.put(wtxn, &BEU32::new(vector_id), &BEU32::new(docid))?; } - log::debug!("There are {} entries in the HGG so far", hgg.len()); - index.put_vector_hgg(wtxn, &hgg)?; + log::debug!("There are {} entries in the HNSW so far", hnsw.len()); + index.put_vector_hnsw(wtxn, &hnsw)?; } TypedChunk::ScriptLanguageDocids(hash_pair) => { let mut buffer = Vec::new();