use std::cell::RefCell;
use std::sync::RwLock;
use std::thread::{self, Builder};

use big_s::S;
use bumpalo::Bump;
use document_changes::{
    for_each_document_change, DocumentChanges, Extractor, FullySend, IndexingContext, RefCellExt,
    ThreadLocal,
};
pub use document_deletion::DocumentDeletion;
pub use document_operation::DocumentOperation;
use heed::{RoTxn, RwTxn};
pub use partial_dump::PartialDump;
use rayon::ThreadPool;
use time::OffsetDateTime;
pub use update_by_function::UpdateByFunction;

use super::channel::*;
use super::document::write_to_obkv;
use super::document_change::DocumentChange;
use super::extract::*;
use super::merger::{merge_grenad_entries, FacetFieldIdsDelta};
use super::word_fst_builder::PrefixDelta;
use super::words_prefix_docids::{
    compute_word_prefix_docids, compute_word_prefix_fid_docids, compute_word_prefix_position_docids,
};
use super::{StdResult, TopLevelMap};
use crate::documents::{PrimaryKey, DEFAULT_PRIMARY_KEY};
use crate::facet::FacetType;
use crate::proximity::ProximityPrecision;
use crate::update::new::channel::ExtractorSender;
use crate::update::new::words_prefix_docids::compute_exact_word_prefix_docids;
use crate::update::settings::InnerIndexSettings;
use crate::update::{FacetsUpdateBulk, GrenadParameters};
use crate::{Error, FieldsIdsMap, GlobalFieldsIdsMap, Index, Result, UserError};

pub(crate) mod de;
pub mod document_changes;
mod document_deletion;
mod document_operation;
mod partial_dump;
mod update_by_function;

struct DocumentExtractor<'a> {
    document_sender: &'a DocumentSender<'a>,
}

impl<'a, 'extractor> Extractor<'extractor> for DocumentExtractor<'a> {
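    // No per-thread state is needed here: the unit wrapped in `FullySend(())`
    // only exists to satisfy the `Extractor` trait's associated `Data` type.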
    type Data = FullySend<()>;

    fn init_data(
        &self,
        _extractor_alloc: raw_collections::alloc::RefBump<'extractor>,
    ) -> Result<Self::Data> {
        Ok(FullySend(()))
    }

    fn process(
        &self,
        change: DocumentChange,
        context: &document_changes::DocumentChangeContext<Self::Data>,
    ) -> Result<()> {
        let mut document_buffer = Vec::new();

        let new_fields_ids_map = context.new_fields_ids_map.borrow_or_yield();
        let new_fields_ids_map = &*new_fields_ids_map;
        let new_fields_ids_map = new_fields_ids_map.local_map();

        let external_docid = change.external_docid().to_owned();

        // document but we need to create a function that collects and compresses documents.
        match change {
            DocumentChange::Deletion(deletion) => {
                let docid = deletion.docid();
                self.document_sender.delete(docid, external_docid).unwrap();
            }
            // TODO: change `None` to `Some(vector)` when implemented
            DocumentChange::Update(update) => {
                let docid = update.docid();
                let content =
                    update.new(&context.txn, context.index, &context.db_fields_ids_map)?;
                let content =
                    write_to_obkv(&content, None, new_fields_ids_map, &mut document_buffer)?;
                self.document_sender.insert(docid, external_docid, content.boxed()).unwrap();
            }
            DocumentChange::Insertion(insertion) => {
                let docid = insertion.docid();
                let content = insertion.new();
                let content =
                    write_to_obkv(&content, None, new_fields_ids_map, &mut document_buffer)?;
                self.document_sender.insert(docid, external_docid, content.boxed()).unwrap();
                // extracted_dictionary_sender.send(self, dictionary: &[u8]);
            }
        }
        Ok(())
    }
}

/// This is the main function of this crate.
///
/// Give it the output of the [`Indexer::document_changes`] method and it will execute it in the [`rayon::ThreadPool`].
///
/// TODO return stats
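///
/// # Example
///
/// A minimal sketch of a caller, not a guaranteed API: it assumes an open
/// `index: Index`, a rayon `pool: ThreadPool`, and a `document_changes` built
/// from one of the operations of this module, e.g. [`DocumentOperation`].
///
/// ```ignore
/// let mut wtxn = index.write_txn()?;
/// let db_fields_ids_map = index.fields_ids_map(&wtxn)?;
/// let new_fields_ids_map = db_fields_ids_map.clone();
/// indexer::index(
///     &mut wtxn,
///     &index,
///     &db_fields_ids_map,
///     new_fields_ids_map,
///     None, // no new primary key
///     &pool,
///     &document_changes,
/// )?;
/// wtxn.commit()?;
/// ```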
pub fn index<'pl, 'indexer, 'index, DC>(
    wtxn: &mut RwTxn,
    index: &'index Index,
    db_fields_ids_map: &'indexer FieldsIdsMap,
    new_fields_ids_map: FieldsIdsMap,
    new_primary_key: Option<PrimaryKey<'pl>>,
    pool: &ThreadPool,
    document_changes: &DC,
) -> Result<()>
where
    DC: DocumentChanges<'pl>,
{
    let (merger_sender, writer_receiver) = merger_writer_channel(10_000);
    // This channel acts as a rendezvous point to ensure that we are one task ahead
    let (extractor_sender, merger_receiver) = extractors_merger_channels(4);

    let new_fields_ids_map = RwLock::new(new_fields_ids_map);
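
    // One slot per thread of the pool: these thread-locals hold the per-thread
    // fields-ids-map views and the bump allocators used during extraction.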
    let fields_ids_map_store = ThreadLocal::with_capacity(pool.current_num_threads());
    let mut extractor_allocs = ThreadLocal::with_capacity(pool.current_num_threads());
    let doc_allocs = ThreadLocal::with_capacity(pool.current_num_threads());

    let indexing_context = IndexingContext {
        index,
        db_fields_ids_map,
        new_fields_ids_map: &new_fields_ids_map,
        doc_allocs: &doc_allocs,
        fields_ids_map_store: &fields_ids_map_store,
    };

    thread::scope(|s| {
        let indexer_span = tracing::Span::current();
        // TODO manage the errors correctly
        let handle = Builder::new().name(S("indexer-extractors")).spawn_scoped(s, move || {
            pool.in_place_scope(|_s| {
                let span = tracing::trace_span!(target: "indexing::documents", parent: &indexer_span, "extract");
                let _entered = span.enter();

                // document but we need to create a function that collects and compresses documents.
                let document_sender = extractor_sender.document_sender();
                let document_extractor = DocumentExtractor { document_sender: &document_sender };
                let datastore = ThreadLocal::with_capacity(pool.current_num_threads());
                for_each_document_change(document_changes, &document_extractor, indexing_context, &mut extractor_allocs, &datastore)?;

                document_sender.finish().unwrap();
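
                // A fixed 10 GiB budget split evenly across the threads: with
                // 16 threads, for example, each grenad sorter gets roughly
                // 640 MiB of in-memory buffer before spilling to disk.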
                const TEN_GIB: usize = 10 * 1024 * 1024 * 1024;
                let max_memory = TEN_GIB / dbg!(rayon::current_num_threads());
                let grenad_parameters = GrenadParameters {
                    max_memory: Some(max_memory),
                    ..GrenadParameters::default()
                };

                {
                    let span = tracing::trace_span!(target: "indexing::documents::extract", "faceted");
                    let _entered = span.enter();
                    extract_and_send_docids::<
                        _,
                        FacetedDocidsExtractor,
                        FacetDocids,
                    >(
                        grenad_parameters,
                        document_changes,
                        indexing_context,
                        &mut extractor_allocs,
                        &extractor_sender,
                    )?;
                }

                {
                    let span = tracing::trace_span!(target: "indexing::documents::extract", "word_docids");
                    let _entered = span.enter();

                    let WordDocidsMergers {
                        word_fid_docids,
                        word_docids,
                        exact_word_docids,
                        word_position_docids,
                        fid_word_count_docids,
                    } = WordDocidsExtractors::run_extraction(grenad_parameters, document_changes, indexing_context, &mut extractor_allocs)?;
                    extractor_sender.send_searchable::<WordDocids>(word_docids).unwrap();
                    extractor_sender.send_searchable::<WordFidDocids>(word_fid_docids).unwrap();
                    extractor_sender.send_searchable::<ExactWordDocids>(exact_word_docids).unwrap();
                    extractor_sender.send_searchable::<WordPositionDocids>(word_position_docids).unwrap();
                    extractor_sender.send_searchable::<FidWordCountDocids>(fid_word_count_docids).unwrap();
                }

                // run the proximity extraction only if the precision is by word
                // this works only if the settings didn't change during this transaction.
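                // (with ByAttribute precision, the word-pair proximity database is
                // presumably not consulted at search time, so the extraction can be skipped)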
                let rtxn = index.read_txn().unwrap();
                let proximity_precision = index.proximity_precision(&rtxn)?.unwrap_or_default();
                if proximity_precision == ProximityPrecision::ByWord {
                    let span = tracing::trace_span!(target: "indexing::documents::extract", "word_pair_proximity_docids");
                    let _entered = span.enter();
                    extract_and_send_docids::<
                        _,
                        WordPairProximityDocidsExtractor,
                        WordPairProximityDocids,
                    >(
                        grenad_parameters,
                        document_changes,
                        indexing_context,
                        &mut extractor_allocs,
                        &extractor_sender,
                    )?;
                }

                {
                    let span = tracing::trace_span!(target: "indexing::documents::extract", "FINISH");
                    let _entered = span.enter();
                }

                // TODO THIS IS TOO MUCH
                // - [ ] Extract fieldid docid facet number
                // - [ ] Extract fieldid docid facet string
                // - [ ] Extract facetid string fst
                // - [ ] Extract facetid normalized string strings

                // TODO Inverted Indexes again
                // - [x] Extract fieldid facet isempty docids
                // - [x] Extract fieldid facet isnull docids
                // - [x] Extract fieldid facet exists docids

                // TODO This is the normal system
                // - [x] Extract fieldid facet number docids
                // - [x] Extract fieldid facet string docids

                Ok(()) as Result<_>
            })
        })?;

        let global_fields_ids_map = GlobalFieldsIdsMap::new(&new_fields_ids_map);

        let indexer_span = tracing::Span::current();
        // TODO manage the errors correctly
        let merger_thread = Builder::new().name(S("indexer-merger")).spawn_scoped(s, move || {
            let span =
                tracing::trace_span!(target: "indexing::documents", parent: &indexer_span, "merge");
            let _entered = span.enter();
            let rtxn = index.read_txn().unwrap();
            merge_grenad_entries(
                merger_receiver,
                merger_sender,
                &rtxn,
                index,
                global_fields_ids_map,
            )
        })?;
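
        // Only this thread touches `wtxn`: it drains the writer channel and
        // applies every database write serially while extraction and merging
        // progress on the background threads.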
        for operation in writer_receiver {
            let database = operation.database(index);
            match operation.entry() {
                EntryOperation::Delete(e) => {
                    if !database.delete(wtxn, e.entry())? {
                        unreachable!("We tried to delete an unknown key")
                    }
                }
                EntryOperation::Write(e) => database.put(wtxn, e.key(), e.value())?,
            }
        }

        // TODO handle the panicking threads
        handle.join().unwrap()?;
        let merger_result = merger_thread.join().unwrap()?;

        if let Some(facet_field_ids_delta) = merger_result.facet_field_ids_delta {
            compute_facet_level_database(index, wtxn, facet_field_ids_delta)?;
        }

        if let Some(prefix_delta) = merger_result.prefix_delta {
            compute_prefix_database(index, wtxn, prefix_delta)?;
        }

        Ok(()) as Result<_>
    })?;

    // Required so that we can call `into_inner` on `new_fields_ids_map` below.
    drop(fields_ids_map_store);

    let fields_ids_map = new_fields_ids_map.into_inner().unwrap();
    index.put_fields_ids_map(wtxn, &fields_ids_map)?;

    if let Some(new_primary_key) = new_primary_key {
        index.put_primary_key(wtxn, new_primary_key.name())?;
    }

    // used to update the localized and weighted maps while sharing the update code with the settings pipeline.
    let mut inner_index_settings = InnerIndexSettings::from_index(index, wtxn)?;
    inner_index_settings.recompute_facets(wtxn, index)?;
    inner_index_settings.recompute_searchables(wtxn, index)?;

    index.set_updated_at(wtxn, &OffsetDateTime::now_utc())?;

    Ok(())
}

#[tracing::instrument(level = "trace", skip_all, target = "indexing::prefix")]
fn compute_prefix_database(
    index: &Index,
    wtxn: &mut RwTxn,
    prefix_delta: PrefixDelta,
) -> Result<()> {
    eprintln!("prefix_delta: {:?}", &prefix_delta);
    let PrefixDelta { modified, deleted } = prefix_delta;
    // Compute word prefix docids
    compute_word_prefix_docids(wtxn, index, &modified, &deleted)?;
    // Compute exact word prefix docids
    compute_exact_word_prefix_docids(wtxn, index, &modified, &deleted)?;
    // Compute word prefix fid docids
    compute_word_prefix_fid_docids(wtxn, index, &modified, &deleted)?;
    // Compute word prefix position docids
    compute_word_prefix_position_docids(wtxn, index, &modified, &deleted)
}

#[tracing::instrument(level = "trace", skip_all, target = "indexing::facet_field_ids")]
fn compute_facet_level_database(
    index: &Index,
    wtxn: &mut RwTxn,
    facet_field_ids_delta: FacetFieldIdsDelta,
) -> Result<()> {
    eprintln!("facet_field_ids_delta: {:?}", &facet_field_ids_delta);
    if let Some(modified_facet_string_ids) = facet_field_ids_delta.modified_facet_string_ids() {
        let span = tracing::trace_span!(target: "indexing::facet_field_ids", "string");
        let _entered = span.enter();
        FacetsUpdateBulk::new_not_updating_level_0(
            index,
            modified_facet_string_ids,
            FacetType::String,
        )
        .execute(wtxn)?;
    }
    if let Some(modified_facet_number_ids) = facet_field_ids_delta.modified_facet_number_ids() {
        let span = tracing::trace_span!(target: "indexing::facet_field_ids", "number");
        let _entered = span.enter();
        FacetsUpdateBulk::new_not_updating_level_0(
            index,
            modified_facet_number_ids,
            FacetType::Number,
        )
        .execute(wtxn)?;
    }

    Ok(())
}

/// TODO: GrenadParameters::default() should be removed in favor of a passed parameter
/// TODO: manage the errors correctly
/// TODO: we must have a single trait that also gives the extractor type
fn extract_and_send_docids<
    'pl,
    'fid,
    'indexer,
    'index,
    DC: DocumentChanges<'pl>,
    E: DocidsExtractor,
    D: MergerOperationType,
>(
    grenad_parameters: GrenadParameters,
    document_changes: &DC,
    indexing_context: IndexingContext<'fid, 'indexer, 'index>,
    extractor_allocs: &mut ThreadLocal<FullySend<RefCell<Bump>>>,
    sender: &ExtractorSender,
) -> Result<()> {
    let merger =
        E::run_extraction(grenad_parameters, document_changes, indexing_context, extractor_allocs)?;
    sender.send_searchable::<D>(merger).unwrap();
    Ok(())
}

/// Returns the primary key that has already been set for this index or the
/// one we will guess by looking for a key that ends with "id" (case-insensitively)
/// in the first document, and whether the primary key changed
/// TODO move this elsewhere
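///
/// # Example
///
/// Assuming no primary key is set in the index and none is requested by the
/// operation, a first document like `{ "title": "Wok", "product_id": "42" }`
/// has exactly one key ending in "id", so `product_id` is inferred as the
/// primary key and the returned flag is `true`.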
pub fn retrieve_or_guess_primary_key<'a>(
    rtxn: &'a RoTxn<'a>,
    index: &Index,
    new_fields_ids_map: &mut FieldsIdsMap,
    primary_key_from_op: Option<&'a str>,
    first_document: Option<&'a TopLevelMap<'a>>,
) -> Result<StdResult<(PrimaryKey<'a>, bool), UserError>> {
    // make sure that we have a declared primary key, either fetching it from the index or attempting to guess it.

    // do we have an existing declared primary key?
    let (primary_key, has_changed) = if let Some(primary_key_from_db) = index.primary_key(rtxn)? {
        // did we request a primary key in the operation?
        match primary_key_from_op {
            // we did, and it is different from the DB one
            Some(primary_key_from_op) if primary_key_from_op != primary_key_from_db => {
                // is the index empty?
                if index.number_of_documents(rtxn)? == 0 {
                    // change primary key
                    (primary_key_from_op, true)
                } else {
                    return Ok(Err(UserError::PrimaryKeyCannotBeChanged(
                        primary_key_from_db.to_string(),
                    )));
                }
            }
            _ => (primary_key_from_db, false),
        }
    } else {
        // no primary key in the DB => let's set one
        // did we request a primary key in the operation?
        let primary_key = if let Some(primary_key_from_op) = primary_key_from_op {
            // set primary key from operation
            primary_key_from_op
        } else {
            // guess primary key
            let first_document = match first_document {
                Some(document) => document,
                // previous indexer when no pk is set + we send an empty payload => index_primary_key_no_candidate_found
                None => return Ok(Err(UserError::NoPrimaryKeyCandidateFound)),
            };

            let mut guesses: Vec<&str> = first_document
                .keys()
                .map(AsRef::as_ref)
                .filter(|name| name.to_lowercase().ends_with(DEFAULT_PRIMARY_KEY))
                .collect();

            // sort the keys in lexicographical order, so that fields are always in the same order.
            guesses.sort_unstable();

            match guesses.as_slice() {
                [] => return Ok(Err(UserError::NoPrimaryKeyCandidateFound)),
                [name] => {
                    tracing::info!("Primary key was not specified in index. Inferred to '{name}'");
                    *name
                }
                multiple => {
                    return Ok(Err(UserError::MultiplePrimaryKeyCandidatesFound {
                        candidates: multiple
                            .iter()
                            .map(|candidate| candidate.to_string())
                            .collect(),
                    }))
                }
            }
        };
        (primary_key, true)
    };

    match PrimaryKey::new_or_insert(primary_key, new_fields_ids_map) {
        Ok(primary_key) => Ok(Ok((primary_key, has_changed))),
        Err(err) => Ok(Err(err)),
    }
}