Mirror of https://github.com/meilisearch/meilisearch.git (synced 2024-11-22 10:07:40 +08:00)
Fix the errors when using the try_map_try_init method
commit f3356ddaa4 (parent 31de5c747e)
@@ -1,6 +1,7 @@
 use std::collections::HashMap;
 use std::fs::File;
 use std::num::NonZero;
+use std::sync::Arc;
 
 use grenad::{Merger, MergerBuilder};
 use heed::RoTxn;
@@ -12,7 +13,7 @@ use crate::update::new::extract::perm_json_p::contained_in;
 use crate::update::new::{DocumentChange, ItemsPool};
 use crate::update::{create_sorter, GrenadParameters, MergeDeladdCboRoaringBitmaps};
 use crate::{
-    bucketed_position, DocumentId, FieldId, GlobalFieldsIdsMap, Index, Result,
+    bucketed_position, DocumentId, Error, FieldId, GlobalFieldsIdsMap, Index, Result,
     MAX_POSITION_PER_ATTRIBUTE,
 };
 
@@ -303,7 +304,9 @@ impl WordDocidsExtractors {
         index: &Index,
         fields_ids_map: &GlobalFieldsIdsMap,
         indexer: GrenadParameters,
-        document_changes: impl IntoParallelIterator<Item = Result<DocumentChange>>,
+        document_changes: impl IntoParallelIterator<
+            Item = std::result::Result<DocumentChange, Arc<Error>>,
+        >,
     ) -> Result<WordDocidsMergers> {
         let max_memory = indexer.max_memory_by_thread();
 
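The signature change above is the core of the fix: each item yielded by the parallel iterator is now a std::result::Result whose error side is Arc<Error>, so one upstream failure can be handed out to many items without the error type having to implement Clone. A minimal standalone sketch of that idea, with a hypothetical HeavyError standing in for the crate's error type:

use std::sync::Arc;

// Hypothetical stand-in for an error type that does not implement Clone.
#[derive(Debug)]
struct HeavyError(String);

fn main() {
    // One failure can back many per-item results: cloning the Arc only bumps a
    // reference count, so no Clone impl is needed on the error itself.
    let failure = Arc::new(HeavyError("could not open the index".into()));
    let items: Vec<Result<u32, Arc<HeavyError>>> =
        (0..3).map(|_| Err(Arc::clone(&failure))).collect();
    assert!(items.iter().all(|item| item.is_err()));
}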
@@ -1,8 +1,11 @@
-use rayon::iter::IndexedParallelIterator;
+use std::sync::Arc;
+
+use rayon::iter::{IndexedParallelIterator, ParallelBridge, ParallelIterator};
 
 use super::DocumentChanges;
 use crate::documents::{DocumentIdExtractionError, PrimaryKey};
 use crate::update::concurrent_available_ids::ConcurrentAvailableIds;
+use crate::update::new::items_pool::ParallelIteratorExt;
 use crate::update::new::{DocumentChange, Insertion, KvWriterFieldId};
 use crate::{all_obkv_to_json, Error, FieldsIdsMap, Object, Result, UserError};
 
@@ -37,7 +40,9 @@ where
     > {
         let (fields_ids_map, concurrent_available_ids, primary_key) = param;
 
-        Ok(self.iter.map(|object| {
+        Ok(self.iter.try_map_try_init(
+            || Ok(()),
+            |_, object| {
             let docid = match concurrent_available_ids.next() {
                 Some(id) => id,
                 None => return Err(Error::UserError(UserError::DocumentLimitReached)),
@@ -55,7 +60,9 @@ where
             let document = writer.into_boxed();
             let external_docid = match primary_key.document_id(&document, fields_ids_map)? {
                 Ok(document_id) => Ok(document_id),
-                Err(DocumentIdExtractionError::InvalidDocumentId(user_error)) => Err(user_error),
+                Err(DocumentIdExtractionError::InvalidDocumentId(user_error)) => {
+                    Err(user_error)
+                }
                 Err(DocumentIdExtractionError::MissingDocumentId) => {
                     Err(UserError::MissingDocumentId {
                         primary_key: primary_key.name().to_string(),
@@ -72,6 +79,7 @@ where
 
             let insertion = Insertion::create(docid, document);
             Ok(DocumentChange::Insertion(insertion))
-        }))
+            },
+        ))
     }
 }
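The call-site change above swaps the single map closure for the two arguments try_map_try_init expects: a fallible init closure (no per-thread state is needed here, hence || Ok(())) and a fallible map closure that receives the state and the item. Since the method's full signature is not visible on this page, the following is only a rough sketch of the same call shape written directly against rayon's map_init, with hypothetical Change and IndexError types standing in for DocumentChange and the crate error:

use std::sync::Arc;

use rayon::iter::{IntoParallelIterator, ParallelIterator};

// Hypothetical stand-ins for DocumentChange and the crate error type.
#[derive(Debug)]
struct Change(u32);
#[derive(Debug)]
struct IndexError(&'static str);

fn main() {
    // Same shape as the diff: a fallible, stateless init (`|| Ok(())`) and a
    // fallible map closure taking `(state, item)`. Errors are wrapped in an
    // Arc so one failure can be reused for every item without Clone.
    let changes: Vec<Result<Change, Arc<IndexError>>> = (0u32..4)
        .into_par_iter()
        .map_init(
            || Ok::<(), Arc<IndexError>>(()),
            |state, object| match state {
                Ok(()) => {
                    if object == 3 {
                        Err(Arc::new(IndexError("document limit reached")))
                    } else {
                        Ok(Change(object))
                    }
                }
                Err(e) => Err(Arc::clone(e)),
            },
        )
        .collect();

    assert_eq!(changes.iter().filter(|change| change.is_err()).count(), 1);
}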
@@ -4,7 +4,9 @@ use crossbeam_channel::{Receiver, Sender, TryRecvError};
 use rayon::iter::{MapInit, ParallelIterator};
 
 pub trait ParallelIteratorExt: ParallelIterator {
-    /// A method on a parallel iterator to map
+    /// Maps items based on the init function.
+    ///
+    /// The init function is ran only as necessary which is basically once by thread.
     fn try_map_try_init<F, INIT, T, E, R>(
         self,
         init: INIT,
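Only the start of the method signature falls inside this hunk, so the return type and the bounds on F, INIT, T, E and R are not shown. As a sketch of what the doc comment describes (an init run roughly once per worker thread, a fallible map, and errors shared through an Arc), a free-standing equivalent could be built on rayon's map_init along these lines; the free-function form and the impl ParallelIterator return type are simplifications, not the commit's actual API:

use std::sync::Arc;

use rayon::iter::ParallelIterator;

// Hypothetical free-standing version of try_map_try_init, built on rayon's
// map_init: a fallible per-thread init plus a fallible per-item map, with
// errors wrapped in an Arc so they can be shared without a Clone bound.
pub fn try_map_try_init<I, INIT, F, T, E, R>(
    iter: I,
    init: INIT,
    map_op: F,
) -> impl ParallelIterator<Item = Result<R, Arc<E>>>
where
    I: ParallelIterator,
    INIT: Fn() -> Result<T, E> + Sync + Send,
    F: Fn(&mut T, I::Item) -> Result<R, E> + Sync + Send,
    E: Send + Sync,
    R: Send,
{
    iter.map_init(
        // map_init runs this only as needed, roughly once per worker thread;
        // a failed init is kept around as an Arc'd error for later items.
        move || init().map_err(Arc::new),
        // Items on a thread whose init succeeded get the real map result;
        // items on a thread whose init failed all get a clone of that error.
        move |state, item| match state {
            Ok(state) => map_op(state, item).map_err(Arc::new),
            Err(e) => Err(Arc::clone(e)),
        },
    )
}

The real method presumably returns the MapInit adapter imported at the top of the hunk; what matters for the call site shown earlier is the shape of the two closures.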