Create a new struct to reduce the type complexity

Clément Renault 2023-03-09 13:21:21 +01:00
parent e064c52544
commit 0ad53784e7
2 changed files with 18 additions and 12 deletions

extract_fid_docid_facet_values.rs

@@ -16,18 +16,24 @@ use crate::facet::value_encoding::f64_into_bytes;
 use crate::update::index_documents::{create_writer, writer_into_reader};
 use crate::{CboRoaringBitmapCodec, DocumentId, FieldId, Result, BEU32, MAX_FACET_VALUE_LENGTH};
 
+/// The extracted facet values stored in grenad files by type.
+pub struct ExtractedFacetValues {
+    pub docid_fid_facet_numbers_chunk: grenad::Reader<File>,
+    pub docid_fid_facet_strings_chunk: grenad::Reader<File>,
+    pub fid_facet_is_null_docids_chunk: grenad::Reader<File>,
+    pub fid_facet_exists_docids_chunk: grenad::Reader<File>,
+}
+
 /// Extracts the facet values of each faceted field of each document.
 ///
 /// Returns the generated grenad reader containing the docid the fid and the orginal value as key
 /// and the normalized value as value extracted from the given chunk of documents.
 #[logging_timer::time]
-#[allow(clippy::type_complexity)]
 pub fn extract_fid_docid_facet_values<R: io::Read + io::Seek>(
     obkv_documents: grenad::Reader<R>,
     indexer: GrenadParameters,
     faceted_fields: &HashSet<FieldId>,
-) -> Result<(grenad::Reader<File>, grenad::Reader<File>, grenad::Reader<File>, grenad::Reader<File>)>
-{
+) -> Result<ExtractedFacetValues> {
     let max_memory = indexer.max_memory_by_thread();
 
     let mut fid_docid_facet_numbers_sorter = create_sorter(
@@ -134,12 +140,12 @@ pub fn extract_fid_docid_facet_values<R: io::Read + io::Seek>(
     }
     let facet_is_null_docids_reader = writer_into_reader(facet_is_null_docids_writer)?;
 
-    Ok((
-        sorter_into_reader(fid_docid_facet_numbers_sorter, indexer)?,
-        sorter_into_reader(fid_docid_facet_strings_sorter, indexer)?,
-        facet_is_null_docids_reader,
-        facet_exists_docids_reader,
-    ))
+    Ok(ExtractedFacetValues {
+        docid_fid_facet_numbers_chunk: sorter_into_reader(fid_docid_facet_numbers_sorter, indexer)?,
+        docid_fid_facet_strings_chunk: sorter_into_reader(fid_docid_facet_strings_sorter, indexer)?,
+        fid_facet_is_null_docids_chunk: facet_is_null_docids_reader,
+        fid_facet_exists_docids_chunk: facet_exists_docids_reader,
+    })
 }
 
 /// Represent what a document field contains.
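
The change above is the usual Rust cure for a clippy::type_complexity warning: instead of returning a four-element tuple of readers, the function returns a struct whose field names say what each reader contains, and the #[allow] escape hatch can be dropped. Here is a minimal, self-contained sketch of that pattern; the names (ExtractedChunks, extract) are illustrative, not the milli types.

use std::io::{self, Cursor};

/// Named fields instead of a (Reader, Reader, Reader, Reader) tuple:
/// the type now documents what each chunk holds.
pub struct ExtractedChunks<R> {
    pub numbers_chunk: R,
    pub strings_chunk: R,
    pub is_null_docids_chunk: R,
    pub exists_docids_chunk: R,
}

/// Returning the struct keeps the signature short, so no
/// #[allow(clippy::type_complexity)] is needed on the function.
pub fn extract(_input: &[u8]) -> io::Result<ExtractedChunks<Cursor<Vec<u8>>>> {
    Ok(ExtractedChunks {
        numbers_chunk: Cursor::new(Vec::new()),
        strings_chunk: Cursor::new(Vec::new()),
        is_null_docids_chunk: Cursor::new(Vec::new()),
        exists_docids_chunk: Cursor::new(Vec::new()),
    })
}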

extract/mod.rs

@@ -18,7 +18,7 @@ use rayon::prelude::*;
 use self::extract_docid_word_positions::extract_docid_word_positions;
 use self::extract_facet_number_docids::extract_facet_number_docids;
 use self::extract_facet_string_docids::extract_facet_string_docids;
-use self::extract_fid_docid_facet_values::extract_fid_docid_facet_values;
+use self::extract_fid_docid_facet_values::{extract_fid_docid_facet_values, ExtractedFacetValues};
 use self::extract_fid_word_count_docids::extract_fid_word_count_docids;
 use self::extract_geo_points::extract_geo_points;
 use self::extract_word_docids::extract_word_docids;
@@ -300,12 +300,12 @@ fn send_and_extract_flattened_documents_data(
             Ok(docid_word_positions_chunk)
         },
         || {
-            let (
-                docid_fid_facet_numbers_chunk,
-                docid_fid_facet_strings_chunk,
-                fid_facet_is_null_docids_chunk,
-                fid_facet_exists_docids_chunk,
-            ) = extract_fid_docid_facet_values(
+            let ExtractedFacetValues {
+                docid_fid_facet_numbers_chunk,
+                docid_fid_facet_strings_chunk,
+                fid_facet_is_null_docids_chunk,
+                fid_facet_exists_docids_chunk,
+            } = extract_fid_docid_facet_values(
                 flattened_documents_chunk.clone(),
                 indexer,
                 faceted_fields,
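
At the call site, the only change is that tuple destructuring becomes a struct pattern, which binds each field to a local of the same name. Continuing the hypothetical sketch above (same ExtractedChunks type and extract function):

fn main() -> io::Result<()> {
    // Struct patterns mirror the `let ExtractedFacetValues { .. } = ...`
    // binding in the diff: each field lands in a variable with its own name.
    let ExtractedChunks {
        numbers_chunk,
        strings_chunk,
        is_null_docids_chunk,
        exists_docids_chunk,
    } = extract(b"")?;

    // Each reader can now be handed to its own downstream consumer.
    drop((numbers_chunk, strings_chunk, is_null_docids_chunk, exists_docids_chunk));
    Ok(())
}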