Appease *some* of the clippy warnings

Louis Dureuil 2024-09-30 16:08:29 +02:00
parent 8df6daf308
commit 64589278ac
7 changed files with 13 additions and 12 deletions

View File

@@ -122,7 +122,7 @@ impl<'a, 'i> Transform<'a, 'i> {
         // We initialize the sorter with the user indexing settings.
         let original_sorter = create_sorter(
             grenad::SortAlgorithm::Stable,
-            merge_function.clone(),
+            merge_function,
             indexer_settings.chunk_compression_type,
             indexer_settings.chunk_compression_level,
             indexer_settings.max_nb_chunks,

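The fix above removes a `.clone()` on a value that is never used again, the pattern clippy reports as `redundant_clone`. A minimal sketch of the same shape, with hypothetical `MergeFn`/`create_sorter` stand-ins for the crate's actual types:

    #[derive(Clone)]
    struct MergeFn;

    fn create_sorter(_merge: MergeFn) {}

    fn main() {
        let merge_function = MergeFn;
        // Before: `create_sorter(merge_function.clone())` copied the value
        // even though `merge_function` is never touched again afterwards.
        // After: move it into the call, as the commit does.
        create_sorter(merge_function);
    }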
View File

@@ -518,7 +518,7 @@ impl DocumentSender<'_> {
 impl Drop for DocumentSender<'_> {
     fn drop(&mut self) {
         if let Some(sender) = self.0.take() {
-            sender.send(MergerOperation::FinishedDocument);
+            let _ = sender.send(MergerOperation::FinishedDocument);
         }
     }
 }

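Here the `Result` returned by `send` was being dropped silently inside a `Drop` impl, which trips the compiler's `unused_must_use` warning; binding it to `_` documents that a failed send during teardown is deliberately ignored. A self-contained sketch using `std::sync::mpsc` in place of the channel type the crate actually uses:

    use std::sync::mpsc::{channel, Sender};

    struct DocumentSender(Option<Sender<&'static str>>);

    impl Drop for DocumentSender {
        fn drop(&mut self) {
            if let Some(sender) = self.0.take() {
                // `send` returns a Result, and Drop cannot propagate errors;
                // `let _ =` states explicitly that failure is acceptable here.
                let _ = sender.send("finished");
            }
        }
    }

    fn main() {
        let (tx, rx) = channel();
        drop(DocumentSender(Some(tx)));
        assert_eq!(rx.recv().unwrap(), "finished");
    }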
View File

@@ -58,7 +58,7 @@ pub mod perm_json_p {
         seeker: &mut impl FnMut(&str, &Value) -> Result<()>,
     ) -> Result<()> {
         if value.is_empty() {
-            seeker(&base_key, &Value::Object(Map::with_capacity(0)))?;
+            seeker(base_key, &Value::Object(Map::with_capacity(0)))?;
         }
         for (key, value) in value.iter() {
@@ -103,7 +103,7 @@ pub mod perm_json_p {
         seeker: &mut impl FnMut(&str, &Value) -> Result<()>,
     ) -> Result<()> {
         if values.is_empty() {
-            seeker(&base_key, &Value::Array(vec![]))?;
+            seeker(base_key, &Value::Array(vec![]))?;
         }
         for value in values {
@@ -128,10 +128,10 @@ pub mod perm_json_p {
     ) -> bool {
         selectors.map_or(true, |selectors| {
             selectors.iter().any(|selector| {
-                contained_in(selector, &field_name) || contained_in(&field_name, selector)
+                contained_in(selector, field_name) || contained_in(field_name, selector)
             })
         }) && !skip_selectors.iter().any(|skip_selector| {
-            contained_in(skip_selector, &field_name) || contained_in(&field_name, skip_selector)
+            contained_in(skip_selector, field_name) || contained_in(field_name, skip_selector)
         })
     }
 }

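All three hunks in this file drop a borrow of something that is already a reference (`base_key` and `field_name` are `&str`), which clippy flags as `needless_borrow`. A small illustration, with a hypothetical `seeker` function:

    fn seeker(key: &str) -> usize {
        key.len()
    }

    fn main() {
        let base_key: &str = "user.name";
        // Before: `seeker(&base_key)` handed over a `&&str`; auto-deref makes
        // it compile, but the extra borrow is noise and clippy reports it.
        // After: pass the existing `&str` straight through.
        assert_eq!(seeker(base_key), 9);
    }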
View File

@@ -48,7 +48,7 @@ impl<'a> DocumentTokenizer<'a> {
                 .entry(field_id)
                 .and_modify(|counter| *counter += MAX_DISTANCE)
                 .or_insert(0);
-            if *position as u32 >= self.max_positions_per_attributes {
+            if *position >= self.max_positions_per_attributes {
                 return Ok(());
             }
@@ -72,7 +72,7 @@ impl<'a> DocumentTokenizer<'a> {
                 *position,
                 self.tokenizer.tokenize_with_allow_list(text.as_str(), locales),
             )
-            .take_while(|(p, _)| (*p as u32) < self.max_positions_per_attributes);
+            .take_while(|(p, _)| *p < self.max_positions_per_attributes);
             for (index, token) in tokens {
                 // keep a word only if it is not empty and fit in a LMDB key.

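Both hunks delete an `as u32` cast applied to a value that is evidently already a `u32`, the case clippy reports as `unnecessary_cast`. A sketch of the pattern (the concrete types are an assumption drawn from the comparison in the diff):

    fn main() {
        let position: u32 = 1_000;
        let max_positions_per_attributes: u32 = 65_535;

        // Before: `position as u32 >= max` cast a u32 to itself, a no-op
        // that clippy flags. After: compare the values directly.
        if position >= max_positions_per_attributes {
            println!("attribute position limit reached");
        }
    }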
View File

@@ -2,7 +2,7 @@ use std::borrow::Cow;
 use std::collections::{BTreeMap, HashMap};
 use std::sync::Arc;
-use heed::types::{Bytes, DecodeIgnore};
+use heed::types::Bytes;
 use heed::RoTxn;
 use memmap2::Mmap;
 use rayon::iter::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator};

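This hunk, like the import fixes in the two files below, simply prunes names that are no longer referenced; rustc's `unused_imports` warning (reported alongside clippy's output) catches them. Trivially:

    use std::collections::HashMap; // referenced below, so it stays
    // use std::collections::BTreeMap; // never referenced: warns, so it goes

    fn main() {
        let mut map = HashMap::new();
        map.insert("key", 1);
        println!("{map:?}");
    }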
View File

@@ -6,7 +6,7 @@ pub use document_deletion::DocumentDeletion;
 pub use document_operation::DocumentOperation;
 use heed::{RoTxn, RwTxn};
 pub use partial_dump::PartialDump;
-use rayon::iter::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator};
+use rayon::iter::{IndexedParallelIterator, IntoParallelIterator};
 use rayon::ThreadPool;
 pub use update_by_function::UpdateByFunction;
@@ -229,7 +229,8 @@ fn extract_and_send_docids<E: DocidsExtractor, D: MergerOperationType>(
     sender: &ExtractorSender,
 ) -> Result<()> {
     let merger = E::run_extraction(index, fields_ids_map, indexer, document_changes)?;
-    Ok(sender.send_searchable::<D>(merger).unwrap())
+    sender.send_searchable::<D>(merger).unwrap();
+    Ok(())
 }

 /// Returns the primary key *field id* that has already been set for this index or the

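The second hunk unwraps `Ok(expr)` where `expr` evaluates to `()`: passing a unit value as an argument is what clippy's `unit_arg` lint objects to, and the suggested form is to run the expression as a statement and return a literal `Ok(())`. A sketch with a hypothetical `send_searchable`:

    fn send_searchable() {
        println!("merger handed to the sender");
    }

    fn extract_and_send() -> Result<(), String> {
        // Before: `Ok(send_searchable())` buried the side effect inside the
        // argument to `Ok`. After: statement first, then `Ok(())`.
        send_searchable();
        Ok(())
    }

    fn main() {
        extract_and_send().unwrap();
    }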
View File

@@ -1,4 +1,4 @@
-use rayon::iter::{IndexedParallelIterator, ParallelBridge, ParallelIterator};
+use rayon::iter::IndexedParallelIterator;

 use super::DocumentChanges;
 use crate::documents::{DocumentIdExtractionError, PrimaryKey};