diff --git a/filter-parser/src/condition.rs b/filter-parser/src/condition.rs
index e967bd074..735ffec0e 100644
--- a/filter-parser/src/condition.rs
+++ b/filter-parser/src/condition.rs
@@ -48,17 +48,14 @@ pub fn parse_condition(input: Span) -> IResult<FilterCondition> {
 pub fn parse_exists(input: Span) -> IResult<FilterCondition> {
     let (input, key) = terminated(parse_value, tag("EXISTS"))(input)?;
-    Ok((input, FilterCondition::Condition { fid: key.into(), op: Exists }))
+    Ok((input, FilterCondition::Condition { fid: key, op: Exists }))
 }
 
 /// exist = value "NOT" WS+ "EXISTS"
 pub fn parse_not_exists(input: Span) -> IResult<FilterCondition> {
     let (input, key) = parse_value(input)?;
 
     let (input, _) = tuple((tag("NOT"), multispace1, tag("EXISTS")))(input)?;
-    Ok((
-        input,
-        FilterCondition::Not(Box::new(FilterCondition::Condition { fid: key.into(), op: Exists })),
-    ))
+    Ok((input, FilterCondition::Not(Box::new(FilterCondition::Condition { fid: key, op: Exists }))))
 }
 
 /// to = value value "TO" WS+ value
diff --git a/filter-parser/src/lib.rs b/filter-parser/src/lib.rs
index 3af270557..8c1431d93 100644
--- a/filter-parser/src/lib.rs
+++ b/filter-parser/src/lib.rs
@@ -168,7 +168,7 @@ fn ws<'a, O>(inner: impl FnMut(Span<'a>) -> IResult<O>) -> impl FnMut(Span<'a>)
 }
 
 /// value_list = (value ("," value)* ","?)?
-fn parse_value_list<'a>(input: Span<'a>) -> IResult<Vec<Token<'a>>> {
+fn parse_value_list(input: Span) -> IResult<Vec<Token>> {
     let (input, first_value) = opt(parse_value)(input)?;
     if let Some(first_value) = first_value {
         let value_list_el_parser = preceded(ws(tag(",")), parse_value);
@@ -335,13 +335,11 @@ fn parse_error_reserved_keyword(input: Span) -> IResult<FilterCondition> {
         Ok(result) => Ok(result),
         Err(nom::Err::Error(inner) | nom::Err::Failure(inner)) => match inner.kind() {
             ErrorKind::ExpectedValue(ExpectedValueKind::ReservedKeyword) => {
-                return Err(nom::Err::Failure(inner));
+                Err(nom::Err::Failure(inner))
             }
-            _ => return Err(nom::Err::Error(inner)),
+            _ => Err(nom::Err::Error(inner)),
         },
-        Err(e) => {
-            return Err(e);
-        }
+        Err(e) => Err(e),
     }
 }
@@ -401,7 +399,7 @@ pub mod tests {
     fn parse() {
         use FilterCondition as Fc;
 
-        fn p<'a>(s: &'a str) -> impl std::fmt::Display + 'a {
+        fn p(s: &str) -> impl std::fmt::Display + '_ {
             Fc::parse(s).unwrap().unwrap()
         }
@@ -494,7 +492,7 @@ pub mod tests {
     fn error() {
         use FilterCondition as Fc;
 
-        fn p<'a>(s: &'a str) -> impl std::fmt::Display + 'a {
+        fn p(s: &str) -> impl std::fmt::Display + '_ {
             Fc::parse(s).unwrap_err().to_string()
         }
diff --git a/filter-parser/src/value.rs b/filter-parser/src/value.rs
index d015018c1..73ef61480 100644
--- a/filter-parser/src/value.rs
+++ b/filter-parser/src/value.rs
@@ -78,7 +78,7 @@ pub fn word_exact<'a, 'b: 'a>(tag: &'b str) -> impl Fn(Span<'a>) -> IResult<'a,
 }
 
 /// value = WS* ( word | singleQuoted | doubleQuoted) WS+
-pub fn parse_value<'a>(input: Span<'a>) -> IResult<Token<'a>> {
+pub fn parse_value(input: Span) -> IResult<Token> {
     // to get better diagnostic message we are going to strip the left whitespaces from the input right now
     let (input, _) = take_while(char::is_whitespace)(input)?;
diff --git a/milli/src/heed_codec/facet/mod.rs b/milli/src/heed_codec/facet/mod.rs
index 4609bfe7f..d36ec8434 100644
--- a/milli/src/heed_codec/facet/mod.rs
+++ b/milli/src/heed_codec/facet/mod.rs
@@ -88,8 +88,7 @@ impl<'a> heed::BytesEncode<'a> for FacetGroupValueCodec {
     type EItem = FacetGroupValue;
 
     fn bytes_encode(value: &'a Self::EItem) -> Option<Cow<'a, [u8]>> {
-        let mut v = vec![];
-        v.push(value.size);
+        let mut v = vec![value.size];
         CboRoaringBitmapCodec::serialize_into(&value.bitmap, &mut v);
         Some(Cow::Owned(v))
     }
 }
diff --git a/milli/src/index.rs b/milli/src/index.rs
index 8b1e4d8ff..5910a305c 100644
--- a/milli/src/index.rs
+++ b/milli/src/index.rs
@@ -1234,7 +1234,7 @@ pub(crate) mod tests {
         {
             let builder = IndexDocuments::new(
                 wtxn,
-                &self,
+                self,
                 &self.indexer_config,
                 self.index_documents_config.clone(),
                 |_| (),
diff --git a/milli/src/lib.rs b/milli/src/lib.rs
index 28f048b8a..c33aae9eb 100644
--- a/milli/src/lib.rs
+++ b/milli/src/lib.rs
@@ -1,6 +1,4 @@
 #![cfg_attr(all(test, fuzzing), feature(no_coverage))]
-#![allow(clippy::reversed_empty_ranges)]
-#![allow(clippy::too_many_arguments)]
 
 #[macro_use]
 pub mod documents;
diff --git a/milli/src/search/criteria/asc_desc.rs b/milli/src/search/criteria/asc_desc.rs
index 92c73709b..fbcf1d3fe 100644
--- a/milli/src/search/criteria/asc_desc.rs
+++ b/milli/src/search/criteria/asc_desc.rs
@@ -242,6 +242,7 @@ fn iterative_facet_number_ordered_iter<'t>(
     // The itertools GroupBy iterator doesn't provide an owned version, we are therefore
     // required to collect the result into an owned collection (a Vec).
     // https://github.com/rust-itertools/itertools/issues/499
+    #[allow(clippy::needless_collect)]
     let vec: Vec<_> = iter
         .group_by(|(_, v)| *v)
         .into_iter()
@@ -284,6 +285,7 @@ fn iterative_facet_string_ordered_iter<'t>(
     // The itertools GroupBy iterator doesn't provide an owned version, we are therefore
     // required to collect the result into an owned collection (a Vec).
     // https://github.com/rust-itertools/itertools/issues/499
+    #[allow(clippy::needless_collect)]
     let vec: Vec<_> = iter
         .group_by(|(_, v)| *v)
         .into_iter()
diff --git a/milli/src/search/criteria/attribute.rs b/milli/src/search/criteria/attribute.rs
index 679381838..fd567a7ac 100644
--- a/milli/src/search/criteria/attribute.rs
+++ b/milli/src/search/criteria/attribute.rs
@@ -179,6 +179,7 @@ impl<'t> Criterion for Attribute<'t> {
 /// QueryPositionIterator is an Iterator over positions of a Query,
 /// It contains iterators over words positions.
 struct QueryPositionIterator<'t> {
+    #[allow(clippy::type_complexity)]
     inner:
         Vec<Peekable<Box<dyn Iterator<Item = heed::Result<((&'t str, u32), RoaringBitmap)>> + 't>>>,
 }
diff --git a/milli/src/search/criteria/mod.rs b/milli/src/search/criteria/mod.rs
index 3657df73e..8d0e3af05 100644
--- a/milli/src/search/criteria/mod.rs
+++ b/milli/src/search/criteria/mod.rs
@@ -96,6 +96,7 @@ pub trait Context<'c> {
         &self,
         docid: DocumentId,
     ) -> heed::Result<HashMap<String, RoaringBitmap>>;
+    #[allow(clippy::type_complexity)]
    fn word_position_iterator(
         &self,
         word: &str,
@@ -883,7 +884,7 @@ pub mod test {
         let mut keys = word_docids.keys().collect::<Vec<_>>();
         keys.sort_unstable();
 
-        let words_fst = fst::Set::from_iter(keys).unwrap().map_data(|v| Cow::Owned(v)).unwrap();
+        let words_fst = fst::Set::from_iter(keys).unwrap().map_data(Cow::Owned).unwrap();
 
         TestContext {
             words_fst,
diff --git a/milli/src/search/distinct/facet_distinct.rs b/milli/src/search/distinct/facet_distinct.rs
index 1725346be..3ed683823 100644
--- a/milli/src/search/distinct/facet_distinct.rs
+++ b/milli/src/search/distinct/facet_distinct.rs
@@ -123,6 +123,7 @@ impl<'a> FacetDistinctIter<'a> {
     }
 }
 
+#[allow(clippy::drop_non_drop)]
 fn facet_values_prefix_key(distinct: FieldId, id: DocumentId) -> [u8; FID_SIZE + DOCID_SIZE] {
     concat_arrays!(distinct.to_be_bytes(), id.to_be_bytes())
 }
diff --git a/milli/src/search/distinct/mod.rs b/milli/src/search/distinct/mod.rs
index b6ed26917..3a46bb469 100644
--- a/milli/src/search/distinct/mod.rs
+++ b/milli/src/search/distinct/mod.rs
@@ -110,7 +110,7 @@ mod test {
         addition.execute().unwrap();
 
         let fields_map = index.fields_ids_map(&txn).unwrap();
-        let fid = fields_map.id(&distinct).unwrap();
+        let fid = fields_map.id(distinct).unwrap();
 
         let documents = DocumentsBatchReader::from_reader(Cursor::new(JSON.as_slice())).unwrap();
         let map = (0..documents.documents_count() as u32).collect();
@@ -133,7 +133,7 @@ mod test {
                 let s = value.to_string();
                 assert!(seen.insert(s));
             }
-            Value::Array(values) => values.into_iter().for_each(|value| test(seen, value)),
+            Value::Array(values) => values.iter().for_each(|value| test(seen, value)),
         }
     }
diff --git a/milli/src/search/facet/facet_distribution_iter.rs b/milli/src/search/facet/facet_distribution_iter.rs
index 9cd85b667..6e209c7aa 100644
--- a/milli/src/search/facet/facet_distribution_iter.rs
+++ b/milli/src/search/facet/facet_distribution_iter.rs
@@ -38,9 +38,9 @@ where
     if let Some(first_bound) = get_first_facet_value::<ByteSliceRefCodec>(rtxn, db, field_id)? {
         fd.iterate(candidates, highest_level, first_bound, usize::MAX)?;
-        return Ok(());
+        Ok(())
     } else {
-        return Ok(());
+        Ok(())
     }
 }
@@ -84,7 +84,7 @@ where
                 }
             }
         }
-        return Ok(ControlFlow::Continue(()));
+        Ok(ControlFlow::Continue(()))
     }
 
     fn iterate(
         &mut self,
@@ -98,7 +98,7 @@ where
         }
         let starting_key =
             FacetGroupKey { field_id: self.field_id, level, left_bound: starting_bound };
-        let iter = self.db.range(&self.rtxn, &(&starting_key..)).unwrap().take(group_size);
+        let iter = self.db.range(self.rtxn, &(&starting_key..)).unwrap().take(group_size);
 
         for el in iter {
             let (key, value) = el.unwrap();
@@ -108,7 +108,7 @@ where
                 return Ok(ControlFlow::Break(()));
             }
             let docids_in_common = value.bitmap & candidates;
-            if docids_in_common.len() > 0 {
+            if !docids_in_common.is_empty() {
                 let cf = self.iterate(
                     &docids_in_common,
                     level - 1,
@@ -121,8 +121,7 @@ where
                 }
             }
         }
-
-        return Ok(ControlFlow::Continue(()));
+        Ok(ControlFlow::Continue(()))
     }
 }
diff --git a/milli/src/search/facet/facet_range_search.rs b/milli/src/search/facet/facet_range_search.rs
index 07300e920..e8eeab1cc 100644
--- a/milli/src/search/facet/facet_range_search.rs
+++ b/milli/src/search/facet/facet_range_search.rs
@@ -60,7 +60,7 @@ where
         f.run(highest_level, starting_left_bound, rightmost_bound, group_size)?;
         Ok(())
     } else {
-        return Ok(());
+        Ok(())
     }
 }
@@ -77,7 +77,7 @@ impl<'t, 'b, 'bitmap> FacetRangeSearch<'t, 'b, 'bitmap> {
     fn run_level_0(&mut self, starting_left_bound: &'t [u8], group_size: usize) -> Result<()> {
         let left_key =
             FacetGroupKey { field_id: self.field_id, level: 0, left_bound: starting_left_bound };
-        let iter = self.db.range(&self.rtxn, &(left_key..))?.take(group_size);
+        let iter = self.db.range(self.rtxn, &(left_key..))?.take(group_size);
         for el in iter {
             let (key, value) = el?;
             // the right side of the iter range is unbounded, so we need to make sure that we are not iterating
@@ -145,7 +145,7 @@ impl<'t, 'b, 'bitmap> FacetRangeSearch<'t, 'b, 'bitmap> {
         let left_key =
             FacetGroupKey { field_id: self.field_id, level, left_bound: starting_left_bound };
-        let mut iter = self.db.range(&self.rtxn, &(left_key..))?.take(group_size);
+        let mut iter = self.db.range(self.rtxn, &(left_key..))?.take(group_size);
 
         // We iterate over the range while keeping in memory the previous value
         let (mut previous_key, mut previous_value) = iter.next().unwrap()?;
@@ -348,6 +348,7 @@ mod tests {
                 &mut docids,
             )
             .unwrap();
+            #[allow(clippy::format_push_string)]
             results.push_str(&format!("{}\n", display_bitmap(&docids)));
         }
         milli_snap!(results, format!("included_{i}"));
@@ -366,6 +367,7 @@ mod tests {
                 &mut docids,
             )
             .unwrap();
+            #[allow(clippy::format_push_string)]
             results.push_str(&format!("{}\n", display_bitmap(&docids)));
         }
         milli_snap!(results, format!("excluded_{i}"));
diff --git a/milli/src/search/facet/facet_sort_ascending.rs b/milli/src/search/facet/facet_sort_ascending.rs
index 2f1f73db3..552795981 100644
--- a/milli/src/search/facet/facet_sort_ascending.rs
+++ b/milli/src/search/facet/facet_sort_ascending.rs
@@ -50,6 +50,7 @@ struct AscendingFacetSort<'t, 'e> {
     rtxn: &'t heed::RoTxn<'e>,
     db: heed::Database<FacetGroupKeyCodec<ByteSliceRefCodec>, FacetGroupValueCodec>,
     field_id: u16,
+    #[allow(clippy::type_complexity)]
     stack: Vec<(
         RoaringBitmap,
         std::iter::Take<
@@ -91,9 +92,9 @@ impl<'t, 'e> Iterator for AscendingFacetSort<'t, 'e> {
             }
             let starting_key_below =
                 FacetGroupKey { field_id: self.field_id, level: level - 1, left_bound };
-            let iter = match self.db.range(&self.rtxn, &(starting_key_below..)) {
+            let iter = match self.db.range(self.rtxn, &(starting_key_below..)) {
                 Ok(iter) => iter,
-                Err(e) => return Some(Err(e.into())),
+                Err(e) => return Some(Err(e)),
             }
             .take(group_size as usize);
diff --git a/milli/src/search/facet/facet_sort_descending.rs b/milli/src/search/facet/facet_sort_descending.rs
index 5f09d708b..12767c64d 100644
--- a/milli/src/search/facet/facet_sort_descending.rs
+++ b/milli/src/search/facet/facet_sort_descending.rs
@@ -39,6 +39,7 @@ struct DescendingFacetSort<'t> {
     rtxn: &'t heed::RoTxn<'t>,
     db: heed::Database<FacetGroupKeyCodec<ByteSliceRefCodec>, FacetGroupValueCodec>,
     field_id: u16,
+    #[allow(clippy::type_complexity)]
     stack: Vec<(
         RoaringBitmap,
         std::iter::Take<
@@ -54,7 +55,7 @@ impl<'t> Iterator for DescendingFacetSort<'t> {
     fn next(&mut self) -> Option<Self::Item> {
         'outer: loop {
             let (documents_ids, deepest_iter, right_bound) = self.stack.last_mut()?;
-            while let Some(result) = deepest_iter.next() {
+            for result in deepest_iter.by_ref() {
                 let (
                     FacetGroupKey { level, left_bound, field_id },
                     FacetGroupValue { size: group_size, mut bitmap },
@@ -99,12 +100,10 @@ impl<'t> Iterator for DescendingFacetSort<'t> {
                 let iter = match self
                     .db
                     .remap_key_type::<FacetGroupKeyCodec<ByteSliceRefCodec>>()
-                    .rev_range(
-                        &self.rtxn,
-                        &(Bound::Included(starting_key_below), end_key_kelow),
-                    ) {
+                    .rev_range(self.rtxn, &(Bound::Included(starting_key_below), end_key_kelow))
+                {
                     Ok(iter) => iter,
-                    Err(e) => return Some(Err(e.into())),
+                    Err(e) => return Some(Err(e)),
                 }
                 .take(group_size as usize);
diff --git a/milli/src/search/facet/filter.rs b/milli/src/search/facet/filter.rs
index 1dc01566e..5da1ba7fd 100644
--- a/milli/src/search/facet/filter.rs
+++ b/milli/src/search/facet/filter.rs
@@ -100,10 +100,10 @@ impl<'a> Filter<'a> {
                         }
                     }
-                    if ors.len() > 1 {
-                        ands.push(FilterCondition::Or(ors));
-                    } else if ors.len() == 1 {
-                        ands.push(ors.pop().unwrap());
+                    match ors.len() {
+                        0 => (),
+                        1 => ands.push(ors.pop().unwrap()),
+                        _ => ands.push(FilterCondition::Or(ors)),
                     }
                 }
                 Either::Right(rule) => {
@@ -128,6 +128,7 @@ impl<'a> Filter<'a> {
         Ok(Some(Self { condition: and }))
     }
 
+    #[allow(clippy::should_implement_trait)]
     pub fn from_str(expression: &'a str) -> Result<Option<Self>> {
         let condition = match FilterCondition::parse(expression) {
             Ok(Some(fc)) => Ok(fc),
diff --git a/milli/src/search/facet/mod.rs b/milli/src/search/facet/mod.rs
index ccf40d6aa..7dfdcdb94 100644
--- a/milli/src/search/facet/mod.rs
+++ b/milli/src/search/facet/mod.rs
@@ -73,7 +73,7 @@ pub(crate) fn get_highest_level<'t>(
     let field_id_prefix = &field_id.to_be_bytes();
     Ok(db
         .as_polymorph()
-        .rev_prefix_iter::<_, ByteSlice, DecodeIgnore>(&txn, field_id_prefix)?
+        .rev_prefix_iter::<_, ByteSlice, DecodeIgnore>(txn, field_id_prefix)?
         .next()
         .map(|el| {
             let (key, _) = el.unwrap();
@@ -105,12 +105,9 @@ pub(crate) mod tests {
     pub fn get_random_looking_index() -> FacetIndex<OrderedF64Codec> {
         let index = FacetIndex::<OrderedF64Codec>::new(4, 8, 5);
         let mut txn = index.env.write_txn().unwrap();
-
         let mut rng = rand::rngs::SmallRng::from_seed([0; 32]);
-        let keys =
-            std::iter::from_fn(|| Some(rng.gen_range(0..256))).take(128).collect::<Vec<_>>();
 
-        for (_i, key) in keys.into_iter().enumerate() {
+        for (_i, key) in std::iter::from_fn(|| Some(rng.gen_range(0..256))).take(128).enumerate() {
             let mut bitmap = RoaringBitmap::new();
             bitmap.insert(key);
             bitmap.insert(key + 100);
diff --git a/milli/src/search/matches/mod.rs b/milli/src/search/matches/mod.rs
index b76ddef99..ec47f848d 100644
--- a/milli/src/search/matches/mod.rs
+++ b/milli/src/search/matches/mod.rs
@@ -125,10 +125,7 @@ impl<'t, A: AsRef<[u8]>> Matcher<'t, '_, A> {
         words_positions: &mut impl Iterator<Item = (usize, usize, &'a Token<'a>)>,
         matches: &mut Vec<Match>,
     ) -> bool {
-        let mut potential_matches = Vec::new();
-
-        // Add first match to potential matches.
-        potential_matches.push((token_position, word_position, partial.char_len()));
+        let mut potential_matches = vec![(token_position, word_position, partial.char_len())];
 
         for (token_position, word_position, word) in words_positions {
             partial = match partial.match_token(word) {
diff --git a/milli/src/snapshot_tests.rs b/milli/src/snapshot_tests.rs
index bcb9805ea..46972deba 100644
--- a/milli/src/snapshot_tests.rs
+++ b/milli/src/snapshot_tests.rs
@@ -314,8 +314,8 @@ pub fn snap_field_id_docid_facet_strings(index: &Index) -> String {
 pub fn snap_documents_ids(index: &Index) -> String {
     let rtxn = index.read_txn().unwrap();
     let documents_ids = index.documents_ids(&rtxn).unwrap();
-    let snap = display_bitmap(&documents_ids);
-    snap
+
+    display_bitmap(&documents_ids)
 }
 pub fn snap_stop_words(index: &Index) -> String {
     let rtxn = index.read_txn().unwrap();
@@ -326,8 +326,8 @@ pub fn snap_soft_deleted_documents_ids(index: &Index) -> String {
     let rtxn = index.read_txn().unwrap();
     let soft_deleted_documents_ids = index.soft_deleted_documents_ids(&rtxn).unwrap();
-    let soft_deleted_documents_ids = display_bitmap(&soft_deleted_documents_ids);
-    soft_deleted_documents_ids
+
+    display_bitmap(&soft_deleted_documents_ids)
 }
 pub fn snap_field_distributions(index: &Index) -> String {
     let rtxn = index.read_txn().unwrap();
@@ -350,8 +350,8 @@ pub fn snap_fields_ids_map(index: &Index) -> String {
 pub fn snap_geo_faceted_documents_ids(index: &Index) -> String {
     let rtxn = index.read_txn().unwrap();
     let geo_faceted_documents_ids = index.geo_faceted_documents_ids(&rtxn).unwrap();
-    let snap = display_bitmap(&geo_faceted_documents_ids);
-    snap
+
+    display_bitmap(&geo_faceted_documents_ids)
 }
 pub fn snap_external_documents_ids(index: &Index) -> String {
     let rtxn = index.read_txn().unwrap();
diff --git a/milli/src/update/available_documents_ids.rs b/milli/src/update/available_documents_ids.rs
index 3e4ec5600..784bee5a7 100644
--- a/milli/src/update/available_documents_ids.rs
+++ b/milli/src/update/available_documents_ids.rs
@@ -21,6 +21,7 @@ impl AvailableDocumentsIds {
                 let iter = match last_id.checked_add(1) {
                     Some(id) => id..=u32::max_value(),
+                    #[allow(clippy::reversed_empty_ranges)]
                     None => 1..=0, // empty range iterator
                 };
diff --git a/milli/src/update/delete_documents.rs b/milli/src/update/delete_documents.rs
index f1341c48c..a6a4ea609 100644
--- a/milli/src/update/delete_documents.rs
+++ b/milli/src/update/delete_documents.rs
@@ -138,7 +138,7 @@ impl<'t, 'u, 'i> DeleteDocuments<'t, 'u, 'i> {
         // the `soft_deleted_documents_ids` bitmap and early exit.
         let size_used = self.index.used_size()?;
         let map_size = self.index.env.map_size()? as u64;
-        let nb_documents = self.index.number_of_documents(&self.wtxn)?;
+        let nb_documents = self.index.number_of_documents(self.wtxn)?;
         let nb_soft_deleted = soft_deleted_docids.len();
 
         let percentage_available = 100 - (size_used * 100 / map_size);
@@ -474,7 +474,7 @@ impl<'t, 'u, 'i> DeleteDocuments<'t, 'u, 'i> {
             self.index.put_faceted_documents_ids(self.wtxn, field_id, facet_type, &docids)?;
 
             let facet_values = remove_docids_from_field_id_docid_facet_value(
-                &self.index,
+                self.index,
                 self.wtxn,
                 facet_type,
                 field_id,
@@ -641,7 +641,7 @@ mod tests {
         external_ids: &[&str],
         disable_soft_deletion: bool,
     ) -> Vec<u32> {
-        let external_document_ids = index.external_documents_ids(&wtxn).unwrap();
+        let external_document_ids = index.external_documents_ids(wtxn).unwrap();
         let ids_to_delete: Vec<u32> = external_ids
             .iter()
             .map(|id| external_document_ids.get(id.as_bytes()).unwrap())
@@ -858,7 +858,7 @@ mod tests {
         assert!(!results.documents_ids.is_empty());
         for id in results.documents_ids.iter() {
             assert!(
-                !deleted_internal_ids.contains(&id),
+                !deleted_internal_ids.contains(id),
                 "The document {} was supposed to be deleted",
                 id
             );
@@ -922,7 +922,7 @@ mod tests {
         assert!(!results.documents_ids.is_empty());
         for id in results.documents_ids.iter() {
             assert!(
-                !deleted_internal_ids.contains(&id),
+                !deleted_internal_ids.contains(id),
                 "The document {} was supposed to be deleted",
                 id
             );
@@ -986,7 +986,7 @@ mod tests {
         assert!(!results.documents_ids.is_empty());
         for id in results.documents_ids.iter() {
             assert!(
-                !deleted_internal_ids.contains(&id),
+                !deleted_internal_ids.contains(id),
                 "The document {} was supposed to be deleted",
                 id
             );
diff --git a/milli/src/update/facet/bulk.rs b/milli/src/update/facet/bulk.rs
index ea0a7d3d7..317a7af9b 100644
--- a/milli/src/update/facet/bulk.rs
+++ b/milli/src/update/facet/bulk.rs
@@ -111,7 +111,7 @@ impl<R: std::io::Read + std::io::Seek> FacetsUpdateBulkInner<R> {
         }
 
         for &field_id in field_ids.iter() {
-            let (level_readers, all_docids) = self.compute_levels_for_field_id(field_id, &wtxn)?;
+            let (level_readers, all_docids) = self.compute_levels_for_field_id(field_id, wtxn)?;
 
             handle_all_docids(wtxn, field_id, all_docids)?;
@@ -192,6 +192,7 @@ impl<R: std::io::Read + std::io::Seek> FacetsUpdateBulkInner<R> {
         Ok((subwriters, all_docids))
     }
 
+    #[allow(clippy::type_complexity)]
     fn read_level_0<'t>(
         &self,
         rtxn: &'t RoTxn,
@@ -245,6 +246,7 @@ impl<R: std::io::Read + std::io::Seek> FacetsUpdateBulkInner<R> {
     /// ## Returns:
     /// A vector of grenad::Reader. The reader at index `i` corresponds to the elements of level `i + 1`
     /// that must be inserted into the database.
+    #[allow(clippy::type_complexity)]
     fn compute_higher_levels<'t>(
         &self,
         rtxn: &'t RoTxn,
@@ -341,7 +343,7 @@ impl<R: std::io::Read + std::io::Seek> FacetsUpdateBulkInner<R> {
                 handle_group(&bitmaps, left_bounds.first().unwrap())?;
             }
         }
-        return Ok(sub_writers);
+        Ok(sub_writers)
     }
 }
diff --git a/milli/src/update/facet/delete.rs b/milli/src/update/facet/delete.rs
index 2bc54c7c1..9bec2d911 100644
--- a/milli/src/update/facet/delete.rs
+++ b/milli/src/update/facet/delete.rs
@@ -100,7 +100,7 @@ impl<'i, 'b> FacetsDelete<'i, 'b> {
                     max_group_size: self.max_group_size,
                 };
                 for facet_value in affected_facet_values {
-                    inc.delete(wtxn, field_id, facet_value.as_slice(), &self.docids_to_delete)?;
+                    inc.delete(wtxn, field_id, facet_value.as_slice(), self.docids_to_delete)?;
                 }
             }
         }
diff --git a/milli/src/update/facet/incremental.rs b/milli/src/update/facet/incremental.rs
index 2558c81a3..c6735224d 100644
--- a/milli/src/update/facet/incremental.rs
+++ b/milli/src/update/facet/incremental.rs
@@ -127,7 +127,7 @@ impl FacetsUpdateIncrementalInner {
         if let Some(e) = prefix_iter.next() {
             let (key_bytes, value) = e?;
             Ok((
-                FacetGroupKeyCodec::<ByteSliceRefCodec>::bytes_decode(&key_bytes)
+                FacetGroupKeyCodec::<ByteSliceRefCodec>::bytes_decode(key_bytes)
                     .ok_or(Error::Encoding)?
                     .into_owned(),
                 value,
@@ -146,11 +146,11 @@ impl FacetsUpdateIncrementalInner {
                     .as_polymorph()
                     .prefix_iter::<_, ByteSlice, FacetGroupValueCodec>(
                         txn,
-                        &prefix.as_slice(),
+                        prefix.as_slice(),
                     )?;
                 let (key_bytes, value) = iter.next().unwrap()?;
                 Ok((
-                    FacetGroupKeyCodec::<ByteSliceRefCodec>::bytes_decode(&key_bytes)
+                    FacetGroupKeyCodec::<ByteSliceRefCodec>::bytes_decode(key_bytes)
                         .ok_or(Error::Encoding)?
                         .into_owned(),
                     value,
@@ -185,15 +185,15 @@ impl FacetsUpdateIncrementalInner {
         let mut iter = self
             .db
             .as_polymorph()
-            .prefix_iter::<_, ByteSlice, DecodeIgnore>(&txn, &level0_prefix)?;
+            .prefix_iter::<_, ByteSlice, DecodeIgnore>(txn, &level0_prefix)?;
 
         if iter.next().is_none() {
             drop(iter);
             self.db.put(txn, &key, &value)?;
-            return Ok(InsertionResult::Insert);
+            Ok(InsertionResult::Insert)
         } else {
             drop(iter);
-            let old_value = self.db.get(&txn, &key)?;
+            let old_value = self.db.get(txn, &key)?;
             match old_value {
                 Some(mut updated_value) => {
                     // now merge the two
@@ -236,7 +236,7 @@ impl FacetsUpdateIncrementalInner {
 
         let max_group_size = self.max_group_size;
 
-        let result = self.insert_in_level(txn, field_id, level - 1, facet_value.clone(), docids)?;
+        let result = self.insert_in_level(txn, field_id, level - 1, facet_value, docids)?;
 
         // level below inserted an element
 
         let (insertion_key, insertion_value) =
@@ -312,13 +312,13 @@ impl FacetsUpdateIncrementalInner {
         };
 
         let mut iter =
-            self.db.range(&txn, &(start_key..))?.take((size_left as usize) + (size_right as usize));
+            self.db.range(txn, &(start_key..))?.take((size_left as usize) + (size_right as usize));
 
         let group_left = {
             let mut values_left = RoaringBitmap::new();
 
             let mut i = 0;
-            while let Some(next) = iter.next() {
+            for next in iter.by_ref() {
                 let (_key, value) = next?;
                 i += 1;
                 values_left |= &value.bitmap;
@@ -339,7 +339,7 @@ impl FacetsUpdateIncrementalInner {
                 FacetGroupValue { bitmap: mut values_right, .. },
             ) = iter.next().unwrap()?;
 
-            while let Some(next) = iter.next() {
+            for next in iter.by_ref() {
                 let (_, value) = next?;
                 values_right |= &value.bitmap;
             }
@@ -359,7 +359,7 @@ impl FacetsUpdateIncrementalInner {
     }
 
     /// Insert the given facet value and corresponding document ids in the database.
-    pub fn insert<'a, 't>(
+    pub fn insert<'t>(
         &self,
         txn: &'t mut RwTxn,
         field_id: u16,
@@ -371,7 +371,7 @@ impl FacetsUpdateIncrementalInner {
         }
         let group_size = self.group_size;
 
-        let highest_level = get_highest_level(&txn, self.db, field_id)?;
+        let highest_level = get_highest_level(txn, self.db, field_id)?;
 
         let result =
             self.insert_in_level(txn, field_id, highest_level as u8, facet_value, docids)?;
@@ -391,7 +391,7 @@ impl FacetsUpdateIncrementalInner {
         let size_highest_level = self
             .db
             .as_polymorph()
-            .prefix_iter::<_, ByteSlice, ByteSlice>(&txn, &highest_level_prefix)?
+            .prefix_iter::<_, ByteSlice, ByteSlice>(txn, &highest_level_prefix)?
             .count();
 
         if size_highest_level < self.group_size as usize * self.min_level_size as usize {
@@ -401,7 +401,7 @@ impl FacetsUpdateIncrementalInner {
         let mut groups_iter = self
             .db
             .as_polymorph()
-            .prefix_iter::<_, ByteSlice, FacetGroupValueCodec>(&txn, &highest_level_prefix)?;
+            .prefix_iter::<_, ByteSlice, FacetGroupValueCodec>(txn, &highest_level_prefix)?;
 
         let nbr_new_groups = size_highest_level / self.group_size as usize;
         let nbr_leftover_elements = size_highest_level % self.group_size as usize;
@@ -412,7 +412,7 @@ impl FacetsUpdateIncrementalInner {
             let mut values = RoaringBitmap::new();
             for _ in 0..group_size {
                 let (key_bytes, value_i) = groups_iter.next().unwrap()?;
-                let key_i = FacetGroupKeyCodec::<ByteSliceRefCodec>::bytes_decode(&key_bytes)
+                let key_i = FacetGroupKeyCodec::<ByteSliceRefCodec>::bytes_decode(key_bytes)
                     .ok_or(Error::Encoding)?;
 
                 if first_key.is_none() {
@@ -435,7 +435,7 @@ impl FacetsUpdateIncrementalInner {
             let mut values = RoaringBitmap::new();
             for _ in 0..nbr_leftover_elements {
                 let (key_bytes, value_i) = groups_iter.next().unwrap()?;
-                let key_i = FacetGroupKeyCodec::<ByteSliceRefCodec>::bytes_decode(&key_bytes)
+                let key_i = FacetGroupKeyCodec::<ByteSliceRefCodec>::bytes_decode(key_bytes)
                     .ok_or(Error::Encoding)?;
 
                 if first_key.is_none() {
@@ -494,7 +494,7 @@ impl FacetsUpdateIncrementalInner {
         let (deletion_key, mut bitmap) =
             self.find_insertion_key_value(field_id, level, facet_value, txn)?;
 
-        let result = self.delete_in_level(txn, field_id, level - 1, facet_value.clone(), docids)?;
+        let result = self.delete_in_level(txn, field_id, level - 1, facet_value, docids)?;
 
         let mut decrease_size = false;
         let next_key = match result {
@@ -547,13 +547,13 @@ impl FacetsUpdateIncrementalInner {
         docids: &RoaringBitmap,
     ) -> Result<DeletionResult> {
         let key = FacetGroupKey { field_id, level: 0, left_bound: facet_value };
-        let mut bitmap = self.db.get(&txn, &key)?.unwrap().bitmap;
+        let mut bitmap = self.db.get(txn, &key)?.unwrap().bitmap;
         bitmap -= docids;
 
         if bitmap.is_empty() {
             let mut next_key = None;
             if let Some((next, _)) =
-                self.db.remap_data_type::<DecodeIgnore>().get_greater_than(&txn, &key)?
+                self.db.remap_data_type::<DecodeIgnore>().get_greater_than(txn, &key)?
             {
                 if next.field_id == field_id && next.level == 0 {
                     next_key = Some(next.left_bound.to_vec());
@@ -567,7 +567,7 @@ impl FacetsUpdateIncrementalInner {
         }
     }
 
-    pub fn delete<'a, 't>(
+    pub fn delete<'t>(
         &self,
         txn: &'t mut RwTxn,
         field_id: u16,
@@ -582,7 +582,7 @@ impl FacetsUpdateIncrementalInner {
         {
             return Ok(());
         }
-        let highest_level = get_highest_level(&txn, self.db, field_id)?;
+        let highest_level = get_highest_level(txn, self.db, field_id)?;
 
         let result =
             self.delete_in_level(txn, field_id, highest_level as u8, facet_value, docids)?;
@@ -603,7 +603,7 @@ impl FacetsUpdateIncrementalInner {
             || self
                 .db
                 .as_polymorph()
-                .prefix_iter::<_, ByteSlice, ByteSlice>(&txn, &highest_level_prefix)?
+                .prefix_iter::<_, ByteSlice, ByteSlice>(txn, &highest_level_prefix)?
                 .count()
                 >= self.min_level_size as usize
         {
@@ -614,7 +614,7 @@ impl FacetsUpdateIncrementalInner {
             .db
             .as_polymorph()
             .prefix_iter::<_, ByteSlice, ByteSlice>(txn, &highest_level_prefix)?;
-        while let Some(el) = iter.next() {
+        for el in iter.by_ref() {
             let (k, _) = el?;
             to_delete.push(
                 FacetGroupKeyCodec::<ByteSliceRefCodec>::bytes_decode(k)
                     .ok_or(Error::Encoding)?
                     .into_owned(),
             );
@@ -640,7 +640,7 @@ impl<'a> FacetGroupKey<&'a [u8]> {
     }
 }
 
-impl<'a> FacetGroupKey<Vec<u8>> {
+impl FacetGroupKey<Vec<u8>> {
     pub fn as_ref(&self) -> FacetGroupKey<&[u8]> {
         FacetGroupKey {
             field_id: self.field_id,
@@ -804,7 +804,7 @@ mod tests {
             let mut bitmap = RoaringBitmap::new();
             bitmap.insert(i);
             index.verify_structure_validity(&txn, 0);
-            index.insert(&mut txn, 0, &(&(i as f64)), &bitmap);
+            index.insert(&mut txn, 0, &(i as f64), &bitmap);
         }
 
         for i in (200..256).into_iter().rev() {
diff --git a/milli/src/update/index_documents/extract/extract_geo_points.rs b/milli/src/update/index_documents/extract/extract_geo_points.rs
index c75b60c60..55044e712 100644
--- a/milli/src/update/index_documents/extract/extract_geo_points.rs
+++ b/milli/src/update/index_documents/extract/extract_geo_points.rs
@@ -51,6 +51,7 @@ pub fn extract_geo_points<R: io::Read + io::Seek>(
             )
             .map_err(|lng| GeoError::BadLongitude { document_id: document_id(), value: lng })?;
 
+            #[allow(clippy::drop_non_drop)]
             let bytes: [u8; 16] = concat_arrays![lat.to_ne_bytes(), lng.to_ne_bytes()];
             writer.insert(docid_bytes, bytes)?;
         } else if lat.is_none() && lng.is_some() {
diff --git a/milli/src/update/index_documents/extract/mod.rs b/milli/src/update/index_documents/extract/mod.rs
index c0f12e9ee..e696ed44b 100644
--- a/milli/src/update/index_documents/extract/mod.rs
+++ b/milli/src/update/index_documents/extract/mod.rs
@@ -33,6 +33,7 @@ use crate::{FieldId, Result};
 
 /// Extract data for each databases from obkv documents in parallel.
 /// Send data in grenad file over provided Sender.
+#[allow(clippy::too_many_arguments)]
 pub(crate) fn data_from_obkv_documents(
     original_obkv_chunks: impl Iterator<Item = Result<grenad::Reader<File>>> + Send,
     flattened_obkv_chunks: impl Iterator<Item = Result<grenad::Reader<File>>> + Send,
@@ -53,6 +54,7 @@ pub(crate) fn data_from_obkv_documents(
         })
         .collect::<Result<()>>()?;
 
+    #[allow(clippy::type_complexity)]
     let result: Result<(Vec<_>, (Vec<_>, (Vec<_>, Vec<_>)))> = flattened_obkv_chunks
         .par_bridge()
         .map(|flattened_obkv_chunks| {
@@ -217,6 +219,8 @@ fn send_original_documents_data(
 /// - docid_fid_facet_numbers
 /// - docid_fid_facet_strings
 /// - docid_fid_facet_exists
+#[allow(clippy::too_many_arguments)]
+#[allow(clippy::type_complexity)]
 fn send_and_extract_flattened_documents_data(
     flattened_documents_chunk: Result<grenad::Reader<File>>,
     indexer: GrenadParameters,
diff --git a/milli/src/update/index_documents/mod.rs b/milli/src/update/index_documents/mod.rs
index a121d3ae0..e9f5c2d38 100644
--- a/milli/src/update/index_documents/mod.rs
+++ b/milli/src/update/index_documents/mod.rs
@@ -598,6 +598,7 @@ where
 }
 
 /// Run the word prefix docids update operation.
+#[allow(clippy::too_many_arguments)]
 fn execute_word_prefix_docids(
     txn: &mut heed::RwTxn,
     reader: grenad::Reader<Cursor<ClonableMmap>>,
@@ -772,7 +773,7 @@ mod tests {
 
         let docs = index.documents(&rtxn, vec![0, 1, 2]).unwrap();
         let (_id, obkv) = docs.iter().find(|(_id, kv)| kv.get(0) == Some(br#""kevin""#)).unwrap();
-        let kevin_uuid: String = serde_json::from_slice(&obkv.get(1).unwrap()).unwrap();
+        let kevin_uuid: String = serde_json::from_slice(obkv.get(1).unwrap()).unwrap();
         drop(rtxn);
 
         // Second we send 1 document with the generated uuid, to erase the previous ones.
@@ -1811,7 +1812,7 @@ mod tests {
         let long_word = "lol".repeat(1000);
         let doc1 = documents! {[{
             "id": "1",
-            "title": long_word.clone(),
+            "title": long_word,
         }]};
 
         index.add_documents(doc1).unwrap();
diff --git a/milli/src/update/prefix_word_pairs/prefix_word.rs b/milli/src/update/prefix_word_pairs/prefix_word.rs
index 952e02558..60e2e554e 100644
--- a/milli/src/update/prefix_word_pairs/prefix_word.rs
+++ b/milli/src/update/prefix_word_pairs/prefix_word.rs
@@ -12,6 +12,7 @@ use crate::update::prefix_word_pairs::{
 };
 use crate::{CboRoaringBitmapCodec, Result, U8StrStrCodec, UncheckedU8StrStrCodec};
 
+#[allow(clippy::too_many_arguments)]
 #[logging_timer::time]
 pub fn index_prefix_word_database(
     wtxn: &mut heed::RwTxn,
@@ -38,8 +39,7 @@ pub fn index_prefix_word_database(
 
     for proximity in 1..max_proximity {
         for prefix in common_prefixes.iter() {
-            let mut prefix_key = vec![];
-            prefix_key.push(proximity);
+            let mut prefix_key = vec![proximity];
             prefix_key.extend_from_slice(prefix.as_bytes());
 
             let mut cursor = new_word_pair_proximity_docids.clone().into_prefix_iter(prefix_key)?;
             // This is the core of the algorithm
@@ -84,8 +84,7 @@ pub fn index_prefix_word_database(
 
     for proximity in 1..max_proximity {
         for prefix in new_prefixes.iter() {
-            let mut prefix_key = vec![];
-            prefix_key.push(proximity);
+            let mut prefix_key = vec![proximity];
             prefix_key.extend_from_slice(prefix.as_bytes());
 
             let mut db_iter = word_pair_proximity_docids
                 .as_polymorph()
diff --git a/milli/src/update/prefix_word_pairs/word_prefix.rs b/milli/src/update/prefix_word_pairs/word_prefix.rs
index 53e421fac..71a2a2915 100644
--- a/milli/src/update/prefix_word_pairs/word_prefix.rs
+++ b/milli/src/update/prefix_word_pairs/word_prefix.rs
@@ -176,6 +176,7 @@ use crate::update::prefix_word_pairs::{
 };
 use crate::{CboRoaringBitmapCodec, Result, U8StrStrCodec, UncheckedU8StrStrCodec};
 
+#[allow(clippy::too_many_arguments)]
 #[logging_timer::time]
 pub fn index_word_prefix_database(
     wtxn: &mut heed::RwTxn,
@@ -385,6 +386,7 @@ can be inserted into the database in sorted order. When it is flushed, it calls
 struct PrefixAndProximityBatch {
     proximity: u8,
     word1: Vec<u8>,
+    #[allow(clippy::type_complexity)]
     batch: Vec<(Vec<u8>, Vec<Cow<'static, [u8]>>)>,
 }
@@ -574,7 +576,7 @@ mod tests {
         expected_prefixes: &[&str],
     ) {
         let mut actual_prefixes = vec![];
-        trie.for_each_prefix_of(word.as_bytes(), &mut Vec::new(), &search_start, |x| {
+        trie.for_each_prefix_of(word.as_bytes(), &mut Vec::new(), search_start, |x| {
             let s = String::from_utf8(x.to_owned()).unwrap();
             actual_prefixes.push(s);
         });
diff --git a/milli/tests/search/mod.rs b/milli/tests/search/mod.rs
index 8de4c80aa..d63df96ec 100644
--- a/milli/tests/search/mod.rs
+++ b/milli/tests/search/mod.rs
@@ -20,7 +20,7 @@ mod query_criteria;
 mod sort;
 mod typo_tolerance;
 
-pub const TEST_QUERY: &'static str = "hello world america";
+pub const TEST_QUERY: &str = "hello world america";
 
 pub const EXTERNAL_DOCUMENTS_IDS: &[&str; 17] =
     &["A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q"];
@@ -178,7 +178,7 @@ fn execute_filter(filter: &str, document: &TestDocument) -> Option<String> {
         {
             id = Some(document.id.clone())
         }
-    } else if let Some((field, filter)) = filter.split_once("=") {
+    } else if let Some((field, filter)) = filter.split_once('=') {
         if field == "tag" && document.tag == filter {
             id = Some(document.id.clone())
         } else if field == "asc_desc_rank"
@@ -186,11 +186,11 @@ fn execute_filter(filter: &str, document: &TestDocument) -> Option<String> {
         {
             id = Some(document.id.clone())
         }
-    } else if let Some(("asc_desc_rank", filter)) = filter.split_once("<") {
+    } else if let Some(("asc_desc_rank", filter)) = filter.split_once('<') {
         if document.asc_desc_rank < filter.parse().unwrap() {
             id = Some(document.id.clone())
         }
-    } else if let Some(("asc_desc_rank", filter)) = filter.split_once(">") {
+    } else if let Some(("asc_desc_rank", filter)) = filter.split_once('>') {
         if document.asc_desc_rank > filter.parse().unwrap() {
             id = Some(document.id.clone())
         }
diff --git a/milli/tests/search/query_criteria.rs b/milli/tests/search/query_criteria.rs
index 90b4d6362..d4aa859a4 100644
--- a/milli/tests/search/query_criteria.rs
+++ b/milli/tests/search/query_criteria.rs
@@ -200,7 +200,7 @@ test_criterion!(
 #[test]
 fn criteria_mixup() {
     use Criterion::*;
-    let index = search::setup_search_index_with_criteria(&vec![
+    let index = search::setup_search_index_with_criteria(&[
         Words,
         Attribute,
         Desc(S("asc_desc_rank")),