3434: Make clippy happy for Rust 1.67, allow `uninlined_format_args` r=Kerollmops a=dureuill

# Pull Request

This PR allows `uninlined_format_args` in CI for clippy.

This is needed because of https://github.com/rust-lang/rust-clippy/issues/10087: the lint currently has correctness issues with respect to edition 2018 crates, and inlining every format argument would be a big change altogether. https://github.com/rust-lang/rust-clippy/pull/10265 is already open to move this lint to the "pedantic" category, meaning that if that PR merges, a future Rust release will accept our code unmodified with respect to uninlined format arguments.
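For context, a minimal sketch of what the lint asks for (the variable name is illustrative):

```rust
fn main() {
    let crate_name = "meilisearch";

    // Flagged by `clippy::uninlined_format_args`: the argument is passed
    // positionally even though it is a plain identifier.
    println!("building {}", crate_name);

    // The suggested rewrite inlines the identifier into the format string
    // (captured identifiers have been stable since Rust 1.58).
    println!("building {crate_name}");
}
```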

As a result, this PR introduces the following changes:

1. Allow `uninlined_format_args` in the clippy command in CI.
2. Use `rewind()` rather than `seek(SeekFrom::Start(0))` (see the sketch after this list).
3. Remove lifetimes that clippy deems needless.
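
A minimal sketch of change 2, assuming a temporary file opened for both reading and writing (the `tempfile` crate is used here only because the touched tests already depend on it):

```rust
use std::io::{Read, Seek, SeekFrom, Write};

fn main() -> std::io::Result<()> {
    let mut file = tempfile::tempfile()?;
    file.write_all(b"hello")?;

    // Before this PR: seek back to the start explicitly.
    file.seek(SeekFrom::Start(0))?;

    // After this PR: `Seek::rewind` (stable since Rust 1.55) states the
    // intent directly, and `SeekFrom` can drop out of the imports.
    file.rewind()?;

    let mut contents = String::new();
    file.read_to_string(&mut contents)?;
    assert_eq!(contents, "hello");
    Ok(())
}
```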

Co-authored-by: Louis Dureuil <louis@meilisearch.com>
Commit f647b20818 · bors[bot] · 2023-01-31 09:45:12 +00:00 (committed by GitHub)
12 changed files with 51 additions and 50 deletions


@@ -109,7 +109,8 @@ jobs:
         uses: actions-rs/cargo@v1
         with:
           command: clippy
-          args: --all-targets -- --deny warnings
+          # allow uninlined_format_args https://github.com/rust-lang/rust-clippy/issues/10087
+          args: --all-targets -- --deny warnings --allow clippy::uninlined_format_args
 
   fmt:
     name: Run Rustfmt
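
For reference, the CI step above should be equivalent to running `cargo clippy --all-targets -- --deny warnings --allow clippy::uninlined_format_args` locally.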


@@ -198,7 +198,7 @@ impl From<KindWithContent> for KindDump {
 #[cfg(test)]
 pub(crate) mod test {
     use std::fs::File;
-    use std::io::{Seek, SeekFrom};
+    use std::io::Seek;
     use std::str::FromStr;
 
     use big_s::S;

@@ -410,7 +410,7 @@ pub(crate) mod test {
         // create the dump
         let mut file = tempfile::tempfile().unwrap();
         dump.persist_to(&mut file).unwrap();
-        file.seek(SeekFrom::Start(0)).unwrap();
+        file.rewind().unwrap();
         file
     }


@@ -33,7 +33,7 @@
 //!
 use std::fs::{self, File};
-use std::io::{BufRead, BufReader, ErrorKind, Seek, SeekFrom};
+use std::io::{BufRead, BufReader, ErrorKind, Seek};
 use std::path::Path;
 
 use serde::{Deserialize, Serialize};

@@ -178,7 +178,7 @@ impl V5Reader {
     }
 
     pub fn keys(&mut self) -> Result<Box<dyn Iterator<Item = Result<Key>> + '_>> {
-        self.keys.seek(SeekFrom::Start(0))?;
+        self.keys.rewind()?;
         Ok(Box::new(
             (&mut self.keys).lines().map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }),
         ))


@@ -947,9 +947,9 @@ impl IndexScheduler {
     ///
     /// ## Return
     /// The list of processed tasks.
-    fn apply_index_operation<'txn, 'i>(
+    fn apply_index_operation<'i>(
         &self,
-        index_wtxn: &'txn mut RwTxn<'i, '_>,
+        index_wtxn: &'_ mut RwTxn<'i, '_>,
         index: &'i Index,
         operation: IndexOperation,
     ) -> Result<Vec<Task>> {
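
The lifetime removals in this and the following files all match the pattern described by clippy's `needless_lifetimes` lint. A standalone sketch, with a hypothetical function:

```rust
// Flagged: `'a` is spelled out only to tie the argument to the return
// type, which lifetime elision already does implicitly.
fn first_word_named<'a>(s: &'a str) -> &'a str {
    s.split_whitespace().next().unwrap_or(s)
}

// The same signature with the lifetime elided, as clippy suggests.
fn first_word(s: &str) -> &str {
    s.split_whitespace().next().unwrap_or(s)
}

fn main() {
    assert_eq!(first_word_named("make clippy happy"), "make");
    assert_eq!(first_word("make clippy happy"), "make");
}
```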


@@ -474,10 +474,10 @@ fn make_document(
     Ok(document)
 }
 
-fn format_fields<'a, A: AsRef<[u8]>>(
+fn format_fields<A: AsRef<[u8]>>(
     document: &Document,
     field_ids_map: &FieldsIdsMap,
-    builder: &MatcherBuilder<'a, A>,
+    builder: &MatcherBuilder<'_, A>,
     formatted_options: &BTreeMap<FieldId, FormatOptions>,
     compute_matches: bool,
     displayable_ids: &BTreeSet<FieldId>,

@@ -522,9 +522,9 @@ fn format_fields<'a, A: AsRef<[u8]>>(
     Ok((matches_position, document))
 }
 
-fn format_value<'a, A: AsRef<[u8]>>(
+fn format_value<A: AsRef<[u8]>>(
     value: Value,
-    builder: &MatcherBuilder<'a, A>,
+    builder: &MatcherBuilder<'_, A>,
     format_options: Option<FormatOptions>,
     infos: &mut Vec<MatchBounds>,
     compute_matches: bool,


@@ -348,10 +348,10 @@ impl Index {
     /* external documents ids */
 
     /// Writes the external documents ids and internal ids (i.e. `u32`).
-    pub(crate) fn put_external_documents_ids<'a>(
+    pub(crate) fn put_external_documents_ids(
         &self,
         wtxn: &mut RwTxn,
-        external_documents_ids: &ExternalDocumentsIds<'a>,
+        external_documents_ids: &ExternalDocumentsIds<'_>,
     ) -> heed::Result<()> {
         let ExternalDocumentsIds { hard, soft, .. } = external_documents_ids;
         let hard = hard.as_fst().as_bytes();

@@ -426,7 +426,7 @@ impl Index {
     }
 
     /// Returns the `rtree` which associates coordinates to documents ids.
-    pub fn geo_rtree<'t>(&self, rtxn: &'t RoTxn) -> Result<Option<RTree<GeoPoint>>> {
+    pub fn geo_rtree(&self, rtxn: &'_ RoTxn) -> Result<Option<RTree<GeoPoint>>> {
         match self
             .main
             .get::<_, Str, SerdeBincode<RTree<GeoPoint>>>(rtxn, main_key::GEO_RTREE_KEY)?


@@ -182,15 +182,15 @@ impl<'t> Criterion for Proximity<'t> {
     }
 }
 
-fn resolve_candidates<'t>(
-    ctx: &'t dyn Context,
+fn resolve_candidates(
+    ctx: &'_ dyn Context,
     query_tree: &Operation,
     proximity: u8,
     cache: &mut Cache,
     wdcache: &mut WordDerivationsCache,
 ) -> Result<RoaringBitmap> {
-    fn resolve_operation<'t>(
-        ctx: &'t dyn Context,
+    fn resolve_operation(
+        ctx: &'_ dyn Context,
         query_tree: &Operation,
         proximity: u8,
         cache: &mut Cache,

@@ -243,8 +243,8 @@ fn resolve_candidates<'t>(
         Ok(result)
     }
 
-    fn mdfs_pair<'t>(
-        ctx: &'t dyn Context,
+    fn mdfs_pair(
+        ctx: &'_ dyn Context,
         left: &Operation,
         right: &Operation,
         proximity: u8,

@@ -298,8 +298,8 @@ fn resolve_candidates<'t>(
         Ok(output)
     }
 
-    fn mdfs<'t>(
-        ctx: &'t dyn Context,
+    fn mdfs(
+        ctx: &'_ dyn Context,
         branches: &[Operation],
         proximity: u8,
         cache: &mut Cache,


@@ -239,15 +239,15 @@ fn alterate_query_tree(
     Ok(query_tree)
 }
 
-fn resolve_candidates<'t>(
-    ctx: &'t dyn Context,
+fn resolve_candidates(
+    ctx: &'_ dyn Context,
     query_tree: &Operation,
     number_typos: u8,
     cache: &mut HashMap<(Operation, u8), RoaringBitmap>,
     wdcache: &mut WordDerivationsCache,
 ) -> Result<RoaringBitmap> {
-    fn resolve_operation<'t>(
-        ctx: &'t dyn Context,
+    fn resolve_operation(
+        ctx: &'_ dyn Context,
         query_tree: &Operation,
         number_typos: u8,
         cache: &mut HashMap<(Operation, u8), RoaringBitmap>,

@@ -276,8 +276,8 @@ fn resolve_candidates<'t>(
         }
     }
 
-    fn mdfs<'t>(
-        ctx: &'t dyn Context,
+    fn mdfs(
+        ctx: &'_ dyn Context,
         branches: &[Operation],
         mana: u8,
         cache: &mut HashMap<(Operation, u8), RoaringBitmap>,


@@ -574,9 +574,9 @@ fn remove_from_word_docids(
     Ok(())
 }
 
-fn remove_docids_from_field_id_docid_facet_value<'i, 'a>(
-    index: &'i Index,
-    wtxn: &'a mut heed::RwTxn,
+fn remove_docids_from_field_id_docid_facet_value(
+    index: &'_ Index,
+    wtxn: &'_ mut heed::RwTxn,
     facet_type: FacetType,
     field_id: FieldId,
     to_remove: &RoaringBitmap,


@@ -157,9 +157,9 @@ impl FacetsUpdateIncrementalInner {
     ///
     /// ## Return
     /// See documentation of `insert_in_level`
-    fn insert_in_level_0<'t>(
+    fn insert_in_level_0(
         &self,
-        txn: &'t mut RwTxn,
+        txn: &'_ mut RwTxn,
         field_id: u16,
         facet_value: &[u8],
         docids: &RoaringBitmap,

@@ -211,9 +211,9 @@ impl FacetsUpdateIncrementalInner {
     /// - `InsertionResult::Insert` means that inserting the `facet_value` into the `level` resulted
     ///   in the addition of a new key in that level, and that therefore the number of children
     ///   of the parent node should be incremented.
-    fn insert_in_level<'t>(
+    fn insert_in_level(
         &self,
-        txn: &'t mut RwTxn,
+        txn: &'_ mut RwTxn,
         field_id: u16,
         level: u8,
         facet_value: &[u8],

@@ -348,9 +348,9 @@ impl FacetsUpdateIncrementalInner {
     }
 
     /// Insert the given facet value and corresponding document ids in the database.
-    pub fn insert<'t>(
+    pub fn insert(
         &self,
-        txn: &'t mut RwTxn,
+        txn: &'_ mut RwTxn,
         field_id: u16,
         facet_value: &[u8],
         docids: &RoaringBitmap,

@@ -470,9 +470,9 @@ impl FacetsUpdateIncrementalInner {
     /// in level 1, the key with the left bound `3` had to be changed to the next facet value (e.g. 4).
     /// In that case `DeletionResult::Reduce` is returned. The parent of the reduced key may need to adjust
     /// its left bound as well.
-    fn delete_in_level<'t>(
+    fn delete_in_level(
         &self,
-        txn: &'t mut RwTxn,
+        txn: &'_ mut RwTxn,
         field_id: u16,
         level: u8,
         facet_value: &[u8],

@@ -529,9 +529,9 @@ impl FacetsUpdateIncrementalInner {
         }
     }
 
-    fn delete_in_level_0<'t>(
+    fn delete_in_level_0(
         &self,
-        txn: &'t mut RwTxn,
+        txn: &'_ mut RwTxn,
         field_id: u16,
         facet_value: &[u8],
         docids: &RoaringBitmap,

@@ -557,9 +557,9 @@ impl FacetsUpdateIncrementalInner {
         }
     }
 
-    pub fn delete<'t>(
+    pub fn delete(
         &self,
-        txn: &'t mut RwTxn,
+        txn: &'_ mut RwTxn,
         field_id: u16,
         facet_value: &[u8],
         docids: &RoaringBitmap,


@@ -1,6 +1,6 @@
 use std::borrow::Cow;
 use std::fs::File;
-use std::io::{self, Seek, SeekFrom};
+use std::io::{self, Seek};
 use std::time::Instant;
 
 use grenad::{CompressionType, Sorter};

@@ -66,7 +66,7 @@ pub fn sorter_into_reader(
 pub fn writer_into_reader(writer: grenad::Writer<File>) -> Result<grenad::Reader<File>> {
     let mut file = writer.into_inner()?;
-    file.seek(SeekFrom::Start(0))?;
+    file.rewind()?;
     grenad::Reader::new(file).map_err(Into::into)
 }


@@ -2,7 +2,7 @@ use std::borrow::Cow;
 use std::collections::hash_map::Entry;
 use std::collections::{HashMap, HashSet};
 use std::fs::File;
-use std::io::{Read, Seek, SeekFrom};
+use std::io::{Read, Seek};
 
 use fxhash::FxHashMap;
 use heed::RoTxn;

@@ -510,7 +510,7 @@ impl<'a, 'i> Transform<'a, 'i> {
         let mut original_documents = writer.into_inner()?;
         // We then extract the file and reset the seek to be able to read it again.
-        original_documents.seek(SeekFrom::Start(0))?;
+        original_documents.rewind()?;
 
         // We create a final writer to write the new documents in order from the sorter.
         let mut writer = create_writer(

@@ -522,7 +522,7 @@ impl<'a, 'i> Transform<'a, 'i> {
         // into this writer, extract the file and reset the seek to be able to read it again.
         self.flattened_sorter.write_into_stream_writer(&mut writer)?;
         let mut flattened_documents = writer.into_inner()?;
-        flattened_documents.seek(SeekFrom::Start(0))?;
+        flattened_documents.rewind()?;
 
         let mut new_external_documents_ids_builder: Vec<_> =
             self.new_external_documents_ids_builder.into_iter().collect();

@@ -650,10 +650,10 @@ impl<'a, 'i> Transform<'a, 'i> {
         // Once we have written all the documents, we extract
         // the file and reset the seek to be able to read it again.
         let mut original_documents = original_writer.into_inner()?;
-        original_documents.seek(SeekFrom::Start(0))?;
+        original_documents.rewind()?;
         let mut flattened_documents = flattened_writer.into_inner()?;
-        flattened_documents.seek(SeekFrom::Start(0))?;
+        flattened_documents.rewind()?;
 
         let output = TransformOutput {
             primary_key,