Fix cargo clippy errors
Don't apply clippy to tests for now
Fix clippy warnings of the filter-parser package
author unvalley <38400669+unvalley@users.noreply.github.com> 1666325847 +0900
committer unvalley <kirohi.code@gmail.com> 1666791316 +0900
Update .github/workflows/rust.yml
Co-authored-by: Clémentine Urquizar - curqui <clementine@meilisearch.com>
Allow clippy lint too_many_arguments
Allow clippy lint needless_collect
Allow clippy lints too_many_arguments and type_complexity
Fix clippy warning comparison_chain
Fix clippy warning vec_init_then_push
Allow clippy lint should_implement_trait
Allow clippy lint drop_non_drop
Fix lifetime clippy warnings in filter-parser
Execute cargo fmt
Fix remaining clippy warnings
Fix remaining clippy warnings again and allow the lint at each site
This commit is contained in:
parent 811f156031
commit c7322f704c

.github/workflows/rust.yml (vendored)
@@ -65,7 +65,6 @@ jobs:
         uses: actions-rs/cargo@v1
         with:
           command: clippy
-          args: --all-targets

   fmt:
     name: Run Rustfmt
@@ -48,17 +48,14 @@ pub fn parse_condition(input: Span) -> IResult<FilterCondition> {
 pub fn parse_exists(input: Span) -> IResult<FilterCondition> {
     let (input, key) = terminated(parse_value, tag("EXISTS"))(input)?;

-    Ok((input, FilterCondition::Condition { fid: key.into(), op: Exists }))
+    Ok((input, FilterCondition::Condition { fid: key, op: Exists }))
 }
 /// exist = value "NOT" WS+ "EXISTS"
 pub fn parse_not_exists(input: Span) -> IResult<FilterCondition> {
     let (input, key) = parse_value(input)?;

     let (input, _) = tuple((tag("NOT"), multispace1, tag("EXISTS")))(input)?;
-    Ok((
-        input,
-        FilterCondition::Not(Box::new(FilterCondition::Condition { fid: key.into(), op: Exists })),
-    ))
+    Ok((input, FilterCondition::Not(Box::new(FilterCondition::Condition { fid: key, op: Exists }))))
 }

 /// to = value value "TO" WS+ value
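Note: the `fid: key.into()` to `fid: key` changes above drop a conversion from a type to itself, which is most likely clippy's useless_conversion lint. A minimal sketch of the pattern, with an illustrative Token type rather than the real filter-parser one:

// clippy::useless_conversion fires when `.into()` produces the same type it
// started from; the call compiles but does nothing.
#[derive(Debug)]
struct Token<'a>(&'a str);

fn main() {
    let key = Token("release_date");
    // Before: `let fid: Token = key.into();` -- flagged, `key` is already a Token.
    let fid: Token = key;
    println!("{:?}", fid);
}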
@@ -168,7 +168,7 @@ fn ws<'a, O>(inner: impl FnMut(Span<'a>) -> IResult<O>) -> impl FnMut(Span<'a>)
 }

 /// value_list = (value ("," value)* ","?)?
-fn parse_value_list<'a>(input: Span<'a>) -> IResult<Vec<Token<'a>>> {
+fn parse_value_list(input: Span) -> IResult<Vec<Token>> {
     let (input, first_value) = opt(parse_value)(input)?;
     if let Some(first_value) = first_value {
         let value_list_el_parser = preceded(ws(tag(",")), parse_value);
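Dropping the explicit `'a` from `parse_value_list` (and from `parse_value` further down) fixes clippy's needless_lifetimes warning: when every lifetime follows the elision rules, the parameter is noise. A sketch with a stand-in Span type:

// clippy::needless_lifetimes: all lifetimes here can be elided,
// so the explicit parameter can go.
struct Span<'a>(&'a str);

// Before: fn first_word<'a>(input: Span<'a>) -> &'a str { ... }
fn first_word(input: Span) -> &str {
    input.0.split_whitespace().next().unwrap_or("")
}

fn main() {
    println!("{}", first_word(Span("channel = ponce")));
}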
@@ -335,17 +335,17 @@ fn parse_error_reserved_keyword(input: Span) -> IResult<FilterCondition> {
         Ok(result) => Ok(result),
         Err(nom::Err::Error(inner) | nom::Err::Failure(inner)) => match inner.kind() {
             ErrorKind::ExpectedValue(ExpectedValueKind::ReservedKeyword) => {
-                return Err(nom::Err::Failure(inner));
+                Err(nom::Err::Failure(inner))
             }
-            _ => return Err(nom::Err::Error(inner)),
+            _ => Err(nom::Err::Error(inner)),
         },
-        Err(e) => {
-            return Err(e);
-        }
+        Err(e) => Err(e),
     }
 }

-/// primary = (WS* "(" WS* expression WS* ")" WS*) | geoRadius | condition | exists | not_exists | to
+/**
+primary = (WS* "(" WS* expression WS* ")" WS*) | geoRadius | condition | exists | not_exists | to
+*/
 fn parse_primary(input: Span, depth: usize) -> IResult<FilterCondition> {
     if depth > MAX_FILTER_DEPTH {
         return Err(nom::Err::Error(Error::new_from_kind(input, ErrorKind::DepthLimitReached)));
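The match-arm rewrite above removes `return` from positions where the arm's value is already the function's result, which is clippy's needless_return. A compact sketch:

// clippy::needless_return: when the match is the last expression of the
// function, each arm's value is returned without the keyword.
fn classify(n: i32) -> &'static str {
    match n {
        0 => "zero",
        n if n > 0 => "positive",
        // Before: `_ => return "negative",` -- the `return` is redundant here.
        _ => "negative",
    }
}

fn main() {
    assert_eq!(classify(-3), "negative");
}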
@@ -78,7 +78,7 @@ pub fn word_exact<'a, 'b: 'a>(tag: &'b str) -> impl Fn(Span<'a>) -> IResult<'a,
 }

 /// value = WS* ( word | singleQuoted | doubleQuoted) WS+
-pub fn parse_value<'a>(input: Span<'a>) -> IResult<Token<'a>> {
+pub fn parse_value(input: Span) -> IResult<Token> {
     // to get better diagnostic message we are going to strip the left whitespaces from the input right now
     let (input, _) = take_while(char::is_whitespace)(input)?;

@@ -1,6 +1,4 @@
 #![cfg_attr(all(test, fuzzing), feature(no_coverage))]
-#![allow(clippy::reversed_empty_ranges)]
-#![allow(clippy::too_many_arguments)]
 #[macro_use]
 pub mod documents;

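Removing the two crate-wide `#![allow]`s from lib.rs is the theme of the rest of this commit: each lint is re-allowed only at the specific item that needs it, so it stays active everywhere else. A sketch of the two scopes:

// Crate level (removed): #![allow(clippy::too_many_arguments)] would have
// silenced the lint for the whole crate.
// Item level (what the commit does instead): only this function is exempt.
#[allow(clippy::too_many_arguments)]
fn configure(a: u8, b: u8, c: u8, d: u8, e: u8, f: u8, g: u8, h: u8) -> u32 {
    [a, b, c, d, e, f, g, h].iter().map(|&x| u32::from(x)).sum()
}

fn main() {
    println!("{}", configure(1, 2, 3, 4, 5, 6, 7, 8));
}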
@@ -242,6 +242,7 @@ fn iterative_facet_number_ordered_iter<'t>(
     // The itertools GroupBy iterator doesn't provide an owned version, we are therefore
     // required to collect the result into an owned collection (a Vec).
     // https://github.com/rust-itertools/itertools/issues/499
+    #[allow(clippy::needless_collect)]
     let vec: Vec<_> = iter
         .group_by(|(_, v)| *v)
         .into_iter()
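The `#[allow(clippy::needless_collect)]` is justified by the comment above it: itertools' GroupBy borrows the underlying iterator, so the intermediate Vec is genuinely needed. A self-contained sketch of the same borrow-driven pattern:

// clippy::needless_collect assumes collect()/into_iter() pairs are redundant,
// but here the collect ends a shared borrow so a mutable borrow can follow.
fn lengths(words: &mut Vec<String>) -> Vec<usize> {
    #[allow(clippy::needless_collect)]
    let lens: Vec<usize> = words.iter().map(|w| w.len()).collect();
    words.clear(); // needs &mut -- only legal because the borrow above ended
    lens.into_iter().collect()
}

fn main() {
    let mut ws = vec!["hello".to_string(), "world!".to_string()];
    assert_eq!(lengths(&mut ws), vec![5, 6]);
}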
@@ -284,6 +285,7 @@ fn iterative_facet_string_ordered_iter<'t>(
     // The itertools GroupBy iterator doesn't provide an owned version, we are therefore
     // required to collect the result into an owned collection (a Vec).
     // https://github.com/rust-itertools/itertools/issues/499
+    #[allow(clippy::needless_collect)]
     let vec: Vec<_> = iter
         .group_by(|(_, v)| *v)
         .into_iter()
@@ -179,6 +179,7 @@ impl<'t> Criterion for Attribute<'t> {
 /// QueryPositionIterator is an Iterator over positions of a Query,
 /// It contains iterators over words positions.
 struct QueryPositionIterator<'t> {
+    #[allow(clippy::type_complexity)]
     inner:
         Vec<Peekable<Box<dyn Iterator<Item = heed::Result<((&'t str, u32), RoaringBitmap)>> + 't>>>,
 }
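`#[allow(clippy::type_complexity)]` keeps the nested iterator type inline. The alternative the lint usually nudges toward is a type alias; a sketch with simplified item types (u64 standing in for the heed::Result and RoaringBitmap values):

// A type alias names the nested type once; clippy then has nothing to flag.
use std::iter::Peekable;

type WordPositions<'t> = Box<dyn Iterator<Item = ((&'t str, u32), u64)> + 't>;

struct QueryPositionIterator<'t> {
    inner: Vec<Peekable<WordPositions<'t>>>,
}

fn main() {
    let it: WordPositions<'static> = Box::new(std::iter::once((("word", 0u32), 1u64)));
    let q = QueryPositionIterator { inner: vec![it.peekable()] };
    println!("{} inner iterator(s)", q.inner.len());
}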
@@ -96,6 +96,7 @@ pub trait Context<'c> {
         &self,
         docid: DocumentId,
     ) -> heed::Result<HashMap<String, RoaringBitmap>>;
+    #[allow(clippy::type_complexity)]
     fn word_position_iterator(
         &self,
         word: &str,
@@ -610,11 +611,7 @@ fn query_pair_proximity_docids(
         }
         (QueryKind::Exact { word: left, .. }, QueryKind::Tolerant { typo, word: right }) => {
             let r_words = word_derivations(right, prefix, *typo, ctx.words_fst(), wdcache)?;
-<<<<<<< HEAD
             all_word_pair_overall_proximity_docids(ctx, &[(left, 0)], r_words, proximity)
-=======
-            all_word_pair_proximity_docids(ctx, &[(left, 0)], r_words, proximity)
->>>>>>> 08fe530b (Execute cargo clippy --fix)
         }
         (
             QueryKind::Tolerant { typo: l_typo, word: left },
@@ -123,6 +123,7 @@ impl<'a> FacetDistinctIter<'a> {
         }
     }

+    #[allow(clippy::drop_non_drop)]
     fn facet_values_prefix_key(distinct: FieldId, id: DocumentId) -> [u8; FID_SIZE + DOCID_SIZE] {
         concat_arrays!(distinct.to_be_bytes(), id.to_be_bytes())
     }
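drop_non_drop warns when `drop()` is called on a value whose type has no destructor, since the call releases nothing; here the drop presumably comes from inside the concat_arrays! expansion, so a targeted allow is the pragmatic fix. A sketch of the lint itself:

// clippy::drop_non_drop: dropping a value whose type has no destructor is a
// no-op, so the call is suspicious unless it is intentional.
struct PrefixKey([u8; 6]); // plain bytes, nothing to free

#[allow(clippy::drop_non_drop)]
fn main() {
    let key = PrefixKey([0, 1, 0, 0, 0, 7]);
    println!("key is {} bytes", key.0.len());
    drop(key); // flagged without the allow: no resource is released here
}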
@@ -100,10 +100,10 @@ impl<'a> Filter<'a> {
                     }
                 }

-                if ors.len() > 1 {
-                    ands.push(FilterCondition::Or(ors));
-                } else if ors.len() == 1 {
-                    ands.push(ors.pop().unwrap());
+                match ors.len() {
+                    1 => ands.push(ors.pop().unwrap()),
+                    n if n > 1 => ands.push(FilterCondition::Or(ors)),
+                    _ => (),
                 }
             }
             Either::Right(rule) => {
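The if/else-if on `ors.len()` becomes a single match, which is the fix clippy's comparison_chain lint suggests when successive branches compare the same value. A sketch:

// clippy::comparison_chain: chained comparisons on one value read better as a
// match with guards, and the compiler checks that the arms are exhaustive.
fn describe(len: usize) -> &'static str {
    // Before: if len > 1 { "multiple" } else if len == 1 { "single" } else { "empty" }
    match len {
        1 => "single element",
        n if n > 1 => "multiple elements",
        _ => "empty",
    }
}

fn main() {
    assert_eq!(describe(0), "empty");
    assert_eq!(describe(3), "multiple elements");
}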
@@ -128,6 +128,7 @@ impl<'a> Filter<'a> {
         Ok(Some(Self { condition: and }))
     }

+    #[allow(clippy::should_implement_trait)]
     pub fn from_str(expression: &'a str) -> Result<Option<Self>> {
         let condition = match FilterCondition::parse(expression) {
             Ok(Some(fc)) => Ok(fc),
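should_implement_trait fires because an inherent `from_str` shadows the standard FromStr trait method. The allow looks right here: `Filter<'a>` borrows from its input, while `FromStr::from_str` must return a value that owns its data. An illustrative sketch with a simplified signature:

// clippy::should_implement_trait: an inherent from_str is flagged, but the
// std trait can't be used when the result borrows the input string.
struct Filter<'a> {
    expression: &'a str,
}

impl<'a> Filter<'a> {
    #[allow(clippy::should_implement_trait)]
    pub fn from_str(expression: &'a str) -> Option<Self> {
        (!expression.is_empty()).then(|| Filter { expression })
    }
}

fn main() {
    if let Some(f) = Filter::from_str("channel = ponce") {
        println!("parsed: {}", f.expression);
    }
}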
@@ -125,10 +125,7 @@ impl<'t, A: AsRef<[u8]>> Matcher<'t, '_, A> {
         words_positions: &mut impl Iterator<Item = (usize, usize, &'a Token<'a>)>,
         matches: &mut Vec<Match>,
     ) -> bool {
-        let mut potential_matches = Vec::new();
-
-        // Add first match to potential matches.
-        potential_matches.push((token_position, word_position, partial.char_len()));
+        let mut potential_matches = vec![(token_position, word_position, partial.char_len())];

         for (token_position, word_position, word) in words_positions {
             partial = match partial.match_token(word) {
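vec_init_then_push: an empty `Vec::new()` followed immediately by `push` collapses into the `vec!` macro, which states the initial contents in one place. A sketch:

// clippy::vec_init_then_push: initialize with vec! instead of pushing into an
// empty vector right after creating it.
fn main() {
    // Before:
    //     let mut potential_matches = Vec::new();
    //     potential_matches.push((0usize, 0usize, 5usize));
    let mut potential_matches = vec![(0usize, 0usize, 5usize)];
    potential_matches.push((1, 2, 3)); // later pushes are unaffected
    assert_eq!(potential_matches.len(), 2);
}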
@@ -21,6 +21,7 @@ impl AvailableDocumentsIds {

         let iter = match last_id.checked_add(1) {
             Some(id) => id..=u32::max_value(),
+            #[allow(clippy::reversed_empty_ranges)]
             None => 1..=0, // empty range iterator
         };

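reversed_empty_ranges normally catches a bug (a range like `1..=0` never yields anything), but here an empty iterator is exactly the intent once the u32 id space is exhausted, so the arm gets a local allow. A runnable sketch of the same idea:

// clippy::reversed_empty_ranges: `1..=0` is deliberately empty here -- when
// the id space is exhausted there is nothing left to hand out.
fn available_ids(last_id: u32) -> impl Iterator<Item = u32> {
    match last_id.checked_add(1) {
        Some(id) => id..=u32::MAX,
        #[allow(clippy::reversed_empty_ranges)]
        None => 1..=0, // empty range iterator
    }
}

fn main() {
    assert_eq!(available_ids(10).next(), Some(11));
    assert_eq!(available_ids(u32::MAX).count(), 0);
}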
@@ -51,6 +51,7 @@ pub fn extract_geo_points<R: io::Read + io::Seek>(
             )
             .map_err(|lng| GeoError::BadLongitude { document_id: document_id(), value: lng })?;

+            #[allow(clippy::drop_non_drop)]
             let bytes: [u8; 16] = concat_arrays![lat.to_ne_bytes(), lng.to_ne_bytes()];
             writer.insert(docid_bytes, bytes)?;
         } else if lat.is_none() && lng.is_some() {
@@ -33,6 +33,7 @@ use crate::{FieldId, Result};

 /// Extract data for each databases from obkv documents in parallel.
 /// Send data in grenad file over provided Sender.
+#[allow(clippy::too_many_arguments)]
 pub(crate) fn data_from_obkv_documents(
     original_obkv_chunks: impl Iterator<Item = Result<grenad::Reader<File>>> + Send,
     flattened_obkv_chunks: impl Iterator<Item = Result<grenad::Reader<File>>> + Send,
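too_many_arguments fires past seven parameters. The commit opts for an allow on the indexing pipeline functions; the refactor the lint nudges toward is a parameter struct, sketched here with made-up names:

// Grouping related parameters into a struct keeps signatures short without
// silencing the lint; field names below are illustrative only.
struct ExtractParams {
    max_memory: usize,
    max_positions: u32,
    exact_words: Vec<String>,
}

#[allow(clippy::too_many_arguments)]
fn extract_flat(a: u8, b: u8, c: u8, d: u8, e: u8, f: u8, g: u8, h: u8) -> u32 {
    [a, b, c, d, e, f, g, h].iter().map(|&x| u32::from(x)).sum()
}

// The grouped version needs no allow at all.
fn extract_grouped(params: &ExtractParams) -> usize {
    params.max_memory + params.max_positions as usize + params.exact_words.len()
}

fn main() {
    println!("{}", extract_flat(1, 2, 3, 4, 5, 6, 7, 8));
    let params = ExtractParams { max_memory: 512, max_positions: 10, exact_words: vec![] };
    println!("{}", extract_grouped(&params));
}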
@@ -53,6 +54,7 @@ pub(crate) fn data_from_obkv_documents(
         })
         .collect::<Result<()>>()?;

+    #[allow(clippy::type_complexity)]
     let result: Result<(Vec<_>, (Vec<_>, (Vec<_>, Vec<_>)))> = flattened_obkv_chunks
         .par_bridge()
         .map(|flattened_obkv_chunks| {
@@ -217,6 +219,8 @@ fn send_original_documents_data(
 /// - docid_fid_facet_numbers
 /// - docid_fid_facet_strings
 /// - docid_fid_facet_exists
+#[allow(clippy::too_many_arguments)]
+#[allow(clippy::type_complexity)]
 fn send_and_extract_flattened_documents_data(
     flattened_documents_chunk: Result<grenad::Reader<File>>,
     indexer: GrenadParameters,
@@ -598,6 +598,7 @@ where
     }

     /// Run the word prefix docids update operation.
+    #[allow(clippy::too_many_arguments)]
     fn execute_word_prefix_docids(
         txn: &mut heed::RwTxn,
         reader: grenad::Reader<Cursor<ClonableMmap>>,
@@ -12,6 +12,7 @@ use crate::update::prefix_word_pairs::{
 };
 use crate::{CboRoaringBitmapCodec, Result, U8StrStrCodec, UncheckedU8StrStrCodec};

+#[allow(clippy::too_many_arguments)]
 #[logging_timer::time]
 pub fn index_prefix_word_database(
     wtxn: &mut heed::RwTxn,
@@ -38,8 +39,7 @@ pub fn index_prefix_word_database(

     for proximity in 1..max_proximity {
         for prefix in common_prefixes.iter() {
-            let mut prefix_key = vec![];
-            prefix_key.push(proximity);
+            let mut prefix_key = vec![proximity];
             prefix_key.extend_from_slice(prefix.as_bytes());
             let mut cursor = new_word_pair_proximity_docids.clone().into_prefix_iter(prefix_key)?;
             // This is the core of the algorithm
@@ -84,8 +84,7 @@ pub fn index_prefix_word_database(

     for proximity in 1..max_proximity {
         for prefix in new_prefixes.iter() {
-            let mut prefix_key = vec![];
-            prefix_key.push(proximity);
+            let mut prefix_key = vec![proximity];
             prefix_key.extend_from_slice(prefix.as_bytes());
             let mut db_iter = word_pair_proximity_docids
                 .as_polymorph()
@@ -176,6 +176,7 @@ use crate::update::prefix_word_pairs::{
 };
 use crate::{CboRoaringBitmapCodec, Result, U8StrStrCodec, UncheckedU8StrStrCodec};

+#[allow(clippy::too_many_arguments)]
 #[logging_timer::time]
 pub fn index_word_prefix_database(
     wtxn: &mut heed::RwTxn,
@@ -385,6 +386,7 @@ can be inserted into the database in sorted order. When it is flushed, it calls
 struct PrefixAndProximityBatch {
     proximity: u8,
     word1: Vec<u8>,
+    #[allow(clippy::type_complexity)]
     batch: Vec<(Vec<u8>, Vec<Cow<'static, [u8]>>)>,
 }

@@ -200,12 +200,14 @@ test_criterion!(
 #[test]
 fn criteria_mixup() {
     use Criterion::*;
-    let index = search::setup_search_index_with_criteria(&[Words,
+    let index = search::setup_search_index_with_criteria(&[
+        Words,
         Attribute,
         Desc(S("asc_desc_rank")),
         Exactness,
         Proximity,
-        Typo]);
+        Typo,
+    ]);

     #[rustfmt::skip]
     let criteria_mix = {