Mirror of https://github.com/meilisearch/meilisearch.git, synced 2024-11-23 02:27:40 +08:00
Merge #775

775: Fix clippy for Rust 1.67, allow `uninlined_format_args` r=dureuill a=dureuill

# Pull Request

milli part of https://github.com/meilisearch/meilisearch/pull/3437

Co-authored-by: Louis Dureuil <louis@meilisearch.com>

Commit 33f61d2cd4
.github/workflows/rust.yml (vendored)
@@ -65,6 +65,7 @@ jobs:
         uses: actions-rs/cargo@v1
         with:
           command: clippy
+          args: -- --allow clippy::uninlined_format_args
 
   fmt:
     name: Run Rustfmt
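Note (not part of the diff): `clippy::uninlined_format_args` asks for variables to be inlined directly into format strings; rather than rewriting every `format!`/`println!` call site in milli, this PR allows the lint in CI. A minimal sketch of what the lint flags, with an illustrative variable name:

fn main() {
    let crate_name = "milli";
    // The lint flags this form, where the argument sits outside the string...
    println!("building {}", crate_name);
    // ...and suggests inlining it instead:
    println!("building {crate_name}");
}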
@@ -348,10 +348,10 @@ impl Index {
     /* external documents ids */
 
     /// Writes the external documents ids and internal ids (i.e. `u32`).
-    pub(crate) fn put_external_documents_ids<'a>(
+    pub(crate) fn put_external_documents_ids(
         &self,
         wtxn: &mut RwTxn,
-        external_documents_ids: &ExternalDocumentsIds<'a>,
+        external_documents_ids: &ExternalDocumentsIds<'_>,
     ) -> heed::Result<()> {
         let ExternalDocumentsIds { hard, soft, .. } = external_documents_ids;
         let hard = hard.as_fst().as_bytes();
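Note (not part of the diff): most of the remaining hunks follow one pattern: explicit lifetime parameters that the compiler can infer are dropped, or replaced with the anonymous `'_` lifetime when a type still needs one (presumably what clippy reports as `needless_lifetimes`; the PR does not name the lint). A minimal before/after sketch with a made-up `Ids` type, not milli's:

struct Ids<'a> {
    name: &'a str,
}

// Before: `'a` is named explicitly even though it never constrains the output.
fn len_explicit<'a>(ids: &Ids<'a>) -> usize {
    ids.name.len()
}

// After: the same signature with the lifetime elided, as in `&ExternalDocumentsIds<'_>`.
fn len_elided(ids: &Ids<'_>) -> usize {
    ids.name.len()
}

fn main() {
    let ids = Ids { name: "external ids" };
    assert_eq!(len_explicit(&ids), len_elided(&ids));
}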
@@ -426,7 +426,7 @@ impl Index {
     }
 
     /// Returns the `rtree` which associates coordinates to documents ids.
-    pub fn geo_rtree<'t>(&self, rtxn: &'t RoTxn) -> Result<Option<RTree<GeoPoint>>> {
+    pub fn geo_rtree(&self, rtxn: &RoTxn) -> Result<Option<RTree<GeoPoint>>> {
         match self
             .main
             .get::<_, Str, SerdeBincode<RTree<GeoPoint>>>(rtxn, main_key::GEO_RTREE_KEY)?
@@ -182,15 +182,15 @@ impl<'t> Criterion for Proximity<'t> {
     }
 }
 
-fn resolve_candidates<'t>(
-    ctx: &'t dyn Context,
+fn resolve_candidates(
+    ctx: &dyn Context,
     query_tree: &Operation,
     proximity: u8,
     cache: &mut Cache,
     wdcache: &mut WordDerivationsCache,
 ) -> Result<RoaringBitmap> {
-    fn resolve_operation<'t>(
-        ctx: &'t dyn Context,
+    fn resolve_operation(
+        ctx: &dyn Context,
         query_tree: &Operation,
         proximity: u8,
         cache: &mut Cache,
@@ -243,8 +243,8 @@ fn resolve_candidates<'t>(
         Ok(result)
     }
 
-    fn mdfs_pair<'t>(
-        ctx: &'t dyn Context,
+    fn mdfs_pair(
+        ctx: &dyn Context,
         left: &Operation,
         right: &Operation,
         proximity: u8,
@@ -298,8 +298,8 @@ fn resolve_candidates<'t>(
         Ok(output)
     }
 
-    fn mdfs<'t>(
-        ctx: &'t dyn Context,
+    fn mdfs(
+        ctx: &dyn Context,
         branches: &[Operation],
         proximity: u8,
         cache: &mut Cache,
@@ -239,15 +239,15 @@ fn alterate_query_tree(
     Ok(query_tree)
 }
 
-fn resolve_candidates<'t>(
-    ctx: &'t dyn Context,
+fn resolve_candidates(
+    ctx: &dyn Context,
     query_tree: &Operation,
     number_typos: u8,
     cache: &mut HashMap<(Operation, u8), RoaringBitmap>,
     wdcache: &mut WordDerivationsCache,
 ) -> Result<RoaringBitmap> {
-    fn resolve_operation<'t>(
-        ctx: &'t dyn Context,
+    fn resolve_operation(
+        ctx: &dyn Context,
         query_tree: &Operation,
         number_typos: u8,
         cache: &mut HashMap<(Operation, u8), RoaringBitmap>,
@@ -276,8 +276,8 @@ fn resolve_candidates<'t>(
         }
     }
 
-    fn mdfs<'t>(
-        ctx: &'t dyn Context,
+    fn mdfs(
+        ctx: &dyn Context,
         branches: &[Operation],
         mana: u8,
         cache: &mut HashMap<(Operation, u8), RoaringBitmap>,
@@ -574,9 +574,9 @@ fn remove_from_word_docids(
     Ok(())
 }
 
-fn remove_docids_from_field_id_docid_facet_value<'i, 'a>(
-    index: &'i Index,
-    wtxn: &'a mut heed::RwTxn,
+fn remove_docids_from_field_id_docid_facet_value(
+    index: &Index,
+    wtxn: &mut heed::RwTxn,
     facet_type: FacetType,
     field_id: FieldId,
     to_remove: &RoaringBitmap,
@@ -157,9 +157,9 @@ impl FacetsUpdateIncrementalInner {
     ///
     /// ## Return
     /// See documentation of `insert_in_level`
-    fn insert_in_level_0<'t>(
+    fn insert_in_level_0(
         &self,
-        txn: &'t mut RwTxn,
+        txn: &mut RwTxn,
         field_id: u16,
         facet_value: &[u8],
         docids: &RoaringBitmap,
@@ -211,9 +211,9 @@ impl FacetsUpdateIncrementalInner {
     /// - `InsertionResult::Insert` means that inserting the `facet_value` into the `level` resulted
     /// in the addition of a new key in that level, and that therefore the number of children
     /// of the parent node should be incremented.
-    fn insert_in_level<'t>(
+    fn insert_in_level(
         &self,
-        txn: &'t mut RwTxn,
+        txn: &mut RwTxn,
         field_id: u16,
         level: u8,
         facet_value: &[u8],
@@ -348,9 +348,9 @@ impl FacetsUpdateIncrementalInner {
     }
 
     /// Insert the given facet value and corresponding document ids in the database.
-    pub fn insert<'t>(
+    pub fn insert(
         &self,
-        txn: &'t mut RwTxn,
+        txn: &mut RwTxn,
         field_id: u16,
         facet_value: &[u8],
         docids: &RoaringBitmap,
@@ -470,9 +470,9 @@ impl FacetsUpdateIncrementalInner {
     /// in level 1, the key with the left bound `3` had to be changed to the next facet value (e.g. 4).
     /// In that case `DeletionResult::Reduce` is returned. The parent of the reduced key may need to adjust
     /// its left bound as well.
-    fn delete_in_level<'t>(
+    fn delete_in_level(
         &self,
-        txn: &'t mut RwTxn,
+        txn: &mut RwTxn,
         field_id: u16,
         level: u8,
         facet_value: &[u8],
@@ -529,9 +529,9 @@ impl FacetsUpdateIncrementalInner {
         }
     }
 
-    fn delete_in_level_0<'t>(
+    fn delete_in_level_0(
         &self,
-        txn: &'t mut RwTxn,
+        txn: &mut RwTxn,
         field_id: u16,
         facet_value: &[u8],
         docids: &RoaringBitmap,
@@ -557,9 +557,9 @@ impl FacetsUpdateIncrementalInner {
         }
     }
 
-    pub fn delete<'t>(
+    pub fn delete(
         &self,
-        txn: &'t mut RwTxn,
+        txn: &mut RwTxn,
         field_id: u16,
         facet_value: &[u8],
         docids: &RoaringBitmap,
@@ -1,6 +1,6 @@
 use std::borrow::Cow;
 use std::fs::File;
-use std::io::{self, Seek, SeekFrom};
+use std::io::{self, Seek};
 use std::time::Instant;
 
 use grenad::{CompressionType, Sorter};
@@ -66,7 +66,7 @@ pub fn sorter_into_reader(
 
 pub fn writer_into_reader(writer: grenad::Writer<File>) -> Result<grenad::Reader<File>> {
     let mut file = writer.into_inner()?;
-    file.seek(SeekFrom::Start(0))?;
+    file.rewind()?;
     grenad::Reader::new(file).map_err(Into::into)
 }
 
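Note (not part of the diff): the other recurring change swaps `seek(SeekFrom::Start(0))` for `Seek::rewind()`, which does the same thing and lets the `SeekFrom` import go away (clippy has a lint along the lines of `seek_to_start_instead_of_rewind` for this pattern, though the PR does not name it). A minimal sketch using an in-memory cursor rather than a grenad writer:

use std::io::{Cursor, Read, Seek, Write};

fn main() -> std::io::Result<()> {
    let mut buf = Cursor::new(Vec::new());
    buf.write_all(b"sorted entries")?;

    // Same effect as `buf.seek(SeekFrom::Start(0))?`, but the returned
    // position is discarded and no `SeekFrom` import is needed.
    buf.rewind()?;

    let mut contents = String::new();
    buf.read_to_string(&mut contents)?;
    assert_eq!(contents, "sorted entries");
    Ok(())
}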
@@ -2,7 +2,7 @@ use std::borrow::Cow;
 use std::collections::hash_map::Entry;
 use std::collections::{HashMap, HashSet};
 use std::fs::File;
-use std::io::{Read, Seek, SeekFrom};
+use std::io::{Read, Seek};
 
 use fxhash::FxHashMap;
 use heed::RoTxn;
@@ -510,7 +510,7 @@ impl<'a, 'i> Transform<'a, 'i> {
 
         let mut original_documents = writer.into_inner()?;
         // We then extract the file and reset the seek to be able to read it again.
-        original_documents.seek(SeekFrom::Start(0))?;
+        original_documents.rewind()?;
 
         // We create a final writer to write the new documents in order from the sorter.
         let mut writer = create_writer(
@@ -522,7 +522,7 @@ impl<'a, 'i> Transform<'a, 'i> {
         // into this writer, extract the file and reset the seek to be able to read it again.
         self.flattened_sorter.write_into_stream_writer(&mut writer)?;
         let mut flattened_documents = writer.into_inner()?;
-        flattened_documents.seek(SeekFrom::Start(0))?;
+        flattened_documents.rewind()?;
 
         let mut new_external_documents_ids_builder: Vec<_> =
             self.new_external_documents_ids_builder.into_iter().collect();
@@ -650,10 +650,10 @@ impl<'a, 'i> Transform<'a, 'i> {
         // Once we have written all the documents, we extract
         // the file and reset the seek to be able to read it again.
         let mut original_documents = original_writer.into_inner()?;
-        original_documents.seek(SeekFrom::Start(0))?;
+        original_documents.rewind()?;
 
         let mut flattened_documents = flattened_writer.into_inner()?;
-        flattened_documents.seek(SeekFrom::Start(0))?;
+        flattened_documents.rewind()?;
 
         let output = TransformOutput {
             primary_key,
@@ -7,15 +7,15 @@ fn set_stop_words(index: &Index, stop_words: &[&str]) {
     let mut wtxn = index.write_txn().unwrap();
     let config = IndexerConfig::default();
 
-    let mut builder = Settings::new(&mut wtxn, &index, &config);
-    let stop_words = stop_words.into_iter().map(|s| s.to_string()).collect();
+    let mut builder = Settings::new(&mut wtxn, index, &config);
+    let stop_words = stop_words.iter().map(|s| s.to_string()).collect();
     builder.set_stop_words(stop_words);
     builder.execute(|_| (), || false).unwrap();
     wtxn.commit().unwrap();
 }
 
 fn test_phrase_search_with_stop_words_given_criteria(criteria: &[Criterion]) {
-    let index = super::setup_search_index_with_criteria(&criteria);
+    let index = super::setup_search_index_with_criteria(criteria);
 
     // Add stop_words
     set_stop_words(&index, &["a", "an", "the", "of"]);
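Note (not part of the diff): the test fixes address two smaller clippy complaints: borrowing a value that is already a reference (`&index`, `&criteria`), and calling `into_iter()` on a shared slice reference, which is just `iter()` under another name (clippy's `into_iter_on_ref`, if memory serves). A small sketch with illustrative data rather than the milli test harness:

fn to_owned_words(stop_words: &[&str]) -> Vec<String> {
    // On `&[&str]`, `into_iter()` and `iter()` yield the same `&&str` items;
    // clippy prefers spelling it `iter()`.
    stop_words.iter().map(|s| s.to_string()).collect()
}

fn main() {
    let stop_words = ["a", "an", "the", "of"];
    let owned = to_owned_words(&stop_words);
    assert_eq!(owned, vec!["a", "an", "the", "of"]);
}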