mirror of https://github.com/meilisearch/meilisearch.git
Merge pull request #801 from MarinPostma/make-clippy-happy
Make clippy happy
commit 8309e00ed3
@@ -39,7 +39,7 @@ fn prepare_database(path: &Path) -> Database {
         let file = File::open(path).unwrap();
         let reader = BufReader::new(file);
         let settings: Settings = serde_json::from_reader(reader).unwrap();
-        settings.into_update().unwrap()
+        settings.to_update().unwrap()
     };

     db.update_write::<_, _, Box<dyn Error>>(|writer| {
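Note: the into_update → to_update rename follows Rust's naming conventions, which clippy enforces through its wrong_self_convention lint: a method prefixed into_ should consume self, while one that borrows &self (this one clones internally) should use to_. A minimal sketch of the distinction, with hypothetical stand-in types:

    struct Settings { name: String }
    struct SettingsUpdate { name: String }

    impl Settings {
        // `to_*`: borrows self and builds a new value; `self` stays usable.
        fn to_update(&self) -> SettingsUpdate {
            SettingsUpdate { name: self.name.clone() }
        }

        // `into_*`: consumes self, so no clone is needed.
        fn into_update(self) -> SettingsUpdate {
            SettingsUpdate { name: self.name }
        }
    }

    fn main() {
        let settings = Settings { name: "index".to_string() };
        let _a = settings.to_update();   // `settings` is still alive here
        let _b = settings.into_update(); // `settings` is moved here
    }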
@@ -123,7 +123,7 @@ fn index_command(command: IndexCommand, database: Database) -> Result<(), Box<dy
     let settings = {
         let string = fs::read_to_string(&command.settings)?;
         let settings: Settings = serde_json::from_str(&string).unwrap();
-        settings.into_update().unwrap()
+        settings.to_update().unwrap()
     };

     db.update_write(|w| index.settings_update(w, settings))?;
@@ -19,7 +19,7 @@ use crate::criterion::{Criteria, Context, ContextMut};
 use crate::distinct_map::{BufferedDistinctMap, DistinctMap};
 use crate::raw_document::RawDocument;
 use crate::{database::MainT, reordered_attrs::ReorderedAttrs};
-use crate::{store, Document, DocumentId, MResult};
+use crate::{Document, DocumentId, MResult, Index};
 use crate::query_tree::{create_query_tree, traverse_query_tree};
 use crate::query_tree::{Operation, QueryResult, QueryKind, QueryId, PostingsKey};
 use crate::query_tree::Context as QTContext;
@@ -33,6 +33,7 @@ pub struct SortResult {
     pub exhaustive_facets_count: Option<bool>,
 }

+#[allow(clippy::too_many_arguments)]
 pub fn bucket_sort<'c, FI>(
     reader: &heed::RoTxn<MainT>,
     query: &str,
@@ -42,12 +43,7 @@ pub fn bucket_sort<'c, FI>(
     filter: Option<FI>,
     criteria: Criteria<'c>,
     searchable_attrs: Option<ReorderedAttrs>,
-    main_store: store::Main,
-    postings_lists_store: store::PostingsLists,
-    documents_fields_counts_store: store::DocumentsFieldsCounts,
-    synonyms_store: store::Synonyms,
-    prefix_documents_cache_store: store::PrefixDocumentsCache,
-    prefix_postings_lists_cache_store: store::PrefixPostingsListsCache,
+    index: &Index,
 ) -> MResult<SortResult>
 where
     FI: Fn(DocumentId) -> bool,
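Note: two things happen in this hunk. The six individual store handles are folded into a single `index: &Index` parameter, and the remaining signature is still wide enough that clippy's too_many_arguments lint (which by default fires when a function takes more than seven arguments) is silenced explicitly. Grouping related parameters into one struct is the standard fix; a sketch with made-up stand-in types:

    // Hypothetical stand-ins for the store handles the real `Index` bundles.
    struct Main;
    struct PostingsLists;
    struct Synonyms;

    // Grouping related handles into one struct shrinks every call site
    // and keeps signatures below clippy's argument threshold.
    struct Index {
        main: Main,
        postings_lists: PostingsLists,
        synonyms: Synonyms,
    }

    // Before: one parameter per store (too_many_arguments territory).
    fn search_before(_query: &str, _main: &Main, _postings: &PostingsLists, _synonyms: &Synonyms) {}

    // After: a single borrowed `Index`; fields are picked off as needed.
    fn search_after(query: &str, index: &Index) {
        let _ = (query, &index.main, &index.postings_lists, &index.synonyms);
    }

    fn main() {
        let index = Index { main: Main, postings_lists: PostingsLists, synonyms: Synonyms };
        search_after("hello", &index);
    }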
@@ -68,26 +64,21 @@ where
             distinct_size,
             criteria,
             searchable_attrs,
-            main_store,
-            postings_lists_store,
-            documents_fields_counts_store,
-            synonyms_store,
-            prefix_documents_cache_store,
-            prefix_postings_lists_cache_store,
+            index,
         );
     }

     let mut result = SortResult::default();

-    let words_set = main_store.words_fst(reader)?;
-    let stop_words = main_store.stop_words_fst(reader)?;
+    let words_set = index.main.words_fst(reader)?;
+    let stop_words = index.main.stop_words_fst(reader)?;

     let context = QTContext {
         words_set,
         stop_words,
-        synonyms: synonyms_store,
-        postings_lists: postings_lists_store,
-        prefix_postings_lists: prefix_postings_lists_cache_store,
+        synonyms: index.synonyms,
+        postings_lists: index.postings_lists,
+        prefix_postings_lists: index.prefix_postings_lists_cache,
     };

     let (operation, mapping) = create_query_tree(reader, &context, query)?;
@@ -156,7 +147,7 @@ where
             reader,
             postings_lists: &mut arena,
             query_mapping: &mapping,
-            documents_fields_counts_store,
+            documents_fields_counts_store: index.documents_fields_counts,
         };

         criterion.prepare(ctx, &mut group)?;
@@ -189,7 +180,7 @@ where
     debug!("criterion loop took {:.02?}", before_criterion_loop.elapsed());
     debug!("proximity evaluation called {} times", proximity_count.load(Ordering::Relaxed));

-    let schema = main_store.schema(reader)?.ok_or(Error::SchemaMissing)?;
+    let schema = index.main.schema(reader)?.ok_or(Error::SchemaMissing)?;
     let iter = raw_documents.into_iter().skip(range.start).take(range.len());
     let iter = iter.map(|rd| Document::from_raw(rd, &queries_kinds, &arena, searchable_attrs.as_ref(), &schema));
     let documents = iter.collect();
@@ -202,6 +193,7 @@ where
     Ok(result)
 }

+#[allow(clippy::too_many_arguments)]
 pub fn bucket_sort_with_distinct<'c, FI, FD>(
     reader: &heed::RoTxn<MainT>,
     query: &str,
@@ -213,12 +205,7 @@ pub fn bucket_sort_with_distinct<'c, FI, FD>(
     distinct_size: usize,
     criteria: Criteria<'c>,
     searchable_attrs: Option<ReorderedAttrs>,
-    main_store: store::Main,
-    postings_lists_store: store::PostingsLists,
-    documents_fields_counts_store: store::DocumentsFieldsCounts,
-    synonyms_store: store::Synonyms,
-    _prefix_documents_cache_store: store::PrefixDocumentsCache,
-    prefix_postings_lists_cache_store: store::PrefixPostingsListsCache,
+    index: &Index,
 ) -> MResult<SortResult>
 where
     FI: Fn(DocumentId) -> bool,
@@ -226,15 +213,15 @@ where
 {
     let mut result = SortResult::default();

-    let words_set = main_store.words_fst(reader)?;
-    let stop_words = main_store.stop_words_fst(reader)?;
+    let words_set = index.main.words_fst(reader)?;
+    let stop_words = index.main.stop_words_fst(reader)?;

     let context = QTContext {
         words_set,
         stop_words,
-        synonyms: synonyms_store,
-        postings_lists: postings_lists_store,
-        prefix_postings_lists: prefix_postings_lists_cache_store,
+        synonyms: index.synonyms,
+        postings_lists: index.postings_lists,
+        prefix_postings_lists: index.prefix_postings_lists_cache,
     };

     let (operation, mapping) = create_query_tree(reader, &context, query)?;
@@ -313,7 +300,7 @@ where
             reader,
             postings_lists: &mut arena,
             query_mapping: &mapping,
-            documents_fields_counts_store,
+            documents_fields_counts_store: index.documents_fields_counts,
         };

         let before_criterion_preparation = Instant::now();
@@ -378,7 +365,7 @@ where
     // once we classified the documents related to the current
     // automatons we save that as the next valid result
     let mut seen = BufferedDistinctMap::new(&mut distinct_map);
-    let schema = main_store.schema(reader)?.ok_or(Error::SchemaMissing)?;
+    let schema = index.main.schema(reader)?.ok_or(Error::SchemaMissing)?;

     let mut documents = Vec::with_capacity(range.len());
     for raw_document in raw_documents.into_iter().skip(distinct_raw_offset) {
@@ -92,6 +92,7 @@ impl<'a> CriteriaBuilder<'a> {
         self.inner.reserve(additional)
     }

+    #[allow(clippy::should_implement_trait)]
     pub fn add<C: 'a>(mut self, criterion: C) -> CriteriaBuilder<'a>
     where
         C: Criterion,
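Note: clippy::should_implement_trait fires when an inherent method shares a name with a well-known trait method — here `add`, which collides with std::ops::Add::add. Since this builder's add is not arithmetic, the allow is the pragmatic choice; the alternatives would be a rename or an actual trait impl. A simplified, hypothetical illustration:

    struct CriteriaBuilder {
        criteria: Vec<String>,
    }

    impl CriteriaBuilder {
        // An inherent `add` shadows the name of `std::ops::Add::add`,
        // which is what clippy::should_implement_trait complains about.
        #[allow(clippy::should_implement_trait)]
        fn add(mut self, criterion: &str) -> CriteriaBuilder {
            self.criteria.push(criterion.to_string());
            self
        }
    }

    fn main() {
        let builder = CriteriaBuilder { criteria: Vec::new() }
            .add("typo")
            .add("words");
        assert_eq!(builder.criteria.len(), 2);
    }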
@@ -22,6 +22,7 @@ impl Criterion for Typo {
         // It is safe to panic on input number higher than 3,
         // the number of typos is never bigger than that.
         #[inline]
+        #[allow(clippy::approx_constant)]
         fn custom_log10(n: u8) -> f32 {
             match n {
                 0 => 0.0, // log(1)
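Note: clippy::approx_constant flags float literals that sit suspiciously close to a known mathematical constant. A lookup table of log10 values is a legitimate use of such literals — 0.30103 matches std::f32::consts::LOG10_2 to five places — so the allow here presumably silences exactly that. A sketch of the pattern (the literal values are illustrative, not copied from the file):

    // Without the allow, clippy suggests replacing 0.30103 with
    // std::f32::consts::LOG10_2, which is not the intent of a lookup table.
    #[allow(clippy::approx_constant)]
    fn custom_log10(n: u8) -> f32 {
        match n {
            0 => 0.0,     // log10(1)
            1 => 0.30103, // log10(2), close enough to LOG10_2 to trip the lint
            2 => 0.47712, // log10(3)
            3 => 0.60206, // log10(4)
            _ => panic!("n is never bigger than 3"),
        }
    }

    fn main() {
        assert!((custom_log10(1) - 2f32.log10()).abs() < 1e-5);
    }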
@@ -82,8 +82,7 @@ fn update_awaiter(
     update_fn: Arc<ArcSwapFn>,
     index: Index,
 ) -> MResult<()> {
-    let mut receiver = receiver.into_iter();
-    while let Some(event) = receiver.next() {
+    for event in receiver {

         // if we receive a *MustClear* event, clear the index and break the loop
         if let UpdateEvent::MustClear = event {
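Note: replacing `while let Some(x) = iter.next()` with a `for` loop is clippy's while_let_on_iterator suggestion: a `for` loop consumes any IntoIterator directly, so the explicit `.into_iter()` plus manual `next()` calls are redundant. A self-contained comparison:

    fn main() {
        let events = vec!["update", "update", "must-clear"];

        // Manual form: an explicit iterator plus `while let` on `next()`.
        let mut iter = events.clone().into_iter();
        while let Some(event) = iter.next() {
            if event == "must-clear" { break; }
        }

        // Idiomatic form: `for` does the `into_iter()` and `next()` itself.
        for event in events {
            if event == "must-clear" { break; }
        }
    }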
@@ -547,7 +546,7 @@ mod tests {
             }
             "#;
             let settings: Settings = serde_json::from_str(data).unwrap();
-            settings.into_update().unwrap()
+            settings.to_update().unwrap()
         };

         let mut update_writer = db.update_write_txn().unwrap();
@@ -610,7 +609,7 @@ mod tests {
             }
             "#;
             let settings: Settings = serde_json::from_str(data).unwrap();
-            settings.into_update().unwrap()
+            settings.to_update().unwrap()
         };

         let mut update_writer = db.update_write_txn().unwrap();
@@ -672,7 +671,7 @@ mod tests {
             }
             "#;
             let settings: Settings = serde_json::from_str(data).unwrap();
-            settings.into_update().unwrap()
+            settings.to_update().unwrap()
         };

         let mut update_writer = db.update_write_txn().unwrap();
@@ -727,7 +726,7 @@ mod tests {
             }
             "#;
             let settings: Settings = serde_json::from_str(data).unwrap();
-            settings.into_update().unwrap()
+            settings.to_update().unwrap()
         };

         let mut update_writer = db.update_write_txn().unwrap();
@@ -763,7 +762,7 @@ mod tests {
             }
             "#;
             let settings: Settings = serde_json::from_str(data).unwrap();
-            settings.into_update().unwrap()
+            settings.to_update().unwrap()
         };

         let mut writer = db.update_write_txn().unwrap();
@@ -829,7 +828,7 @@ mod tests {
             }
             "#;
             let settings: Settings = serde_json::from_str(data).unwrap();
-            settings.into_update().unwrap()
+            settings.to_update().unwrap()
         };

         let mut writer = db.update_write_txn().unwrap();
@@ -871,7 +870,7 @@ mod tests {
             }
             "#;
             let settings: Settings = serde_json::from_str(data).unwrap();
-            settings.into_update().unwrap()
+            settings.to_update().unwrap()
         };

         let mut writer = db.update_write_txn().unwrap();
@@ -951,7 +950,7 @@ mod tests {
             }
             "#;
             let settings: Settings = serde_json::from_str(data).unwrap();
-            settings.into_update().unwrap()
+            settings.to_update().unwrap()
         };

         let mut writer = db.update_write_txn().unwrap();
@@ -1090,7 +1089,7 @@ mod tests {
             }
             "#;
             let settings: Settings = serde_json::from_str(data).unwrap();
-            settings.into_update().unwrap()
+            settings.to_update().unwrap()
         };

         let mut writer = db.update_write_txn().unwrap();
@@ -1166,7 +1165,7 @@ mod tests {
             }
             "#;
             let settings: Settings = serde_json::from_str(data).unwrap();
-            settings.into_update().unwrap()
+            settings.to_update().unwrap()
         };

         let mut writer = db.update_write_txn().unwrap();
@@ -70,7 +70,7 @@ impl FacetFilter {
                     bad_value => return Err(FacetError::unexpected_token(&["Array", "String"], bad_value).into()),
                 }
             }
-            return Ok(Self(filter));
+            Ok(Self(filter))
         }
         bad_value => Err(FacetError::unexpected_token(&["Array"], bad_value).into()),
     }
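Note: dropping `return` on the last expression of a block is clippy's needless_return fix: in Rust the final expression of a function or match arm is its value, so the explicit `return` (and its semicolon) is noise everywhere except early exits. For example:

    fn parse_positive(n: i64) -> Result<u64, String> {
        if n < 0 {
            // `return` is appropriate for an early exit...
            return Err(format!("{} is negative", n));
        }
        // ...but the final expression needs no `return` or semicolon.
        Ok(n as u64)
    }

    fn main() {
        assert_eq!(parse_positive(3), Ok(3));
        assert!(parse_positive(-1).is_err());
    }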
@@ -31,7 +31,7 @@ struct ConditionValue<'a> {

 impl<'a> ConditionValue<'a> {
     pub fn new(value: &Pair<'a, Rule>) -> Self {
-        let value = match value.as_rule() {
+        match value.as_rule() {
             Rule::string | Rule::word => {
                 let string = value.as_str();
                 let boolean = match value.as_str() {
@@ -43,12 +43,11 @@ impl<'a> ConditionValue<'a> {
                 ConditionValue { string, boolean, number }
             },
             _ => unreachable!(),
-        };
-        value
+        }
     }

     pub fn as_str(&self) -> &str {
-        self.string.as_ref()
+        self.string
     }

     pub fn as_number(&self) -> Option<&Number> {
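Note: binding a match to a local only to return the local on the next line is clippy's let_and_return pattern; a match is itself an expression, so the binding can simply disappear. The same fix is applied to the RANKING_RULE_REGEX initializer further down. Minimal form:

    fn classify(n: u8) -> &'static str {
        // Before (flagged by clippy::let_and_return):
        //     let value = match n { 0 => "zero", _ => "nonzero" };
        //     value
        // After: the match expression is returned directly.
        match n {
            0 => "zero",
            _ => "nonzero",
        }
    }

    fn main() {
        assert_eq!(classify(0), "zero");
    }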
@@ -73,7 +72,7 @@ fn get_field_value<'a>(schema: &Schema, pair: Pair<'a, Rule>) -> Result<(FieldId
     let key = items.next().unwrap();
     let field = schema
         .id(key.as_str())
-        .ok_or::<PestError<Rule>>(PestError::new_from_span(
+        .ok_or_else(|| PestError::new_from_span(
             ErrorVariant::CustomError {
                 message: format!(
                     "attribute `{}` not found, available attributes are: {}",
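Note: switching `ok_or(expensive_value)` to `ok_or_else(|| expensive_value)` is clippy's or_fun_call advice. ok_or evaluates its argument eagerly even when the Option is Some, so here the PestError (and its format! message) was being built on every lookup; the closure defers that work to the None path. A self-contained sketch of the same idea:

    fn find_id(fields: &[(&str, u32)], name: &str) -> Result<u32, String> {
        fields
            .iter()
            .find(|(n, _)| *n == name)
            .map(|(_, id)| *id)
            // `ok_or(format!(...))` would allocate the message even on success;
            // `ok_or_else` only runs the closure when `find` returned None.
            .ok_or_else(|| format!("attribute `{}` not found", name))
    }

    fn main() {
        let fields = [("title", 0), ("body", 1)];
        assert_eq!(find_id(&fields, "title"), Ok(0));
        assert!(find_id(&fields, "tags").is_err());
    }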
@@ -26,7 +26,7 @@ pub enum Filter<'a> {

 impl<'a> Filter<'a> {
     pub fn parse(expr: &'a str, schema: &'a Schema) -> FilterResult<'a> {
-        let mut lexed = FilterParser::parse(Rule::prgm, expr.as_ref())?;
+        let mut lexed = FilterParser::parse(Rule::prgm, expr)?;
         Self::build(lexed.next().unwrap().into_inner(), schema)
     }

@@ -1,3 +1,5 @@
+#![allow(clippy::type_complexity)]
+
 #[cfg(test)]
 #[macro_use]
 extern crate assert_matches;
@@ -6,7 +6,7 @@ use std::str::FromStr;
 use ordered_float::OrderedFloat;
 use serde::{Deserialize, Serialize};

-#[derive(Serialize, Deserialize, Debug, Copy, Clone, Hash)]
+#[derive(Serialize, Deserialize, Debug, Copy, Clone)]
 pub enum Number {
     Unsigned(u64),
     Signed(i64),
@@ -152,12 +152,7 @@ impl<'c, 'f, 'd, 'i> QueryBuilder<'c, 'f, 'd, 'i> {
                 distinct_size,
                 self.criteria,
                 self.searchable_attrs,
-                self.index.main,
-                self.index.postings_lists,
-                self.index.documents_fields_counts,
-                self.index.synonyms,
-                self.index.prefix_documents_cache,
-                self.index.prefix_postings_lists_cache,
+                self.index,
             ),
             None => bucket_sort(
                 reader,
@@ -168,12 +163,7 @@ impl<'c, 'f, 'd, 'i> QueryBuilder<'c, 'f, 'd, 'i> {
                 self.filter,
                 self.criteria,
                 self.searchable_attrs,
-                self.index.main,
-                self.index.postings_lists,
-                self.index.documents_fields_counts,
-                self.index.synonyms,
-                self.index.prefix_documents_cache,
-                self.index.prefix_postings_lists_cache,
+                self.index,
             ),
         }
     }
@@ -19,6 +19,7 @@ impl QueryWordsMapper {
         QueryWordsMapper { originals, mappings: HashMap::new() }
     }

+    #[allow(clippy::len_zero)]
     pub fn declare<I, A>(&mut self, range: Range<usize>, id: QueryId, replacement: I)
     where I: IntoIterator<Item = A>,
           A: ToString,
@@ -53,7 +54,7 @@ impl QueryWordsMapper {
         }

         {
-            let replacement = replacement[common_left..replacement.len() - common_right].iter().cloned().collect();
+            let replacement = replacement[common_left..replacement.len() - common_right].to_vec();
             self.mappings.insert(id + common_left, (range.clone(), replacement));
         }

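Note: `slice.iter().cloned().collect::<Vec<_>>()` and `slice.to_vec()` produce the same Vec, but to_vec says it directly; clippy has a dedicated lint for this (iter_cloned_collect, if memory serves). For example:

    fn main() {
        let replacement = ["foo".to_string(), "bar".to_string(), "baz".to_string()];

        // Verbose: iterate, clone each element, collect into a Vec.
        let a: Vec<String> = replacement[1..].iter().cloned().collect();

        // Direct: `to_vec` clones a slice into a Vec in one call.
        let b = replacement[1..].to_vec();

        assert_eq!(a, b);
    }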
@@ -10,8 +10,7 @@ use self::RankingRule::*;
 pub const DEFAULT_RANKING_RULES: [RankingRule; 6] = [Typo, Words, Proximity, Attribute, WordsPosition, Exactness];

 static RANKING_RULE_REGEX: Lazy<regex::Regex> = Lazy::new(|| {
-    let regex = regex::Regex::new(r"(asc|desc)\(([a-zA-Z0-9-_]*)\)").unwrap();
-    regex
+    regex::Regex::new(r"(asc|desc)\(([a-zA-Z0-9-_]*)\)").unwrap()
 });

 #[derive(Default, Clone, Serialize, Deserialize)]
@@ -44,11 +43,11 @@ fn deserialize_some<'de, T, D>(deserializer: D) -> Result<Option<T>, D::Error>
 }

 impl Settings {
-    pub fn into_update(&self) -> Result<SettingsUpdate, RankingRuleConversionError> {
+    pub fn to_update(&self) -> Result<SettingsUpdate, RankingRuleConversionError> {
         let settings = self.clone();

         let ranking_rules = match settings.ranking_rules {
-            Some(Some(rules)) => UpdateState::Update(RankingRule::from_iter(rules.iter())?),
+            Some(Some(rules)) => UpdateState::Update(RankingRule::try_from_iter(rules.iter())?),
             Some(None) => UpdateState::Clear,
             None => UpdateState::Nothing,
         };
@@ -152,7 +151,7 @@ impl RankingRule {
         }
     }

-    pub fn from_iter(rules: impl IntoIterator<Item = impl AsRef<str>>) -> Result<Vec<RankingRule>, RankingRuleConversionError> {
+    pub fn try_from_iter(rules: impl IntoIterator<Item = impl AsRef<str>>) -> Result<Vec<RankingRule>, RankingRuleConversionError> {
         rules.into_iter()
             .map(|s| RankingRule::from_str(s.as_ref()))
             .collect()
@@ -287,10 +287,10 @@ impl Main {
     }

     pub fn distinct_attribute(&self, reader: &heed::RoTxn<MainT>) -> MResult<Option<FieldId>> {
-        if let Some(value) = self.main.get::<_, Str, OwnedType<u16>>(reader, DISTINCT_ATTRIBUTE_KEY)? {
-            return Ok(Some(FieldId(value.to_owned())))
+        match self.main.get::<_, Str, OwnedType<u16>>(reader, DISTINCT_ATTRIBUTE_KEY)? {
+            Some(value) => Ok(Some(FieldId(value.to_owned()))),
+            None => Ok(None),
         }
-        return Ok(None)
     }

     pub fn put_distinct_attribute(self, writer: &mut heed::RwTxn<MainT>, value: FieldId) -> MResult<()> {
@@ -109,6 +109,7 @@ pub fn push_documents_addition<D: serde::Serialize>(
     Ok(last_update_id)
 }

+#[allow(clippy::too_many_arguments)]
 fn index_document<A>(
     writer: &mut heed::RwTxn<MainT>,
     documents_fields: DocumentsFields,
@@ -72,7 +72,7 @@ impl Update {

     fn settings(data: SettingsUpdate) -> Update {
         Update {
-            data: UpdateData::Settings(data),
+            data: UpdateData::Settings(Box::new(data)),
             enqueued_at: Utc::now(),
         }
     }
@@ -85,7 +85,7 @@ pub enum UpdateData {
     DocumentsAddition(Vec<IndexMap<String, Value>>),
     DocumentsPartial(Vec<IndexMap<String, Value>>),
     DocumentsDeletion(Vec<String>),
-    Settings(SettingsUpdate)
+    Settings(Box<SettingsUpdate>)
 }

 impl UpdateData {
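Note: boxing SettingsUpdate in these Settings variants addresses clippy's large_enum_variant lint. An enum is as big as its largest variant, so one bulky variant inflates every UpdateData value that moves through queues and channels; the Box keeps the variant at pointer size, at the cost of one allocation and a deref (the `*settings` a few hunks down). A sketch of the shape of the fix, with a made-up payload type:

    // A deliberately bulky payload standing in for `SettingsUpdate`.
    struct BigPayload {
        data: [u64; 32],
    }

    enum Update {
        Clear,
        // Boxed: this variant is one pointer wide, so the whole enum stays small.
        Settings(Box<BigPayload>),
    }

    fn main() {
        // Construction wraps the value; consumption dereferences it.
        let update = Update::Settings(Box::new(BigPayload { data: [0; 32] }));
        if let Update::Settings(settings) = update {
            let payload: BigPayload = *settings; // move out of the Box
            assert_eq!(payload.data.len(), 32);
        }
        let _ = Update::Clear;
        // Every `Update` value is roughly pointer-sized instead of 256+ bytes.
        assert!(std::mem::size_of::<Update>() <= 16);
    }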
@@ -117,7 +117,7 @@ pub enum UpdateType {
     DocumentsAddition { number: usize },
     DocumentsPartial { number: usize },
     DocumentsDeletion { number: usize },
-    Settings { settings: SettingsUpdate },
+    Settings { settings: Box<SettingsUpdate> },
 }

 #[derive(Debug, Clone, Serialize, Deserialize)]
@@ -273,7 +273,7 @@ pub fn update_task<'a, 'b>(
             let result = apply_settings_update(
                 writer,
                 index,
-                settings,
+                *settings,
             );

             (update_type, result, start.elapsed())
@@ -26,6 +26,7 @@ pub trait ErrorCode: std::error::Error {
     }
 }

+#[allow(clippy::enum_variant_names)]
 enum ErrorType {
     InternalError,
     InvalidRequestError,
@@ -43,6 +43,7 @@ pub struct LoggingMiddleware<S> {
     service: Rc<RefCell<S>>,
 }

+#[allow(clippy::type_complexity)]
 impl<S, B> Service for LoggingMiddleware<S>
 where
     S: Service<Request = ServiceRequest, Response = ServiceResponse<B>, Error = actix_web::Error> + 'static,
@@ -161,7 +161,7 @@ fn load_private_key(filename: PathBuf) -> Result<rustls::PrivateKey, Box<dyn err
 fn load_ocsp(filename: &Option<PathBuf>) -> Result<Vec<u8>, Box<dyn error::Error>> {
     let mut ret = Vec::new();

-    if let &Some(ref name) = filename {
+    if let Some(ref name) = filename {
         fs::File::open(name)
             .map_err(|_| "cannot open ocsp file")?
             .read_to_end(&mut ret)
@@ -188,14 +188,12 @@ impl SearchQuery {
                     for attr in &restricted_attributes {
                         final_attributes.insert(attr.to_string());
                     }
-                } else {
-                    if available_attributes.contains(attribute) {
-                        final_attributes.insert(attribute.to_string());
-                    } else {
-                        warn!("The attributes {:?} present in attributesToHighlight parameter doesn't exist", attribute);
-                    }
-                }
+                } else if available_attributes.contains(attribute) {
+                    final_attributes.insert(attribute.to_string());
+                } else {
+                    warn!("The attributes {:?} present in attributesToHighlight parameter doesn't exist", attribute);
+                }
             }

             search_builder.attributes_to_highlight(final_attributes);
         }
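Note: `else { if … }` flattens to `else if …`, removing a nesting level without changing behavior — the collapsible_if / collapsible_else_if family of clippy lints. Side by side:

    fn describe(available: bool, n: u32) -> &'static str {
        // Before: a lone `if` nested inside `else` adds a brace level.
        //     if n == 0 { "none" } else { if available { "ok" } else { "missing" } }
        // After: `else if` reads as one flat chain.
        if n == 0 {
            "none"
        } else if available {
            "ok"
        } else {
            "missing"
        }
    }

    fn main() {
        assert_eq!(describe(true, 1), "ok");
        assert_eq!(describe(false, 1), "missing");
    }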
@@ -246,6 +244,6 @@ fn prepare_facet_list(facets: &str, schema: &Schema, facet_attrs: &[FieldId]) ->
             }
             Ok(field_ids)
         }
-        bad_val => return Err(FacetCountError::unexpected_token(bad_val, &["[String]"]))
+        bad_val => Err(FacetCountError::unexpected_token(bad_val, &["[String]"]))
     }
 }
@@ -45,7 +45,7 @@ async fn update_all(
     let update_id = data.db.update_write::<_, _, ResponseError>(|writer| {
         let settings = body
             .into_inner()
-            .into_update()
+            .to_update()
             .map_err(Error::bad_request)?;
         let update_id = index.settings_update(writer, settings)?;
         Ok(update_id)
@@ -211,7 +211,7 @@ async fn update_rules(
         ..Settings::default()
     };

-    let settings = settings.into_update().map_err(Error::bad_request)?;
+    let settings = settings.to_update().map_err(Error::bad_request)?;
     let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;

     Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
@@ -282,7 +282,7 @@ async fn update_distinct(
         ..Settings::default()
     };

-    let settings = settings.into_update().map_err(Error::bad_request)?;
+    let settings = settings.to_update().map_err(Error::bad_request)?;
     let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;

     Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
@@ -350,7 +350,7 @@ async fn update_searchable(
         ..Settings::default()
     };

-    let settings = settings.into_update().map_err(Error::bad_request)?;
+    let settings = settings.to_update().map_err(Error::bad_request)?;

     let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;

@@ -421,7 +421,7 @@ async fn update_displayed(
         ..Settings::default()
     };

-    let settings = settings.into_update().map_err(Error::bad_request)?;
+    let settings = settings.to_update().map_err(Error::bad_request)?;
     let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;

     Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
@@ -490,7 +490,7 @@ async fn update_accept_new_fields(
         ..Settings::default()
     };

-    let settings = settings.into_update().map_err(Error::bad_request)?;
+    let settings = settings.to_update().map_err(Error::bad_request)?;
     let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;

     Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
@@ -549,7 +549,7 @@ async fn update_attributes_for_faceting(
         ..Settings::default()
     };

-    let settings = settings.into_update().map_err(Error::bad_request)?;
+    let settings = settings.to_update().map_err(Error::bad_request)?;
     let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;

     Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
@@ -2,7 +2,7 @@ use crate::{FieldsMap, FieldId, SResult, Error, IndexedPos};
 use serde::{Serialize, Deserialize};
 use std::collections::{HashMap, HashSet};

-#[derive(Clone, Debug, Serialize, Deserialize)]
+#[derive(Clone, Debug, Serialize, Deserialize, Default)]
 pub struct Schema {
     fields_map: FieldsMap,

@@ -19,13 +19,8 @@ pub struct Schema {
 impl Schema {
     pub fn new() -> Schema {
         Schema {
-            fields_map: FieldsMap::default(),
-            primary_key: None,
-            ranked: HashSet::new(),
-            displayed: HashSet::new(),
-            indexed: Vec::new(),
-            indexed_map: HashMap::new(),
             accept_new_fields: true,
+            ..Default::default()
         }
     }

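Note: deriving Default for Schema lets new() spell out only the field whose starting value differs from the derived default (accept_new_fields: true) and fill the rest with ..Default::default(), so adding a field later no longer forces an edit to new(). The pattern in isolation, with a trimmed-down stand-in struct:

    use std::collections::HashSet;

    #[derive(Default)]
    struct Schema {
        fields: HashSet<String>,
        primary_key: Option<String>,
        accept_new_fields: bool, // the derived default would be `false`
    }

    impl Schema {
        fn new() -> Schema {
            Schema {
                // Only the non-default field is written out...
                accept_new_fields: true,
                // ...everything else comes from the derived impl.
                ..Default::default()
            }
        }
    }

    fn main() {
        let schema = Schema::new();
        assert!(schema.accept_new_fields);
        assert!(schema.fields.is_empty());
        assert!(schema.primary_key.is_none());
    }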