rebase from master

Quentin de Quelen 2020-02-02 22:59:19 +01:00 committed by qdequele
parent 2143226f04
commit dc6907e748
No known key found for this signature in database. GPG Key ID: B3F0A000EBF11745
29 changed files with 92 additions and 105 deletions

.DS_Store (new binary file, vendored; binary content not shown)

Cargo.lock (generated)

@@ -1022,7 +1022,6 @@ version = "0.8.4"
 dependencies = [
  "assert-json-diff 1.0.1 (git+https://github.com/qdequele/assert-json-diff)",
  "async-std 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "bincode 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "chrono 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
  "crossbeam-channel 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",

(file name not shown)

@@ -14,6 +14,7 @@ use meilisearch_types::DocIndex;
 use sdset::{Set, SetBuf, exponential_search};
 use slice_group_by::{GroupBy, GroupByMut};
+use crate::error::Error;
 use crate::criterion::{Criteria, Context, ContextMut};
 use crate::distinct_map::{BufferedDistinctMap, DistinctMap};
 use crate::raw_document::RawDocument;
@@ -163,7 +164,7 @@ where
     let schema = main_store.schema(reader)?.ok_or(Error::SchemaMissing)?;
     let iter = raw_documents.into_iter().skip(range.start).take(range.len());
-    let iter = iter.map(|rd| Document::from_raw(rd, &automatons, &arena, searchable_attrs.as_ref(), &schema));
+    let iter = iter.map(|rd| Document::from_raw(rd, &queries_kinds, &arena, searchable_attrs.as_ref(), &schema));
     let documents = iter.collect();
     debug!("bucket sort took {:.02?}", before_bucket_sort.elapsed());
@@ -349,7 +350,7 @@ where
     };
     if distinct_accepted && seen.len() > range.start {
-        documents.push(Document::from_raw(raw_document, &queries_kinds, &arena, searchable_attrs.as_ref()));
+        documents.push(Document::from_raw(raw_document, &queries_kinds, &arena, searchable_attrs.as_ref(), &schema));
         if documents.len() == range.len() {
             break;
         }

(file name not shown)

@@ -743,12 +743,12 @@ mod tests {
         assert!(document.is_none());
         let document: Option<IgnoredAny> = index
-            .document(&reader, None, DocumentId(7900334843754999545))
+            .document(&reader, None, DocumentId(7_900_334_843_754_999_545))
             .unwrap();
         assert!(document.is_some());
         let document: Option<IgnoredAny> = index
-            .document(&reader, None, DocumentId(8367468610878465872))
+            .document(&reader, None, DocumentId(8_367_468_610_878_465_872))
             .unwrap();
         assert!(document.is_some());
     }
@@ -820,12 +820,12 @@ mod tests {
         assert!(document.is_none());
         let document: Option<IgnoredAny> = index
-            .document(&reader, None, DocumentId(7900334843754999545))
+            .document(&reader, None, DocumentId(7_900_334_843_754_999_545))
             .unwrap();
         assert!(document.is_some());
         let document: Option<IgnoredAny> = index
-            .document(&reader, None, DocumentId(8367468610878465872))
+            .document(&reader, None, DocumentId(8_367_468_610_878_465_872))
             .unwrap();
         assert!(document.is_some());
@@ -862,7 +862,7 @@ mod tests {
         let reader = db.main_read_txn().unwrap();
         let document: Option<serde_json::Value> = index
-            .document(&reader, None, DocumentId(7900334843754999545))
+            .document(&reader, None, DocumentId(7_900_334_843_754_999_545))
             .unwrap();
         let new_doc1 = serde_json::json!({
@@ -873,7 +873,7 @@ mod tests {
         assert_eq!(document, Some(new_doc1));
         let document: Option<serde_json::Value> = index
-            .document(&reader, None, DocumentId(8367468610878465872))
+            .document(&reader, None, DocumentId(8_367_468_610_878_465_872))
             .unwrap();
         let new_doc2 = serde_json::json!({
@@ -1039,14 +1039,14 @@ mod tests {
         assert_matches!(
             iter.next(),
             Some(Document {
-                id: DocumentId(7900334843754999545),
+                id: DocumentId(7_900_334_843_754_999_545),
                 ..
             })
         );
         assert_matches!(
             iter.next(),
             Some(Document {
-                id: DocumentId(8367468610878465872),
+                id: DocumentId(8_367_468_610_878_465_872),
                 ..
             })
         );
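The only change across these test hunks is cosmetic: Rust ignores underscore separators in integer literals, so the hard-coded DocumentId values gain digit grouping (the form clippy's unreadable_literal lint suggests) without changing their value. A minimal illustration:

```rust
// Underscores in integer literals are ignored by the compiler;
// both forms denote exactly the same u64 value.
fn main() {
    let plain: u64 = 7900334843754999545;
    let grouped: u64 = 7_900_334_843_754_999_545;
    assert_eq!(plain, grouped);
}
```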

(file name not shown)

@@ -2,6 +2,10 @@ use crate::serde::{DeserializerError, SerializerError};
 use serde_json::Error as SerdeJsonError;
 use std::{error, fmt, io};
+pub use heed::Error as HeedError;
+pub use fst::Error as FstError;
+pub use bincode::Error as BincodeError;
 pub type MResult<T> = Result<T, Error>;
 #[derive(Debug)]
@@ -35,14 +39,14 @@ impl From<meilisearch_schema::Error> for Error {
     }
 }
-impl From<heed::Error> for Error {
-    fn from(error: heed::Error) -> Error {
+impl From<HeedError> for Error {
+    fn from(error: HeedError) -> Error {
         Error::Zlmdb(error)
     }
 }
-impl From<fst::Error> for Error {
-    fn from(error: fst::Error) -> Error {
+impl From<FstError> for Error {
+    fn from(error: FstError) -> Error {
         Error::Fst(error)
     }
 }
@@ -53,8 +57,8 @@ impl From<SerdeJsonError> for Error {
     }
 }
-impl From<bincode::Error> for Error {
-    fn from(error: bincode::Error) -> Error {
+impl From<BincodeError> for Error {
+    fn from(error: BincodeError) -> Error {
         Error::Bincode(error)
     }
 }
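This hunk re-exports the dependency error types under local aliases and rewrites the From impls against them, so downstream crates can name HeedError, FstError, and BincodeError without depending on heed, fst, or bincode directly. A minimal sketch of the pattern, with std::io::Error standing in for a dependency error type:

```rust
// Sketch of the re-export + From pattern; io::Error stands in for a
// dependency error type such as heed::Error.
pub use std::io::Error as IoError;

#[derive(Debug)]
pub enum Error {
    Io(IoError),
}

pub type MResult<T> = Result<T, Error>;

impl From<IoError> for Error {
    fn from(error: IoError) -> Error {
        Error::Io(error)
    }
}

// `?` now converts the dependency error automatically inside
// any function returning MResult.
fn read(path: &str) -> MResult<String> {
    Ok(std::fs::read_to_string(path)?)
}

fn main() {
    println!("{:?}", read("/no/such/file"));
}
```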

(file name not shown)

@@ -23,18 +23,20 @@ pub mod serde;
 pub mod store;
 pub use self::database::{BoxUpdateFn, Database, MainT, UpdateT};
-pub use self::error::{Error, MResult};
+pub use self::error::{Error, HeedError, FstError, MResult};
 pub use self::number::{Number, ParseNumberError};
 pub use self::ranked_map::RankedMap;
 pub use self::raw_document::RawDocument;
 pub use self::store::Index;
 pub use self::update::{EnqueuedUpdateResult, ProcessedUpdateResult, UpdateStatus, UpdateType};
 pub use meilisearch_types::{DocIndex, DocumentId, Highlight};
+pub use meilisearch_schema::Schema;
 pub use query_words_mapper::QueryWordsMapper;
 use std::convert::TryFrom;
 use std::collections::HashMap;
 use compact_arena::SmallArena;
+use log::{error, trace};
 use crate::bucket_sort::PostingsListView;
 use crate::levenshtein::prefix_damerau_levenshtein;
@@ -92,7 +94,7 @@ fn highlights_from_raw_document<'a, 'tag, 'txn>(
     };
     let highlight = Highlight {
-        attribute: attribute,
+        attribute,
         char_index: di.char_index,
         char_length: covered_area,
     };
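The attribute: attribute initializer collapses to Rust's field init shorthand, which applies whenever a local variable shares the struct field's name; the same fix appears for ranking_rules and prefix in later hunks. In miniature:

```rust
struct Highlight {
    attribute: u16,
    char_index: u16,
}

fn main() {
    let attribute = 3;
    let char_index = 42;
    // Shorthand: equivalent to Highlight { attribute: attribute, char_index: char_index }
    let h = Highlight { attribute, char_index };
    println!("{} {}", h.attribute, h.char_index);
}
```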

(file name not shown)

@@ -312,7 +312,7 @@ mod tests {
         for ((docid, attr, _), count) in fields_counts {
             let prev = index
                 .documents_fields_counts
-                .document_field_count(&mut writer, docid, IndexedPos(attr))
+                .document_field_count(&writer, docid, IndexedPos(attr))
                 .unwrap();
             let prev = prev.unwrap_or(0);

(file name not shown)

@@ -180,7 +180,7 @@ pub fn create_query_tree(
 ) -> MResult<(Operation, HashMap<QueryId, Range<usize>>)>
 {
     let words = split_query_string(query).map(str::to_lowercase);
-    let words: Vec<_> = words.into_iter().enumerate().collect();
+    let words: Vec<_> = words.enumerate().collect();
     let mut mapper = QueryWordsMapper::new(words.iter().map(|(_, w)| w));
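map(str::to_lowercase) already returns an iterator, and IntoIterator::into_iter on a type that is itself an Iterator just returns self, so the dropped .into_iter() call was a no-op. A small demonstration:

```rust
fn main() {
    let words = "Hello World".split_whitespace().map(str::to_lowercase);
    // `words` is already an Iterator, so `.into_iter()` would only return it unchanged.
    let words: Vec<_> = words.enumerate().collect();
    assert_eq!(words, vec![(0, "hello".to_string()), (1, "world".to_string())]);
}
```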

(file name not shown)

@@ -22,10 +22,10 @@ fn validate_number(value: &Number) -> Option<String> {
     if value.is_f64() {
         return None
     }
-    return Some(value.to_string())
+    Some(value.to_string())
 }
-fn validate_string(value: &String) -> Option<String> {
+fn validate_string(value: &str) -> Option<String> {
     if value.chars().all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_') {
         Some(value.to_string())
     } else {
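Taking &str instead of &String (and dropping the redundant trailing return) is the idiomatic signature: an owned String coerces to &str at the call site, and string literals work without any allocation. The changed function, exercised standalone:

```rust
// Accepts String, &String, and &str alike via deref coercion.
fn validate_string(value: &str) -> Option<String> {
    if value.chars().all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_') {
        Some(value.to_string())
    } else {
        None
    }
}

fn main() {
    let owned = String::from("movie_id");
    assert!(validate_string(&owned).is_some()); // &String coerces to &str
    assert!(validate_string("bad value!").is_none()); // space and '!' are rejected
}
```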

(file name not shown)

@@ -306,7 +306,6 @@
     T: ser::Serialize,
 {
     let field_id = schema.get_or_create(&attribute)?;
-
     serialize_value_with_id(
         txn,
         field_id,

(file name not shown)

@@ -49,7 +49,7 @@ impl Settings {
         };
         Ok(SettingsUpdate {
-            ranking_rules: ranking_rules,
+            ranking_rules,
             ranking_distinct: settings.ranking_distinct.into(),
             identifier: settings.identifier.into(),
             searchable_attributes: settings.searchable_attributes.into(),

(file name not shown)

@@ -29,7 +29,7 @@ use std::{mem, ptr};
 use heed::Result as ZResult;
 use heed::{BytesEncode, BytesDecode};
-use meilisearch_schema::{Schema, SchemaAttr};
+use meilisearch_schema::{IndexedPos, FieldId};
 use sdset::{Set, SetBuf};
 use serde::de::{self, Deserialize};
 use zerocopy::{AsBytes, FromBytes};
@@ -38,6 +38,7 @@ use crate::criterion::Criteria;
 use crate::database::{MainT, UpdateT};
 use crate::database::{UpdateEvent, UpdateEventsEmitter};
 use crate::serde::Deserializer;
+use crate::settings::SettingsUpdate;
 use crate::{query_builder::QueryBuilder, update, DocIndex, DocumentId, Error, MResult};
 type BEU64 = zerocopy::U64<byteorder::BigEndian>;

(file name not shown)

@@ -19,7 +19,7 @@ pub struct PrefixKey {
 impl PrefixKey {
     pub fn new(prefix: [u8; 4], index: u64, docid: u64) -> PrefixKey {
         PrefixKey {
-            prefix: prefix,
+            prefix,
             index: BEU64::new(index),
             docid: BEU64::new(docid),
         }

(file name not shown)

@@ -109,7 +109,7 @@ pub fn apply_documents_addition<'a, 'b>(
 ) -> MResult<()> {
     let mut documents_additions = HashMap::new();
-    let schema = match index.main.schema(writer)? {
+    let mut schema = match index.main.schema(writer)? {
         Some(schema) => schema,
         None => return Err(Error::SchemaMissing),
     };
@@ -147,7 +147,7 @@ pub fn apply_documents_addition<'a, 'b>(
     for (document_id, document) in documents_additions {
         let serializer = Serializer {
             txn: writer,
-            schema: &schema,
+            schema: &mut schema,
             document_store: index.documents_fields,
             document_fields_counts: index.documents_fields_counts,
             indexer: &mut indexer,
@@ -166,7 +166,7 @@ pub fn apply_documents_addition<'a, 'b>(
         indexer,
     )?;
-    compute_short_prefixes(writer, index)?;
+    index.main.put_schema(writer, &schema)?;
     Ok(())
 }
@@ -178,7 +178,7 @@ pub fn apply_documents_partial_addition<'a, 'b>(
 ) -> MResult<()> {
     let mut documents_additions = HashMap::new();
-    let schema = match index.main.schema(writer)? {
+    let mut schema = match index.main.schema(writer)? {
         Some(schema) => schema,
         None => return Err(Error::SchemaMissing),
     };
@@ -233,7 +233,7 @@ pub fn apply_documents_partial_addition<'a, 'b>(
     for (document_id, document) in documents_additions {
         let serializer = Serializer {
             txn: writer,
-            schema: &schema,
+            schema: &mut schema,
             document_store: index.documents_fields,
             document_fields_counts: index.documents_fields_counts,
             indexer: &mut indexer,
@@ -252,7 +252,7 @@ pub fn apply_documents_partial_addition<'a, 'b>(
         indexer,
     )?;
-    compute_short_prefixes(writer, index)?;
+    index.main.put_schema(writer, &schema)?;
     Ok(())
 }
@@ -292,7 +292,7 @@ pub fn reindex_all_documents(writer: &mut heed::RwTxn<MainT>, index: &store::Ind
     for document_id in documents_ids {
         for result in index.documents_fields.document_fields(writer, *document_id)? {
-            let (attr, bytes) = result?;
+            let (field_id, bytes) = result?;
             let value: serde_json::Value = serde_json::from_slice(bytes)?;
             ram_store.insert((document_id, field_id), value);
         }
@@ -322,7 +322,7 @@ pub fn reindex_all_documents(writer: &mut heed::RwTxn<MainT>, index: &store::Ind
         )?;
     }
-    compute_short_prefixes(writer, index)?;
+    index.main.put_schema(writer, &schema)?;
     Ok(())
 }
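The schema is now loaded mutably because serializing documents may create new fields, so each apply_* function has to persist the possibly-extended schema with index.main.put_schema before finishing (replacing the old compute_short_prefixes call). A hedged sketch of that read-modify-write shape, where Store, Schema, and their methods are simplified stand-ins for the real heed-backed types:

```rust
// Simplified stand-ins: the real code loads the schema from LMDB inside a
// write transaction, mutates it while indexing, and writes it back.
#[derive(Clone, Debug, Default)]
struct Schema {
    fields: Vec<String>,
}

impl Schema {
    // Serialization may encounter unseen attributes, mutating the schema.
    fn get_or_create(&mut self, name: &str) -> usize {
        if let Some(id) = self.fields.iter().position(|f| f == name) {
            return id;
        }
        self.fields.push(name.to_string());
        self.fields.len() - 1
    }
}

struct Store {
    schema: Option<Schema>,
}

impl Store {
    fn schema(&self) -> Option<Schema> {
        self.schema.clone()
    }
    fn put_schema(&mut self, schema: &Schema) {
        self.schema = Some(schema.clone());
    }
}

fn apply_documents_addition(store: &mut Store, fields: &[&str]) -> Result<(), String> {
    // 1. Load the schema at the start of the update...
    let mut schema = store.schema().ok_or("schema missing")?;
    // 2. ...let indexing extend it with any new fields...
    for field in fields {
        schema.get_or_create(field);
    }
    // 3. ...and persist the (possibly extended) schema before returning.
    store.put_schema(&schema);
    Ok(())
}

fn main() {
    let mut store = Store { schema: Some(Schema::default()) };
    apply_documents_addition(&mut store, &["id", "title"]).unwrap();
    println!("{:?}", store.schema());
}
```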

(file name not shown)

@@ -130,22 +130,10 @@
         _ => (),
     }
-    let main_store = index.main;
-    let documents_fields_store = index.documents_fields;
-    let documents_fields_counts_store = index.documents_fields_counts;
-    let postings_lists_store = index.postings_lists;
-    let docs_words_store = index.docs_words;
     if must_reindex {
-        reindex_all_documents(
-            writer,
-            main_store,
-            documents_fields_store,
-            documents_fields_counts_store,
-            postings_lists_store,
-            docs_words_store,
-        )?;
+        reindex_all_documents(writer, index)?;
     }
     if let UpdateState::Clear = settings.identifier {
         index.main.delete_schema(writer)?;
     }
@@ -158,10 +146,7 @@ pub fn apply_stop_words_update(
     stop_words: BTreeSet<String>,
 ) -> MResult<bool> {
-    let main_store = index.main;
-    let mut must_reindex = false;
-
-    let old_stop_words: BTreeSet<String> = main_store
+    let old_stop_words: BTreeSet<String> = index.main
         .stop_words_fst(writer)?
         .unwrap_or_default()
         .stream()
@@ -184,10 +169,9 @@
             index,
             deletion
         )?;
-        must_reindex = true;
+        return Ok(true)
     }
-    Ok(must_reindex)
+    Ok(false)
 }
@@ -256,8 +240,6 @@ fn apply_stop_words_deletion(
     deletion: BTreeSet<String>,
 ) -> MResult<()> {
-    let main_store = index.main;
     let mut stop_words_builder = SetBuilder::memory();
     for word in deletion {
@@ -271,7 +253,7 @@ fn apply_stop_words_deletion(
         .unwrap();
     // now we delete all of these stop words from the main store
-    let stop_words_fst = main_store.stop_words_fst(writer)?.unwrap_or_default();
+    let stop_words_fst = index.main.stop_words_fst(writer)?.unwrap_or_default();
     let op = OpBuilder::new()
         .add(&stop_words_fst)
@@ -285,7 +267,7 @@ fn apply_stop_words_deletion(
         .and_then(fst::Set::from_bytes)
         .unwrap();
-    Ok(main_store.put_stop_words_fst(writer, &stop_words_fst)?)
+    Ok(index.main.put_stop_words_fst(writer, &stop_words_fst)?)
 }
 pub fn apply_synonyms_update(
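apply_stop_words_update previously threaded a mutable must_reindex flag to the end of the function; the rewrite returns Ok(true) as soon as a deletion forces a reindex and falls through to Ok(false) otherwise. The refactor in miniature:

```rust
// Before: accumulate into a mutable flag and return it at the end.
// After: return early at the point where the answer becomes known.
fn must_reindex_after(deleted: &[&str]) -> Result<bool, ()> {
    if !deleted.is_empty() {
        // apply the deletion, then signal the caller to reindex
        return Ok(true);
    }
    Ok(false)
}

fn main() {
    assert_eq!(must_reindex_after(&["the"]), Ok(true));
    assert_eq!(must_reindex_after(&[]), Ok(false));
}
```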

meilisearch-http/.DS_Store (new binary file, vendored; binary content not shown)

(file name not shown)

@@ -15,17 +15,19 @@ path = "src/main.rs"
 [dependencies]
 async-std = { version = "1.0.1", features = ["attributes"] }
+bincode = "1.2.0"
 chrono = { version = "0.4.9", features = ["serde"] }
 crossbeam-channel = "0.4.0"
 env_logger = "0.7.1"
+futures = "0.3.1"
 heed = "0.6.1"
 http = "0.1.19"
+http-service = "0.4.0"
 indexmap = { version = "1.3.0", features = ["serde-1"] }
 log = "0.4.8"
 main_error = "0.1.0"
 meilisearch-core = { path = "../meilisearch-core", version = "0.8.4" }
 meilisearch-schema = { path = "../meilisearch-schema", version = "0.8.4" }
+mime = "0.3.16"
 pretty-bytes = "0.2.2"
 rand = "0.7.2"
 rayon = "1.2.0"
@@ -39,9 +41,6 @@ tide = "0.6.0"
 ureq = { version = "0.11.2", features = ["tls"], default-features = false }
 walkdir = "2.2.9"
 whoami = "0.6"
-http-service = "0.4.0"
-futures = "0.3.1"
-mime = "0.3.16"
 [dev-dependencies]
 http-service-mock = "0.4.0"

(file name not shown)

@@ -5,6 +5,7 @@ use log::{error, warn};
 use serde::{Deserialize, Serialize};
 use tide::IntoResponse;
 use tide::Response;
+use meilisearch_core::{HeedError, FstError};
 use crate::helpers::meilisearch::Error as SearchError;
@@ -139,14 +140,14 @@ impl From<meilisearch_core::Error> for ResponseError {
     }
 }
-impl From<heed::Error> for ResponseError {
-    fn from(err: heed::Error) -> ResponseError {
+impl From<HeedError> for ResponseError {
+    fn from(err: HeedError) -> ResponseError {
         ResponseError::internal(err)
     }
 }
-impl From<meilisearch_core::FstError> for ResponseError {
-    fn from(err: meilisearch_core::FstError) -> ResponseError {
+impl From<FstError> for ResponseError {
+    fn from(err: FstError) -> ResponseError {
         ResponseError::internal(err)
     }
 }
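With HeedError and FstError re-exported by meilisearch-core, the HTTP crate's From impls no longer need a direct heed or fst dependency. A sketch of the conversion funnel, with a simplified ResponseError and std::io::Error standing in for a re-exported dependency error:

```rust
use std::fmt;

// Simplified stand-in for the crate's ResponseError.
#[derive(Debug)]
enum ResponseError {
    Internal(String),
}

impl ResponseError {
    // Anything printable becomes an opaque internal-server-style error.
    fn internal(err: impl fmt::Display) -> ResponseError {
        ResponseError::Internal(err.to_string())
    }
}

// One impl per re-exported error type; io::Error stands in for HeedError.
impl From<std::io::Error> for ResponseError {
    fn from(err: std::io::Error) -> ResponseError {
        ResponseError::internal(err)
    }
}

fn handler() -> Result<(), ResponseError> {
    // `?` routes the dependency error through From, keeping handlers terse.
    std::fs::read("/no/such/file")?;
    Ok(())
}

fn main() {
    println!("{:?}", handler());
}
```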

(file name not shown)

@@ -38,7 +38,7 @@ pub fn load_routes(app: &mut tide::Server<Data>) {
         }
     });
-    app.at("/indexes/")
+    app.at("/indexes")
         .get(|ctx| into_response(index::list_indexes(ctx)))
         .post(|ctx| into_response(index::create_index(ctx)));
@@ -95,7 +95,7 @@ pub fn load_routes(app: &mut tide::Server<Data>) {
         .post(|ctx| into_response(setting::update_searchable(ctx)))
         .delete(|ctx| into_response(setting::delete_searchable(ctx)));
-    app.at("/indexes/:index/settings/displayed-attribute")
+    app.at("/indexes/:index/settings/displayed-attributes")
         .get(|ctx| into_response(setting::displayed(ctx)))
        .post(|ctx| into_response(setting::update_displayed(ctx)))
        .delete(|ctx| into_response(setting::delete_displayed(ctx)));
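Both route fixes are URL hygiene: the collection endpoint loses its trailing slash (/indexes instead of /indexes/), and the settings path becomes the plural displayed-attributes, matching searchable-attributes above it. A toy routing table that enforces those conventions (the Router type here is hypothetical, not the tide API):

```rust
use std::collections::HashMap;

// Toy stand-in for an HTTP framework's routing table.
#[derive(Default)]
struct Router {
    routes: HashMap<&'static str, &'static str>,
}

impl Router {
    fn at(&mut self, path: &'static str, handler: &'static str) -> &mut Self {
        // Collection routes are registered without a trailing slash.
        assert!(!path.ends_with('/'), "no trailing slash: {}", path);
        self.routes.insert(path, handler);
        self
    }
}

fn main() {
    let mut app = Router::default();
    app.at("/indexes", "list_indexes")
        .at("/indexes/:index/settings/searchable-attributes", "searchable")
        .at("/indexes/:index/settings/displayed-attributes", "displayed");
    println!("{} routes registered", app.routes.len());
}
```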

meilisearch-http/tests/.DS_Store (new binary file, vendored; binary content not shown)

meilisearch-http/tests/assets/.DS_Store (new binary file, vendored; binary content not shown)

(file name not shown)

@@ -36,6 +36,7 @@ pub fn enrich_server_with_movies_index(
 ) -> Result<(), Box<dyn Error>> {
     let body = json!({
         "uid": "movies",
+        "identifier": "id",
     })
     .to_string()
     .into_bytes();
@@ -114,7 +115,7 @@ pub fn enrich_server_with_movies_documents(
         .unwrap();
     let _res = server.simulate(req).unwrap();
-    block_on(sleep(Duration::from_secs(5)));
+    block_on(sleep(Duration::from_secs(10)));
     Ok(())
 }
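The helper still waits a fixed delay for the asynchronous update to be applied; the timeout doubles from 5 to 10 seconds, presumably to make indexing the movie corpus less flaky on slow machines. The waiting construct in isolation, assuming block_on and sleep come from async-std as the dev-dependencies suggest:

```rust
use async_std::task::{block_on, sleep};
use std::time::Duration;

fn main() {
    // Block the test thread until the server has (probably) finished indexing.
    block_on(sleep(Duration::from_secs(10)));
}
```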

(file name not shown)

@@ -902,8 +902,8 @@ fn search_with_settings_synonyms() {
                 "Action",
                 "Science Fiction"
             ],
-            "poster_path": "https://image.tmdb.org/t/p/w500/7WsyChQLEftFiDOVTGkv3hFpyyt.jpg",
-            "vote_count": 16056
+            "vote_count": 16056,
+            "poster_path": "https://image.tmdb.org/t/p/w500/7WsyChQLEftFiDOVTGkv3hFpyyt.jpg"
         },
         {
             "id": 299534,
@@ -919,25 +919,25 @@ fn search_with_settings_synonyms() {
                 "Science Fiction",
                 "Action"
             ],
-            "poster_path": "https://image.tmdb.org/t/p/w500/or06FN3Dka5tukK1e9sl16pB3iy.jpg",
-            "vote_count": 10497
+            "vote_count": 10497,
+            "poster_path": "https://image.tmdb.org/t/p/w500/or06FN3Dka5tukK1e9sl16pB3iy.jpg"
         },
         {
-            "id": 271110,
-            "popularity": 37.431,
-            "vote_average": 7.4,
-            "title": "Captain America: Civil War",
-            "tagline": "Divided We Fall",
-            "overview": "Following the events of Age of Ultron, the collective governments of the world pass an act designed to regulate all superhuman activity. This polarizes opinion amongst the Avengers, causing two factions to side with Iron Man or Captain America, which causes an epic battle between former allies.",
-            "director": "Anthony Russo",
+            "id": 99861,
+            "popularity": 33.938,
+            "vote_average": 7.3,
+            "title": "Avengers: Age of Ultron",
+            "tagline": "A New Age Has Come.",
+            "overview": "When Tony Stark tries to jumpstart a dormant peacekeeping program, things go awry and Earths Mightiest Heroes are put to the ultimate test as the fate of the planet hangs in the balance. As the villainous Ultron emerges, it is up to The Avengers to stop him from enacting his terrible plans, and soon uneasy alliances and unexpected action pave the way for an epic and unique global adventure.",
+            "director": "Joss Whedon",
             "producer": "Kevin Feige",
             "genres": [
-                "Adventure",
                 "Action",
+                "Adventure",
                 "Science Fiction"
             ],
-            "poster_path": "https://image.tmdb.org/t/p/w500/kSBXou5Ac7vEqKd97wotJumyJvU.jpg",
-            "vote_count": 15079
+            "vote_count": 14661,
+            "poster_path": "https://image.tmdb.org/t/p/w500/t90Y3G8UGQp0f0DrP60wRu9gfrH.jpg"
         }
     ]);

(file name not shown)

@@ -50,9 +50,9 @@ fn write_all_and_delete() {
             "dsc(rank)",
         ],
         "rankingDistinct": "movie_id",
-        "identifier": "uid",
+        "identifier": "id",
         "searchableAttributes": [
-            "uid",
+            "id",
             "movie_id",
             "title",
             "description",

(file name not shown)

@@ -73,7 +73,7 @@ fn write_all_and_delete() {
     let mut buf = Vec::new();
     block_on(res.into_body().read_to_end(&mut buf)).unwrap();
     let res_value: Value = serde_json::from_slice(&buf).unwrap();
-
+    println!("1: {:?} vs {:?}", json, res_value);
     assert_json_eq!(json, res_value, ordered: false);
     // 4 - Delete all settings
@@ -102,7 +102,7 @@ fn write_all_and_delete() {
         "rankingRules": null,
         "rankingDistinct": null,
     });
-
+    println!("2: {:?} vs {:?}", json, res_value);
     assert_json_eq!(json, res_value, ordered: false);
 }

(file name not shown)

@@ -24,7 +24,7 @@ impl FieldsMap {
         if let Some(id) = self.name_map.get(name) {
             return Ok(*id)
         }
-        let id = self.next_id.into();
+        let id = self.next_id;
         self.next_id = self.next_id.next()?;
         self.name_map.insert(name.to_string(), id);
         self.id_map.insert(id, name.to_string());
@@ -39,7 +39,7 @@ impl FieldsMap {
     }
     pub fn id(&self, name: &str) -> Option<FieldId> {
-        self.name_map.get(name).map(|s| *s)
+        self.name_map.get(name).copied()
     }
     pub fn name<I: Into<FieldId>>(&self, id: I) -> Option<&str> {
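Option<&FieldId>::copied() replaces the hand-rolled .map(|s| *s), and next_id drops a useless .into() between identical types. The accessor change, illustrated with a plain HashMap:

```rust
use std::collections::HashMap;

fn main() {
    let mut name_map: HashMap<String, u16> = HashMap::new();
    name_map.insert("title".to_string(), 0);

    // Before: name_map.get(name).map(|s| *s)
    // After:  Option<&u16> -> Option<u16> via .copied()
    let id: Option<u16> = name_map.get("title").copied();
    assert_eq!(id, Some(0));
}
```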

(file name not shown)

@@ -21,7 +21,7 @@ pub struct Schema {
 impl Schema {
     pub fn with_identifier(name: &str) -> Schema {
         let mut fields_map = FieldsMap::default();
-        let field_id = fields_map.insert(name.into()).unwrap();
+        let field_id = fields_map.insert(name).unwrap();
         Schema {
             fields_map,
@@ -57,7 +57,7 @@ impl Schema {
     }
     pub fn contains(&self, name: &str) -> bool {
-        self.fields_map.id(name.into()).is_some()
+        self.fields_map.id(name).is_some()
     }
     pub fn get_or_create_empty(&mut self, name: &str) -> SResult<FieldId> {
@@ -65,16 +65,16 @@
     }
     pub fn get_or_create(&mut self, name: &str) -> SResult<FieldId> {
-        match self.fields_map.id(name.clone()) {
+        match self.fields_map.id(name) {
             Some(id) => {
                 Ok(id)
             }
             None => {
                 if self.index_new_fields {
-                    self.set_indexed(name.clone())?;
+                    self.set_indexed(name)?;
                     self.set_displayed(name)
                 } else {
-                    self.fields_map.insert(name.clone())
+                    self.fields_map.insert(name)
                 }
             }
         }
@@ -105,19 +105,19 @@
     }
     pub fn set_ranked(&mut self, name: &str) -> SResult<FieldId> {
-        let id = self.fields_map.insert(name.into())?;
+        let id = self.fields_map.insert(name)?;
         self.ranked.insert(id);
         Ok(id)
     }
     pub fn set_displayed(&mut self, name: &str) -> SResult<FieldId> {
-        let id = self.fields_map.insert(name.into())?;
+        let id = self.fields_map.insert(name)?;
         self.displayed.insert(id);
         Ok(id)
     }
     pub fn set_indexed(&mut self, name: &str) -> SResult<(FieldId, IndexedPos)> {
-        let id = self.fields_map.insert(name.into())?;
+        let id = self.fields_map.insert(name)?;
         if let Some(indexed_pos) = self.indexed_map.get(&id) {
             return Ok((id, *indexed_pos))
         };
@@ -128,19 +128,19 @@
     }
     pub fn remove_ranked(&mut self, name: &str) {
-        if let Some(id) = self.fields_map.id(name.into()) {
+        if let Some(id) = self.fields_map.id(name) {
             self.ranked.remove(&id);
         }
     }
     pub fn remove_displayed(&mut self, name: &str) {
-        if let Some(id) = self.fields_map.id(name.into()) {
+        if let Some(id) = self.fields_map.id(name) {
             self.displayed.remove(&id);
         }
     }
     pub fn remove_indexed(&mut self, name: &str) {
-        if let Some(id) = self.fields_map.id(name.into()) {
+        if let Some(id) = self.fields_map.id(name) {
             self.indexed_map.remove(&id);
             self.indexed.retain(|x| *x != id);
         }