Introduce a rustfmt file

Clément Renault 2022-10-20 18:00:07 +02:00
parent 52e858a588
commit 80b2e70ee7
No known key found for this signature in database
GPG Key ID: 92ADA4E935E71FA4
92 changed files with 1250 additions and 2855 deletions

.rustfmt.toml Normal file
View File

@ -0,0 +1,5 @@
unstable_features = true
use_small_heuristics = "max"
imports_granularity = "Module"
group_imports = "StdExternalCrate"
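All four options are unstable, so formatting the workspace requires a nightly toolchain (for example, cargo +nightly fmt). The two import options account for most of this diff. Below is a minimal, self-contained sketch of their effect; the names are illustrative, and the external-crate group assumes a serde_json dependency in Cargo.toml:

// std group: imports_granularity = "Module" flattens nested trees such as
// `use std::{fs::File, io::{Seek, SeekFrom}};` into one `use` per module.
use std::fs::File;
use std::io::{Seek, SeekFrom};

// external-crate group (assumed serde_json dependency)
use serde_json::Value;

// crate-local group: group_imports = "StdExternalCrate" separates the three
// groups with blank lines, ordered std / external crates / crate-local.
use self::demo::write_sample;

mod demo {
    // Hypothetical helper so the crate-local import group has something to show.
    pub fn write_sample(path: &str) -> std::io::Result<()> {
        std::fs::write(path, r#"{"ok": true}"#)
    }
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    write_sample("demo.json")?;
    let mut file = File::open("demo.json")?;
    file.seek(SeekFrom::Start(0))?; // rewind before handing the file to serde
    let value: Value = serde_json::from_reader(file)?;
    println!("{value}");
    Ok(())
}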

View File

@ -1,11 +1,9 @@
use meilisearch_types::{
error::ResponseError,
keys::Key,
milli::update::IndexDocumentsMethod,
settings::Unchecked,
tasks::{Details, KindWithContent, Status, Task, TaskId},
InstanceUid,
};
use meilisearch_types::error::ResponseError;
use meilisearch_types::keys::Key;
use meilisearch_types::milli::update::IndexDocumentsMethod;
use meilisearch_types::settings::Unchecked;
use meilisearch_types::tasks::{Details, KindWithContent, Status, Task, TaskId};
use meilisearch_types::InstanceUid;
use roaring::RoaringBitmap;
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
@ -168,15 +166,8 @@ impl From<KindWithContent> for KindDump {
}
KindWithContent::DocumentClear { .. } => KindDump::DocumentClear,
KindWithContent::Settings {
new_settings,
is_deletion,
allow_index_creation,
..
} => KindDump::Settings {
settings: new_settings,
is_deletion,
allow_index_creation,
},
new_settings, is_deletion, allow_index_creation, ..
} => KindDump::Settings { settings: new_settings, is_deletion, allow_index_creation },
KindWithContent::IndexDeletion { .. } => KindDump::IndexDeletion,
KindWithContent::IndexCreation { primary_key, .. } => {
KindDump::IndexCreation { primary_key }
@ -191,15 +182,9 @@ impl From<KindWithContent> for KindDump {
KindWithContent::TaskDeletion { query, tasks } => {
KindDump::TasksDeletion { query, tasks }
}
KindWithContent::DumpExport {
dump_uid,
keys,
instance_uid,
} => KindDump::DumpExport {
dump_uid,
keys,
instance_uid,
},
KindWithContent::DumpExport { dump_uid, keys, instance_uid } => {
KindDump::DumpExport { dump_uid, keys, instance_uid }
}
KindWithContent::Snapshot => KindDump::Snapshot,
}
}
@ -207,29 +192,25 @@ impl From<KindWithContent> for KindDump {
#[cfg(test)]
pub(crate) mod test {
use std::{
fs::File,
io::{Seek, SeekFrom},
str::FromStr,
};
use std::fs::File;
use std::io::{Seek, SeekFrom};
use std::str::FromStr;
use big_s::S;
use maplit::btreeset;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::keys::{Action, Key};
use meilisearch_types::milli::{self, update::Setting};
use meilisearch_types::tasks::Status;
use meilisearch_types::{index_uid::IndexUid, star_or::StarOr};
use meilisearch_types::{
settings::{Checked, Settings},
tasks::Details,
};
use meilisearch_types::milli::update::Setting;
use meilisearch_types::milli::{self};
use meilisearch_types::settings::{Checked, Settings};
use meilisearch_types::star_or::StarOr;
use meilisearch_types::tasks::{Details, Status};
use serde_json::{json, Map, Value};
use time::macros::datetime;
use uuid::Uuid;
use crate::{
reader::Document, DumpReader, DumpWriter, IndexMetadata, KindDump, TaskDump, Version,
};
use crate::reader::Document;
use crate::{DumpReader, DumpWriter, IndexMetadata, KindDump, TaskDump, Version};
pub fn create_test_instance_uid() -> Uuid {
Uuid::parse_str("9e15e977-f2ae-4761-943f-1eaf75fd736d").unwrap()
@ -326,14 +307,8 @@ pub(crate) mod test {
finished_at: None,
},
Some(vec![
json!({ "id": 4, "race": "leonberg" })
.as_object()
.unwrap()
.clone(),
json!({ "id": 5, "race": "patou" })
.as_object()
.unwrap()
.clone(),
json!({ "id": 4, "race": "leonberg" }).as_object().unwrap().clone(),
json!({ "id": 5, "race": "patou" }).as_object().unwrap().clone(),
]),
),
(
@ -397,9 +372,7 @@ pub(crate) mod test {
let documents = create_test_documents();
let settings = create_test_settings();
let mut index = dump
.create_index("doggos", &create_test_index_metadata())
.unwrap();
let mut index = dump.create_index("doggos", &create_test_index_metadata()).unwrap();
for document in &documents {
index.push_document(document).unwrap();
}
@ -445,10 +418,7 @@ pub(crate) mod test {
// ==== checking the top level infos
assert_eq!(dump.version(), Version::V6);
assert!(dump.date().is_some());
assert_eq!(
dump.instance_uid().unwrap().unwrap(),
create_test_instance_uid()
);
assert_eq!(dump.instance_uid().unwrap().unwrap(), create_test_instance_uid());
// ==== checking the index
let mut indexes = dump.indexes().unwrap();
@ -475,10 +445,7 @@ pub(crate) mod test {
"A content file was expected for the task {}.",
expected.0.uid
);
let updates = content_file
.unwrap()
.collect::<Result<Vec<_>, _>>()
.unwrap();
let updates = content_file.unwrap().collect::<Result<Vec<_>, _>>().unwrap();
assert_eq!(updates, expected_update);
}
}
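Most of the collapsing in the hunks above, such as the one-line struct literal KindDump::Settings { settings: new_settings, is_deletion, allow_index_creation } and single-line chains like .collect::<Result<Vec<_>, _>>().unwrap(), comes from use_small_heuristics = "max", which raises every granular width limit (struct_lit_width, fn_call_width, chain_width, and so on) to max_width. A minimal sketch with hypothetical names, standard library only:

use std::fs::File;
use std::io::{BufRead, BufReader};
use std::path::Path;

struct Wrapper {
    reader: BufReader<File>,
}

// Under the default heuristics (struct_lit_width = 18) rustfmt breaks this
// literal one field per line; with "max" it stays on a single line whenever
// the whole expression fits in max_width (100 columns by default).
fn open(path: &Path) -> std::io::Result<Wrapper> {
    Ok(Wrapper { reader: BufReader::new(File::open(path)?) })
}

fn main() -> std::io::Result<()> {
    std::fs::write("demo.jsonl", "{\"id\": 4}\n{\"id\": 5}\n")?;
    let count = open(Path::new("demo.jsonl"))?.reader.lines().count();
    println!("{count} lines");
    Ok(())
}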

View File

@ -4,11 +4,10 @@ use std::str::FromStr;
use time::OffsetDateTime;
use uuid::Uuid;
use super::v3_to_v4::CompatV3ToV4;
use crate::reader::{v2, v3, Document};
use crate::Result;
use super::v3_to_v4::CompatV3ToV4;
pub struct CompatV2ToV3 {
pub from: v2::V2Reader,
}
@ -22,10 +21,7 @@ impl CompatV2ToV3 {
self.from
.index_uuid()
.into_iter()
.map(|index| v3::meta::IndexUuid {
uid: index.uid,
uuid: index.uuid,
})
.map(|index| v3::meta::IndexUuid { uid: index.uid, uuid: index.uuid })
.collect()
}
@ -65,10 +61,7 @@ impl CompatV2ToV3 {
.tasks()
.map(move |task| {
task.map(|(task, content_file)| {
let task = v3::Task {
uuid: task.uuid,
update: task.update.into(),
};
let task = v3::Task { uuid: task.uuid, update: task.update.into() };
Some((
task,
@ -216,11 +209,10 @@ impl TryFrom<(v2::updates::UpdateMeta, Option<Uuid>)> for v3::updates::Update {
fn try_from((update, uuid): (v2::updates::UpdateMeta, Option<Uuid>)) -> Result<Self> {
Ok(match update {
v2::updates::UpdateMeta::DocumentsAddition {
method,
format: _,
primary_key,
} if uuid.is_some() => v3::updates::Update::DocumentAddition {
v2::updates::UpdateMeta::DocumentsAddition { method, format: _, primary_key }
if uuid.is_some() =>
{
v3::updates::Update::DocumentAddition {
primary_key,
method: match method {
v2::updates::IndexDocumentsMethod::ReplaceDocuments => {
@ -231,7 +223,8 @@ impl TryFrom<(v2::updates::UpdateMeta, Option<Uuid>)> for v3::updates::Update {
}
},
content_uuid: uuid.unwrap(),
},
}
}
v2::updates::UpdateMeta::DocumentsAddition { .. } => {
return Err(crate::Error::MalformedTask)
}
@ -248,11 +241,8 @@ impl TryFrom<(v2::updates::UpdateMeta, Option<Uuid>)> for v3::updates::Update {
pub fn update_from_unchecked_update_meta(update: v2::updates::UpdateMeta) -> v3::updates::Update {
match update {
v2::updates::UpdateMeta::DocumentsAddition {
method,
format: _,
primary_key,
} => v3::updates::Update::DocumentAddition {
v2::updates::UpdateMeta::DocumentsAddition { method, format: _, primary_key } => {
v3::updates::Update::DocumentAddition {
primary_key,
method: match method {
v2::updates::IndexDocumentsMethod::ReplaceDocuments => {
@ -264,7 +254,8 @@ pub fn update_from_unchecked_update_meta(update: v2::updates::UpdateMeta) -> v3:
},
// we use this special uuid so we can recognize it if one day there is a bug related to this field.
content_uuid: Uuid::from_str("00112233-4455-6677-8899-aabbccddeeff").unwrap(),
},
}
}
v2::updates::UpdateMeta::ClearDocuments => v3::updates::Update::ClearDocuments,
v2::updates::UpdateMeta::DeleteDocuments { ids } => {
v3::updates::Update::DeleteDocuments(ids)
@ -354,10 +345,7 @@ impl<T> From<v2::Settings<T>> for v3::Settings<v3::Unchecked> {
.map(|f| f.into_iter().collect()),
sortable_attributes: v3::Setting::NotSet,
ranking_rules: option_to_setting(settings.ranking_rules).map(|criteria| {
criteria
.into_iter()
.map(|criterion| patch_ranking_rules(&criterion))
.collect()
criteria.into_iter().map(|criterion| patch_ranking_rules(&criterion)).collect()
}),
stop_words: option_to_setting(settings.stop_words),
synonyms: option_to_setting(settings.synonyms),
@ -383,7 +371,8 @@ fn patch_ranking_rules(ranking_rule: &str) -> String {
#[cfg(test)]
pub(crate) mod test {
use std::{fs::File, io::BufReader};
use std::fs::File;
use std::io::BufReader;
use flate2::bufread::GzDecoder;
use tempfile::TempDir;
@ -412,11 +401,7 @@ pub(crate) mod test {
assert!(update_files[0].is_some()); // the enqueued document addition
assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed
let update_file = update_files
.remove(0)
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let update_file = update_files.remove(0).unwrap().collect::<Result<Vec<_>>>().unwrap();
meili_snap::snapshot_hash!(meili_snap::json_string!(update_file), @"7b8889539b669c7b9ddba448bafa385d");
// indexes
@ -441,11 +426,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"f43338ecceeddd1ce13ffd55438b2347");
let documents = products
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 10);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
@ -460,11 +441,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"0d76c745cb334e8c20d6d6a14df733e1");
let documents = movies
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 110);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d153b5a81d8b3cdcbe1dec270b574022");
@ -479,11 +456,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", movies2.settings()), @"09a2f7c571729f70f4cd93e24e8e3f28");
let documents = movies2
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = movies2.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 0);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d751713988987e9331980363e24189ce");
@ -498,11 +471,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"09a2f7c571729f70f4cd93e24e8e3f28");
let documents = spells
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 10);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
}

View File

@ -1,8 +1,7 @@
use crate::reader::{v3, v4, UpdateFile};
use crate::Result;
use super::v2_to_v3::{CompatIndexV2ToV3, CompatV2ToV3};
use super::v4_to_v5::CompatV4ToV5;
use crate::reader::{v3, v4, UpdateFile};
use crate::Result;
pub enum CompatV3ToV4 {
V3(v3::V3Reader),
@ -38,18 +37,15 @@ impl CompatV3ToV4 {
pub fn indexes(&self) -> Result<impl Iterator<Item = Result<CompatIndexV3ToV4>> + '_> {
Ok(match self {
CompatV3ToV4::V3(v3) => Box::new(
v3.indexes()?
.map(|index| index.map(CompatIndexV3ToV4::from)),
)
as Box<dyn Iterator<Item = Result<CompatIndexV3ToV4>> + '_>,
CompatV3ToV4::V3(v3) => {
Box::new(v3.indexes()?.map(|index| index.map(CompatIndexV3ToV4::from)))
as Box<dyn Iterator<Item = Result<CompatIndexV3ToV4>> + '_>
}
CompatV3ToV4::Compat(compat) => Box::new(
compat
.indexes()?
.map(|index| index.map(CompatIndexV3ToV4::from)),
)
as Box<dyn Iterator<Item = Result<CompatIndexV3ToV4>> + '_>,
CompatV3ToV4::Compat(compat) => {
Box::new(compat.indexes()?.map(|index| index.map(CompatIndexV3ToV4::from)))
as Box<dyn Iterator<Item = Result<CompatIndexV3ToV4>> + '_>
}
})
}
@ -341,7 +337,8 @@ impl<T> From<v3::Settings<T>> for v4::Settings<v4::Unchecked> {
#[cfg(test)]
pub(crate) mod test {
use std::{fs::File, io::BufReader};
use std::fs::File;
use std::io::BufReader;
use flate2::bufread::GzDecoder;
use tempfile::TempDir;
@ -370,11 +367,7 @@ pub(crate) mod test {
assert!(update_files[0].is_some()); // the enqueued document addition
assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed
let update_file = update_files
.remove(0)
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let update_file = update_files.remove(0).unwrap().collect::<Result<Vec<_>>>().unwrap();
meili_snap::snapshot_hash!(meili_snap::json_string!(update_file), @"7b8889539b669c7b9ddba448bafa385d");
// keys
@ -403,11 +396,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"ea46dd6b58c5e1d65c1c8159a32695ea");
let documents = products
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 10);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
@ -422,11 +411,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"4df4074ef6bfb71e8dc66d08ff8c9dfd");
let documents = movies
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 110);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d153b5a81d8b3cdcbe1dec270b574022");
@ -441,11 +426,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", movies2.settings()), @"24eaf4046d9718dabff36f35103352d4");
let documents = movies2
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = movies2.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 0);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d751713988987e9331980363e24189ce");
@ -460,11 +441,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"24eaf4046d9718dabff36f35103352d4");
let documents = spells
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 10);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
}

View File

@ -1,8 +1,7 @@
use crate::reader::{v4, v5, Document};
use crate::Result;
use super::v3_to_v4::{CompatIndexV3ToV4, CompatV3ToV4};
use super::v5_to_v6::CompatV5ToV6;
use crate::reader::{v4, v5, Document};
use crate::Result;
pub enum CompatV4ToV5 {
V4(v4::V4Reader),
@ -41,18 +40,15 @@ impl CompatV4ToV5 {
pub fn indexes(&self) -> Result<Box<dyn Iterator<Item = Result<CompatIndexV4ToV5>> + '_>> {
Ok(match self {
CompatV4ToV5::V4(v4) => Box::new(
v4.indexes()?
.map(|index| index.map(CompatIndexV4ToV5::from)),
)
as Box<dyn Iterator<Item = Result<CompatIndexV4ToV5>> + '_>,
CompatV4ToV5::V4(v4) => {
Box::new(v4.indexes()?.map(|index| index.map(CompatIndexV4ToV5::from)))
as Box<dyn Iterator<Item = Result<CompatIndexV4ToV5>> + '_>
}
CompatV4ToV5::Compat(compat) => Box::new(
compat
.indexes()?
.map(|index| index.map(CompatIndexV4ToV5::from)),
)
as Box<dyn Iterator<Item = Result<CompatIndexV4ToV5>> + '_>,
CompatV4ToV5::Compat(compat) => {
Box::new(compat.indexes()?.map(|index| index.map(CompatIndexV4ToV5::from)))
as Box<dyn Iterator<Item = Result<CompatIndexV4ToV5>> + '_>
}
})
}
@ -138,13 +134,9 @@ impl CompatV4ToV5 {
v4::tasks::TaskEvent::Created(date) => {
v5::tasks::TaskEvent::Created(date)
}
v4::tasks::TaskEvent::Batched {
timestamp,
batch_id,
} => v5::tasks::TaskEvent::Batched {
timestamp,
batch_id,
},
v4::tasks::TaskEvent::Batched { timestamp, batch_id } => {
v5::tasks::TaskEvent::Batched { timestamp, batch_id }
}
v4::tasks::TaskEvent::Processing(date) => {
v5::tasks::TaskEvent::Processing(date)
}
@ -196,11 +188,7 @@ impl CompatV4ToV5 {
description: key.description,
name: None,
uid: v5::keys::KeyId::new_v4(),
actions: key
.actions
.into_iter()
.filter_map(|action| action.into())
.collect(),
actions: key.actions.into_iter().filter_map(|action| action.into()).collect(),
indexes: key
.indexes
.into_iter()
@ -385,7 +373,8 @@ impl From<v4::Action> for Option<v5::Action> {
#[cfg(test)]
pub(crate) mod test {
use std::{fs::File, io::BufReader};
use std::fs::File;
use std::io::BufReader;
use flate2::bufread::GzDecoder;
use tempfile::TempDir;
@ -440,11 +429,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"ed1a6977a832b1ab49cd5068b77ce498");
let documents = products
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 10);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b01c8371aea4c7171af0d4d846a2bdca");
@ -459,11 +444,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"70681af1d52411218036fbd5a9b94ab5");
let documents = movies
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 110);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"786022a66ecb992c8a2a60fee070a5ab");
@ -478,11 +459,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"7019bb8f146004dcdd91fc3c3254b742");
let documents = spells
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 10);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
}

View File

@ -1,8 +1,7 @@
use super::v4_to_v5::{CompatIndexV4ToV5, CompatV4ToV5};
use crate::reader::{v5, v6, Document, UpdateFile};
use crate::Result;
use super::v4_to_v5::{CompatIndexV4ToV5, CompatV4ToV5};
pub enum CompatV5ToV6 {
V5(v5::V5Reader),
Compat(CompatV4ToV5),
@ -36,18 +35,15 @@ impl CompatV5ToV6 {
pub fn indexes(&self) -> Result<Box<dyn Iterator<Item = Result<CompatIndexV5ToV6>> + '_>> {
let indexes = match self {
CompatV5ToV6::V5(v5) => Box::new(
v5.indexes()?
.map(|index| index.map(CompatIndexV5ToV6::from)),
)
as Box<dyn Iterator<Item = Result<CompatIndexV5ToV6>> + '_>,
CompatV5ToV6::V5(v5) => {
Box::new(v5.indexes()?.map(|index| index.map(CompatIndexV5ToV6::from)))
as Box<dyn Iterator<Item = Result<CompatIndexV5ToV6>> + '_>
}
CompatV5ToV6::Compat(compat) => Box::new(
compat
.indexes()?
.map(|index| index.map(CompatIndexV5ToV6::from)),
)
as Box<dyn Iterator<Item = Result<CompatIndexV5ToV6>> + '_>,
CompatV5ToV6::Compat(compat) => {
Box::new(compat.indexes()?.map(|index| index.map(CompatIndexV5ToV6::from)))
as Box<dyn Iterator<Item = Result<CompatIndexV5ToV6>> + '_>
}
};
Ok(indexes)
}
@ -127,16 +123,15 @@ impl CompatV5ToV6 {
},
canceled_by: None,
details: task_view.details.map(|details| match details {
v5::Details::DocumentAddition {
received_documents,
indexed_documents,
} => v6::Details::DocumentAddition {
v5::Details::DocumentAddition { received_documents, indexed_documents } => {
v6::Details::DocumentAddition {
received_documents: received_documents as u64,
indexed_documents: indexed_documents.map(|i| i as u64),
},
v5::Details::Settings { settings } => v6::Details::Settings {
settings: settings.into(),
},
}
}
v5::Details::Settings { settings } => {
v6::Details::Settings { settings: settings.into() }
}
v5::Details::IndexInfo { primary_key } => {
v6::Details::IndexInfo { primary_key }
}
@ -174,11 +169,7 @@ impl CompatV5ToV6 {
description: key.description,
name: key.name,
uid: key.uid,
actions: key
.actions
.into_iter()
.map(|action| action.into())
.collect(),
actions: key.actions.into_iter().map(|action| action.into()).collect(),
indexes: key
.indexes
.into_iter()
@ -396,7 +387,8 @@ impl From<v5::Action> for v6::Action {
#[cfg(test)]
pub(crate) mod test {
use std::{fs::File, io::BufReader};
use std::fs::File;
use std::io::BufReader;
use flate2::bufread::GzDecoder;
use tempfile::TempDir;
@ -452,11 +444,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"9896a66a399c24a0f4f6a3c8563cd14a");
let documents = products
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 10);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b01c8371aea4c7171af0d4d846a2bdca");
@ -471,11 +459,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"d0dc7efd1360f95fce57d7931a70b7c9");
let documents = movies
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 200);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"e962baafd2fbae4cdd14e876053b0c5a");
@ -490,11 +474,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"59c8e30c2022897987ea7b4394167b06");
let documents = spells
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 10);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
}

View File

@ -1,5 +1,9 @@
use std::io::Read;
use std::{fs::File, io::BufReader};
use std::fs::File;
use std::io::{BufReader, Read};
use flate2::bufread::GzDecoder;
use serde::Deserialize;
use tempfile::TempDir;
use self::compat::v4_to_v5::CompatV4ToV5;
use self::compat::v5_to_v6::{CompatIndexV5ToV6, CompatV5ToV6};
@ -7,10 +11,6 @@ use self::v5::V5Reader;
use self::v6::{V6IndexReader, V6Reader};
use crate::{Error, Result, Version};
use flate2::bufread::GzDecoder;
use serde::Deserialize;
use tempfile::TempDir;
mod compat;
// pub(self) mod v1;
@ -47,12 +47,7 @@ impl DumpReader {
match dump_version {
// Version::V1 => Ok(Box::new(v1::Reader::open(path)?)),
Version::V1 => Err(Error::DumpV1Unsupported),
Version::V2 => Ok(v2::V2Reader::open(path)?
.to_v3()
.to_v4()
.to_v5()
.to_v6()
.into()),
Version::V2 => Ok(v2::V2Reader::open(path)?.to_v3().to_v4().to_v5().to_v6().into()),
Version::V3 => Ok(v3::V3Reader::open(path)?.to_v4().to_v5().to_v6().into()),
Version::V4 => Ok(v4::V4Reader::open(path)?.to_v5().to_v6().into()),
Version::V5 => Ok(v5::V5Reader::open(path)?.to_v6().into()),
@ -234,11 +229,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"9896a66a399c24a0f4f6a3c8563cd14a");
let documents = products
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 10);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b01c8371aea4c7171af0d4d846a2bdca");
@ -253,11 +244,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"d0dc7efd1360f95fce57d7931a70b7c9");
let documents = movies
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 200);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"e962baafd2fbae4cdd14e876053b0c5a");
@ -272,11 +259,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"59c8e30c2022897987ea7b4394167b06");
let documents = spells
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 10);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
}
@ -323,11 +306,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"ed1a6977a832b1ab49cd5068b77ce498");
let documents = products
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 10);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b01c8371aea4c7171af0d4d846a2bdca");
@ -342,11 +321,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"70681af1d52411218036fbd5a9b94ab5");
let documents = movies
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 110);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"786022a66ecb992c8a2a60fee070a5ab");
@ -361,11 +336,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"7019bb8f146004dcdd91fc3c3254b742");
let documents = spells
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 10);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
}
@ -413,11 +384,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"1a5ed16d00e6163662d9d7ffe400c5d0");
let documents = products
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 10);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
@ -432,11 +399,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"9a6b511669b8f53d193d2f0bd1671baa");
let documents = movies
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 110);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d153b5a81d8b3cdcbe1dec270b574022");
@ -451,11 +414,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", movies2.settings()), @"4fdf905496d9a511800ff523728728ac");
let documents = movies2
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = movies2.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 0);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d751713988987e9331980363e24189ce");
@ -470,11 +429,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"4fdf905496d9a511800ff523728728ac");
let documents = spells
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 10);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
}
@ -522,11 +477,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"a7d4fed93bfc91d0f1126d3371abf48e");
let documents = products
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 10);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
@ -541,11 +492,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"e79c3cc4eef44bd22acfb60957b459d9");
let documents = movies
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 110);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d153b5a81d8b3cdcbe1dec270b574022");
@ -560,11 +507,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", movies2.settings()), @"7917f954b6f345336073bb155540ad6d");
let documents = movies2
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = movies2.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 0);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d751713988987e9331980363e24189ce");
@ -579,11 +522,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"7917f954b6f345336073bb155540ad6d");
let documents = spells
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 10);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
}

View File

@ -22,11 +22,9 @@
//! └── update_202573df-718b-4d80-9a65-2ee397c23dc3
//! ```
use std::{
fs::{self, File},
io::{BufRead, BufReader},
path::Path,
};
use std::fs::{self, File};
use std::io::{BufRead, BufReader};
use std::path::Path;
use serde::{Deserialize, Serialize};
use tempfile::TempDir;
@ -37,11 +35,10 @@ pub mod meta;
pub mod settings;
pub mod updates;
use crate::{IndexMetadata, Result, Version};
use self::meta::{DumpMeta, IndexUuid};
use super::{compat::v2_to_v3::CompatV2ToV3, Document};
use super::compat::v2_to_v3::CompatV2ToV3;
use super::Document;
use crate::{IndexMetadata, Result, Version};
pub type Settings<T> = settings::Settings<T>;
pub type Checked = settings::Checked;
@ -110,11 +107,7 @@ impl V2Reader {
Ok(self.index_uuid.iter().map(|index| -> Result<_> {
Ok(V2IndexReader::new(
index.uid.clone(),
&self
.dump
.path()
.join("indexes")
.join(format!("index-{}", index.uuid.to_string())),
&self.dump.path().join("indexes").join(format!("index-{}", index.uuid.to_string())),
)?)
}))
}
@ -193,10 +186,7 @@ pub struct UpdateFile {
impl UpdateFile {
fn new(path: &Path) -> Result<Self> {
let reader = BufReader::new(File::open(path)?);
Ok(UpdateFile {
documents: serde_json::from_reader(reader)?,
index: 0,
})
Ok(UpdateFile { documents: serde_json::from_reader(reader)?, index: 0 })
}
}
@ -211,7 +201,8 @@ impl Iterator for UpdateFile {
#[cfg(test)]
pub(crate) mod test {
use std::{fs::File, io::BufReader};
use std::fs::File;
use std::io::BufReader;
use flate2::bufread::GzDecoder;
use tempfile::TempDir;
@ -240,11 +231,7 @@ pub(crate) mod test {
assert!(update_files[0].is_some()); // the enqueued document addition
assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed
let update_file = update_files
.remove(0)
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let update_file = update_files.remove(0).unwrap().collect::<Result<Vec<_>>>().unwrap();
meili_snap::snapshot_hash!(meili_snap::json_string!(update_file), @"7b8889539b669c7b9ddba448bafa385d");
// indexes
@ -269,11 +256,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"b4814eab5e73e2dcfc90aad50aa583d1");
let documents = products
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 10);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
@ -288,11 +271,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"59dd69f590635a58f3d99edc9e1fa21f");
let documents = movies
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 110);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d153b5a81d8b3cdcbe1dec270b574022");
@ -307,11 +286,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", movies2.settings()), @"ac041085004c43373fe90dc48f5c23ab");
let documents = movies2
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = movies2.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 0);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d751713988987e9331980363e24189ce");
@ -326,11 +301,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"ac041085004c43373fe90dc48f5c23ab");
let documents = spells
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 10);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
}

View File

@ -1,8 +1,6 @@
use std::{
collections::{BTreeMap, BTreeSet, HashSet},
marker::PhantomData,
str::FromStr,
};
use std::collections::{BTreeMap, BTreeSet, HashSet};
use std::marker::PhantomData;
use std::str::FromStr;
use once_cell::sync::Lazy;
use regex::Regex;
@ -39,10 +37,7 @@ pub struct Unchecked;
#[cfg_attr(test, derive(serde::Serialize))]
#[serde(deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
#[serde(bound(
serialize = "T: serde::Serialize",
deserialize = "T: Deserialize<'static>"
))]
#[serde(bound(serialize = "T: serde::Serialize", deserialize = "T: Deserialize<'static>"))]
pub struct Settings<T> {
#[serde(
default,

View File

@ -22,11 +22,9 @@
//! └── 66d3f12d-fcf3-4b53-88cb-407017373de7
//! ```
use std::{
fs::{self, File},
io::{BufRead, BufReader},
path::Path,
};
use std::fs::{self, File};
use std::io::{BufRead, BufReader};
use std::path::Path;
use serde::{Deserialize, Serialize};
use tempfile::TempDir;
@ -37,11 +35,10 @@ pub mod meta;
pub mod settings;
pub mod updates;
use crate::{Error, IndexMetadata, Result, Version};
use self::meta::{DumpMeta, IndexUuid};
use super::{compat::v3_to_v4::CompatV3ToV4, Document};
use super::compat::v3_to_v4::CompatV3ToV4;
use super::Document;
use crate::{Error, IndexMetadata, Result, Version};
pub type Settings<T> = settings::Settings<T>;
pub type Checked = settings::Checked;
@ -116,11 +113,7 @@ impl V3Reader {
Ok(self.index_uuid.iter().map(|index| -> Result<_> {
Ok(V3IndexReader::new(
index.uid.clone(),
&self
.dump
.path()
.join("indexes")
.join(index.uuid.to_string()),
&self.dump.path().join("indexes").join(index.uuid.to_string()),
)?)
}))
}
@ -204,9 +197,7 @@ pub struct UpdateFile {
impl UpdateFile {
fn new(path: &Path) -> Result<Self> {
Ok(UpdateFile {
reader: BufReader::new(File::open(path)?),
})
Ok(UpdateFile { reader: BufReader::new(File::open(path)?) })
}
}
@ -226,7 +217,8 @@ impl Iterator for UpdateFile {
#[cfg(test)]
pub(crate) mod test {
use std::{fs::File, io::BufReader};
use std::fs::File;
use std::io::BufReader;
use flate2::bufread::GzDecoder;
use tempfile::TempDir;
@ -255,11 +247,7 @@ pub(crate) mod test {
assert!(update_files[0].is_some()); // the enqueued document addition
assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed
let update_file = update_files
.remove(0)
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let update_file = update_files.remove(0).unwrap().collect::<Result<Vec<_>>>().unwrap();
meili_snap::snapshot_hash!(meili_snap::json_string!(update_file), @"7b8889539b669c7b9ddba448bafa385d");
// indexes
@ -284,11 +272,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"7460d4b242b5c8b1bda223f63bbbf349");
let documents = products
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 10);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
@ -303,11 +287,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"d83ab8e79bb44595667d6ce3e6629a4f");
let documents = movies
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 110);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d153b5a81d8b3cdcbe1dec270b574022");
@ -322,11 +302,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", movies2.settings()), @"44d3b5a3b3aa6cd950373ff751d05bb7");
let documents = movies2
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = movies2.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 0);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d751713988987e9331980363e24189ce");
@ -341,11 +317,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"44d3b5a3b3aa6cd950373ff751d05bb7");
let documents = spells
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 10);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
}

View File

@ -1,8 +1,6 @@
use std::{
collections::{BTreeMap, BTreeSet},
marker::PhantomData,
num::NonZeroUsize,
};
use std::collections::{BTreeMap, BTreeSet};
use std::marker::PhantomData;
use std::num::NonZeroUsize;
use serde::{Deserialize, Deserializer};
@ -40,10 +38,7 @@ pub struct Unchecked;
#[cfg_attr(test, derive(serde::Serialize))]
#[serde(deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
#[serde(bound(
serialize = "T: serde::Serialize",
deserialize = "T: Deserialize<'static>"
))]
#[serde(bound(serialize = "T: serde::Serialize", deserialize = "T: Deserialize<'static>"))]
pub struct Settings<T> {
#[serde(
default,

View File

@ -8,10 +8,7 @@ use serde::{Deserialize, Serialize};
#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))]
pub struct ResponseError {
#[serde(skip)]
#[cfg_attr(
feature = "test-traits",
proptest(strategy = "strategy::status_code_strategy()")
)]
#[cfg_attr(feature = "test-traits", proptest(strategy = "strategy::status_code_strategy()"))]
pub code: StatusCode,
pub message: String,
#[serde(rename = "code")]
@ -206,10 +203,9 @@ impl Code {
BadParameter => ErrCode::invalid("bad_parameter", StatusCode::BAD_REQUEST),
BadRequest => ErrCode::invalid("bad_request", StatusCode::BAD_REQUEST),
DatabaseSizeLimitReached => ErrCode::internal(
"database_size_limit_reached",
StatusCode::INTERNAL_SERVER_ERROR,
),
DatabaseSizeLimitReached => {
ErrCode::internal("database_size_limit_reached", StatusCode::INTERNAL_SERVER_ERROR)
}
DocumentNotFound => ErrCode::invalid("document_not_found", StatusCode::NOT_FOUND),
Internal => ErrCode::internal("internal", StatusCode::INTERNAL_SERVER_ERROR),
InvalidGeoField => ErrCode::invalid("invalid_geo_field", StatusCode::BAD_REQUEST),
@ -302,26 +298,14 @@ struct ErrCode {
impl ErrCode {
fn authentication(error_name: &'static str, status_code: StatusCode) -> ErrCode {
ErrCode {
status_code,
error_name,
error_type: ErrorType::AuthenticationError,
}
ErrCode { status_code, error_name, error_type: ErrorType::AuthenticationError }
}
fn internal(error_name: &'static str, status_code: StatusCode) -> ErrCode {
ErrCode {
status_code,
error_name,
error_type: ErrorType::InternalError,
}
ErrCode { status_code, error_name, error_type: ErrorType::InternalError }
}
fn invalid(error_name: &'static str, status_code: StatusCode) -> ErrCode {
ErrCode {
status_code,
error_name,
error_type: ErrorType::InvalidRequestError,
}
ErrCode { status_code, error_name, error_type: ErrorType::InvalidRequestError }
}
}

View File

@ -1,10 +1,9 @@
use std::{
fmt::{self, Display, Formatter},
marker::PhantomData,
str::FromStr,
};
use std::fmt::{self, Display, Formatter};
use std::marker::PhantomData;
use std::str::FromStr;
use serde::{de::Visitor, Deserialize, Deserializer};
use serde::de::Visitor;
use serde::{Deserialize, Deserializer};
use uuid::Uuid;
use super::settings::{Settings, Unchecked};
@ -39,9 +38,7 @@ impl TryFrom<String> for IndexUid {
type Error = IndexUidFormatError;
fn try_from(uid: String) -> Result<Self, Self::Error> {
if !uid
.chars()
.all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_')
if !uid.chars().all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_')
|| uid.is_empty()
|| uid.len() > 400
{

View File

@ -1,8 +1,6 @@
use std::{
fs::{self, File},
io::{BufRead, BufReader},
path::Path,
};
use std::fs::{self, File};
use std::io::{BufRead, BufReader};
use std::path::Path;
use serde::{Deserialize, Serialize};
use tempfile::TempDir;
@ -15,11 +13,9 @@ pub mod meta;
pub mod settings;
pub mod tasks;
use crate::{Error, IndexMetadata, Result, Version};
use self::meta::{DumpMeta, IndexUuid};
use super::compat::v4_to_v5::CompatV4ToV5;
use crate::{Error, IndexMetadata, Result, Version};
pub type Document = serde_json::Map<String, serde_json::Value>;
pub type Settings<T> = settings::Settings<T>;
@ -100,11 +96,7 @@ impl V4Reader {
Ok(self.index_uuid.iter().map(|index| -> Result<_> {
Ok(V4IndexReader::new(
index.uid.clone(),
&self
.dump
.path()
.join("indexes")
.join(index.index_meta.uuid.to_string()),
&self.dump.path().join("indexes").join(index.index_meta.uuid.to_string()),
)?)
}))
}
@ -139,9 +131,7 @@ impl V4Reader {
pub fn keys(&mut self) -> Box<dyn Iterator<Item = Result<Key>> + '_> {
Box::new(
(&mut self.keys)
.lines()
.map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }),
(&mut self.keys).lines().map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }),
)
}
}
@ -196,9 +186,7 @@ pub struct UpdateFile {
impl UpdateFile {
fn new(path: &Path) -> Result<Self> {
Ok(UpdateFile {
reader: BufReader::new(File::open(path)?),
})
Ok(UpdateFile { reader: BufReader::new(File::open(path)?) })
}
}
@ -218,7 +206,8 @@ impl Iterator for UpdateFile {
#[cfg(test)]
pub(crate) mod test {
use std::{fs::File, io::BufReader};
use std::fs::File;
use std::io::BufReader;
use flate2::bufread::GzDecoder;
use tempfile::TempDir;
@ -248,11 +237,7 @@ pub(crate) mod test {
assert!(update_files[0].is_some()); // the enqueued document addition
assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed
let update_file = update_files
.remove(0)
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let update_file = update_files.remove(0).unwrap().collect::<Result<Vec<_>>>().unwrap();
meili_snap::snapshot_hash!(meili_snap::json_string!(update_file), @"7b8889539b669c7b9ddba448bafa385d");
// keys
@ -280,11 +265,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"ace6546a6eb856ecb770b2409975c01d");
let documents = products
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 10);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b01c8371aea4c7171af0d4d846a2bdca");
@ -299,11 +280,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"4dfa34fa34f2c03259482e1e4555faa8");
let documents = movies
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 110);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"786022a66ecb992c8a2a60fee070a5ab");
@ -318,11 +295,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"1aa241a5e3afd8c85a4e7b9db42362d7");
let documents = spells
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 10);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
}

View File

@ -1,8 +1,6 @@
use std::{
collections::{BTreeMap, BTreeSet},
marker::PhantomData,
num::NonZeroUsize,
};
use std::collections::{BTreeMap, BTreeSet};
use std::marker::PhantomData;
use std::num::NonZeroUsize;
use serde::{Deserialize, Deserializer};
@ -65,10 +63,7 @@ pub struct TypoSettings {
#[cfg_attr(test, derive(serde::Serialize))]
#[serde(deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
#[serde(bound(
serialize = "T: serde::Serialize",
deserialize = "T: Deserialize<'static>"
))]
#[serde(bound(serialize = "T: serde::Serialize", deserialize = "T: Deserialize<'static>"))]
pub struct Settings<T> {
#[serde(
default,

View File

@ -2,11 +2,9 @@ use serde::Deserialize;
use time::OffsetDateTime;
use uuid::Uuid;
use super::{
errors::ResponseError,
meta::IndexUid,
settings::{Settings, Unchecked},
};
use super::errors::ResponseError;
use super::meta::IndexUid;
use super::settings::{Settings, Unchecked};
pub type TaskId = u32;
pub type BatchId = u32;
@ -109,10 +107,9 @@ impl Task {
/// Return the content_uuid of the `Task` if there is one.
pub fn get_content_uuid(&self) -> Option<Uuid> {
match self {
Task {
content: TaskContent::DocumentAddition { content_uuid, .. },
..
} => Some(*content_uuid),
Task { content: TaskContent::DocumentAddition { content_uuid, .. }, .. } => {
Some(*content_uuid)
}
_ => None,
}
}

View File

@ -142,10 +142,9 @@ impl Code {
BadParameter => ErrCode::invalid("bad_parameter", StatusCode::BAD_REQUEST),
BadRequest => ErrCode::invalid("bad_request", StatusCode::BAD_REQUEST),
DatabaseSizeLimitReached => ErrCode::internal(
"database_size_limit_reached",
StatusCode::INTERNAL_SERVER_ERROR,
),
DatabaseSizeLimitReached => {
ErrCode::internal("database_size_limit_reached", StatusCode::INTERNAL_SERVER_ERROR)
}
DocumentNotFound => ErrCode::invalid("document_not_found", StatusCode::NOT_FOUND),
Internal => ErrCode::internal("internal", StatusCode::INTERNAL_SERVER_ERROR),
InvalidGeoField => ErrCode::invalid("invalid_geo_field", StatusCode::BAD_REQUEST),
@ -241,27 +240,15 @@ struct ErrCode {
impl ErrCode {
fn authentication(error_name: &'static str, status_code: StatusCode) -> ErrCode {
ErrCode {
status_code,
error_name,
error_type: ErrorType::AuthenticationError,
}
ErrCode { status_code, error_name, error_type: ErrorType::AuthenticationError }
}
fn internal(error_name: &'static str, status_code: StatusCode) -> ErrCode {
ErrCode {
status_code,
error_name,
error_type: ErrorType::InternalError,
}
ErrCode { status_code, error_name, error_type: ErrorType::InternalError }
}
fn invalid(error_name: &'static str, status_code: StatusCode) -> ErrCode {
ErrCode {
status_code,
error_name,
error_type: ErrorType::InvalidRequestError,
}
ErrCode { status_code, error_name, error_type: ErrorType::InvalidRequestError }
}
}

View File

@ -1,10 +1,9 @@
use std::{
fmt::{self, Display, Formatter},
marker::PhantomData,
str::FromStr,
};
use std::fmt::{self, Display, Formatter};
use std::marker::PhantomData;
use std::str::FromStr;
use serde::{de::Visitor, Deserialize, Deserializer};
use serde::de::Visitor;
use serde::{Deserialize, Deserializer};
use uuid::Uuid;
use super::settings::{Settings, Unchecked};
@ -39,9 +38,7 @@ impl TryFrom<String> for IndexUid {
type Error = IndexUidFormatError;
fn try_from(uid: String) -> Result<Self, Self::Error> {
if !uid
.chars()
.all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_')
if !uid.chars().all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_')
|| uid.is_empty()
|| uid.len() > 400
{

View File

@ -32,21 +32,19 @@
//! ```
//!
use std::{
fs::{self, File},
io::{BufRead, BufReader, Seek, SeekFrom},
path::Path,
};
use std::fs::{self, File};
use std::io::{BufRead, BufReader, Seek, SeekFrom};
use std::path::Path;
use serde::{Deserialize, Serialize};
use tempfile::TempDir;
use time::OffsetDateTime;
use uuid::Uuid;
use super::compat::v5_to_v6::CompatV5ToV6;
use super::Document;
use crate::{Error, IndexMetadata, Result, Version};
use super::{compat::v5_to_v6::CompatV5ToV6, Document};
pub mod errors;
pub mod keys;
pub mod meta;
@ -139,11 +137,7 @@ impl V5Reader {
Ok(self.index_uuid.iter().map(|index| -> Result<_> {
Ok(V5IndexReader::new(
index.uid.clone(),
&self
.dump
.path()
.join("indexes")
.join(index.index_meta.uuid.to_string()),
&self.dump.path().join("indexes").join(index.index_meta.uuid.to_string()),
)?)
}))
}
@ -178,9 +172,9 @@ impl V5Reader {
pub fn keys(&mut self) -> Result<Box<dyn Iterator<Item = Result<Key>> + '_>> {
self.keys.seek(SeekFrom::Start(0))?;
Ok(Box::new((&mut self.keys).lines().map(
|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) },
)))
Ok(Box::new(
(&mut self.keys).lines().map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }),
))
}
}
@ -234,9 +228,7 @@ pub struct UpdateFile {
impl UpdateFile {
fn new(path: &Path) -> Result<Self> {
Ok(UpdateFile {
reader: BufReader::new(File::open(path)?),
})
Ok(UpdateFile { reader: BufReader::new(File::open(path)?) })
}
}
@ -256,7 +248,8 @@ impl Iterator for UpdateFile {
#[cfg(test)]
pub(crate) mod test {
use std::{fs::File, io::BufReader};
use std::fs::File;
use std::io::BufReader;
use flate2::bufread::GzDecoder;
use tempfile::TempDir;
@ -287,11 +280,7 @@ pub(crate) mod test {
assert!(update_files[1].is_some()); // the enqueued document addition
assert!(update_files[2..].iter().all(|u| u.is_none())); // everything already processed
let update_file = update_files
.remove(1)
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let update_file = update_files.remove(1).unwrap().collect::<Result<Vec<_>>>().unwrap();
meili_snap::snapshot_hash!(meili_snap::json_string!(update_file), @"7b8889539b669c7b9ddba448bafa385d");
// keys
@ -319,11 +308,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"9896a66a399c24a0f4f6a3c8563cd14a");
let documents = products
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 10);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b01c8371aea4c7171af0d4d846a2bdca");
@ -338,11 +323,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"d0dc7efd1360f95fce57d7931a70b7c9");
let documents = movies
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 200);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"e962baafd2fbae4cdd14e876053b0c5a");
@ -357,11 +338,7 @@ pub(crate) mod test {
"###);
meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"59c8e30c2022897987ea7b4394167b06");
let documents = spells
.documents()
.unwrap()
.collect::<Result<Vec<_>>>()
.unwrap();
let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 10);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
}

View File

@ -1,7 +1,5 @@
use std::{
collections::{BTreeMap, BTreeSet},
marker::PhantomData,
};
use std::collections::{BTreeMap, BTreeSet};
use std::marker::PhantomData;
use serde::{Deserialize, Deserializer, Serialize};

View File

@ -2,11 +2,9 @@ use serde::Deserialize;
use time::{Duration, OffsetDateTime};
use uuid::Uuid;
use super::{
errors::ResponseError,
meta::IndexUid,
settings::{Settings, Unchecked},
};
use super::errors::ResponseError;
use super::meta::IndexUid;
use super::settings::{Settings, Unchecked};
pub type TaskId = u32;
pub type BatchId = u32;
@ -117,20 +115,16 @@ impl Task {
/// A task is finished when its last state is either `Succeeded` or `Failed`.
pub fn is_finished(&self) -> bool {
self.events.last().map_or(false, |event| {
matches!(
event,
TaskEvent::Succeeded { .. } | TaskEvent::Failed { .. }
)
matches!(event, TaskEvent::Succeeded { .. } | TaskEvent::Failed { .. })
})
}
/// Return the content_uuid of the `Task` if there is one.
pub fn get_content_uuid(&self) -> Option<Uuid> {
match self {
Task {
content: TaskContent::DocumentAddition { content_uuid, .. },
..
} => Some(*content_uuid),
Task { content: TaskContent::DocumentAddition { content_uuid, .. }, .. } => {
Some(*content_uuid)
}
_ => None,
}
}
@ -184,31 +178,19 @@ pub struct TaskView {
pub duration: Option<Duration>,
#[cfg_attr(test, serde(serialize_with = "time::serde::rfc3339::serialize"))]
pub enqueued_at: OffsetDateTime,
#[cfg_attr(
test,
serde(serialize_with = "time::serde::rfc3339::option::serialize")
)]
#[cfg_attr(test, serde(serialize_with = "time::serde::rfc3339::option::serialize"))]
pub started_at: Option<OffsetDateTime>,
#[cfg_attr(
test,
serde(serialize_with = "time::serde::rfc3339::option::serialize")
)]
#[cfg_attr(test, serde(serialize_with = "time::serde::rfc3339::option::serialize"))]
pub finished_at: Option<OffsetDateTime>,
}
impl From<Task> for TaskView {
fn from(task: Task) -> Self {
let index_uid = task.index_uid().map(String::from);
let Task {
id,
content,
events,
} = task;
let Task { id, content, events } = task;
let (task_type, mut details) = match content {
TaskContent::DocumentAddition {
documents_count, ..
} => {
TaskContent::DocumentAddition { documents_count, .. } => {
let details = TaskDetails::DocumentAddition {
received_documents: documents_count,
indexed_documents: None,
@ -216,47 +198,32 @@ impl From<Task> for TaskView {
(TaskType::DocumentAdditionOrUpdate, Some(details))
}
TaskContent::DocumentDeletion {
deletion: DocumentDeletion::Ids(ids),
..
} => (
TaskContent::DocumentDeletion { deletion: DocumentDeletion::Ids(ids), .. } => (
TaskType::DocumentDeletion,
Some(TaskDetails::DocumentDeletion {
received_document_ids: ids.len(),
deleted_documents: None,
}),
),
TaskContent::DocumentDeletion {
deletion: DocumentDeletion::Clear,
..
} => (
TaskContent::DocumentDeletion { deletion: DocumentDeletion::Clear, .. } => (
TaskType::DocumentDeletion,
Some(TaskDetails::ClearAll {
deleted_documents: None,
}),
),
TaskContent::IndexDeletion { .. } => (
TaskType::IndexDeletion,
Some(TaskDetails::ClearAll {
deleted_documents: None,
}),
),
TaskContent::SettingsUpdate { settings, .. } => (
TaskType::SettingsUpdate,
Some(TaskDetails::Settings { settings }),
),
TaskContent::IndexCreation { primary_key, .. } => (
TaskType::IndexCreation,
Some(TaskDetails::IndexInfo { primary_key }),
),
TaskContent::IndexUpdate { primary_key, .. } => (
TaskType::IndexUpdate,
Some(TaskDetails::IndexInfo { primary_key }),
),
TaskContent::Dump { uid } => (
TaskType::DumpCreation,
Some(TaskDetails::Dump { dump_uid: uid }),
Some(TaskDetails::ClearAll { deleted_documents: None }),
),
TaskContent::IndexDeletion { .. } => {
(TaskType::IndexDeletion, Some(TaskDetails::ClearAll { deleted_documents: None }))
}
TaskContent::SettingsUpdate { settings, .. } => {
(TaskType::SettingsUpdate, Some(TaskDetails::Settings { settings }))
}
TaskContent::IndexCreation { primary_key, .. } => {
(TaskType::IndexCreation, Some(TaskDetails::IndexInfo { primary_key }))
}
TaskContent::IndexUpdate { primary_key, .. } => {
(TaskType::IndexUpdate, Some(TaskDetails::IndexInfo { primary_key }))
}
TaskContent::Dump { uid } => {
(TaskType::DumpCreation, Some(TaskDetails::Dump { dump_uid: uid }))
}
};
// An event always has at least one event: "Created"
@ -267,36 +234,20 @@ impl From<Task> for TaskView {
TaskEvent::Succeeded { timestamp, result } => {
match (result, &mut details) {
(
TaskResult::DocumentAddition {
indexed_documents: num,
..
},
Some(TaskDetails::DocumentAddition {
ref mut indexed_documents,
..
}),
TaskResult::DocumentAddition { indexed_documents: num, .. },
Some(TaskDetails::DocumentAddition { ref mut indexed_documents, .. }),
) => {
indexed_documents.replace(*num);
}
(
TaskResult::DocumentDeletion {
deleted_documents: docs,
..
},
Some(TaskDetails::DocumentDeletion {
ref mut deleted_documents,
..
}),
TaskResult::DocumentDeletion { deleted_documents: docs, .. },
Some(TaskDetails::DocumentDeletion { ref mut deleted_documents, .. }),
) => {
deleted_documents.replace(*docs);
}
(
TaskResult::ClearAll {
deleted_documents: docs,
},
Some(TaskDetails::ClearAll {
ref mut deleted_documents,
}),
TaskResult::ClearAll { deleted_documents: docs },
Some(TaskDetails::ClearAll { ref mut deleted_documents }),
) => {
deleted_documents.replace(*docs);
}
@ -306,22 +257,13 @@ impl From<Task> for TaskView {
}
TaskEvent::Failed { timestamp, error } => {
match details {
Some(TaskDetails::DocumentDeletion {
ref mut deleted_documents,
..
}) => {
Some(TaskDetails::DocumentDeletion { ref mut deleted_documents, .. }) => {
deleted_documents.replace(0);
}
Some(TaskDetails::ClearAll {
ref mut deleted_documents,
..
}) => {
Some(TaskDetails::ClearAll { ref mut deleted_documents, .. }) => {
deleted_documents.replace(0);
}
Some(TaskDetails::DocumentAddition {
ref mut indexed_documents,
..
}) => {
Some(TaskDetails::DocumentAddition { ref mut indexed_documents, .. }) => {
indexed_documents.replace(0);
}
_ => (),
@ -400,10 +342,7 @@ pub enum TaskStatus {
#[allow(clippy::large_enum_variant)]
pub enum TaskDetails {
#[cfg_attr(test, serde(rename_all = "camelCase"))]
DocumentAddition {
received_documents: usize,
indexed_documents: Option<u64>,
},
DocumentAddition { received_documents: usize, indexed_documents: Option<u64> },
#[cfg_attr(test, serde(rename_all = "camelCase"))]
Settings {
#[cfg_attr(test, serde(flatten))]
@ -412,10 +351,7 @@ pub enum TaskDetails {
#[cfg_attr(test, serde(rename_all = "camelCase"))]
IndexInfo { primary_key: Option<String> },
#[cfg_attr(test, serde(rename_all = "camelCase"))]
DocumentDeletion {
received_document_ids: usize,
deleted_documents: Option<u64>,
},
DocumentDeletion { received_document_ids: usize, deleted_documents: Option<u64> },
#[cfg_attr(test, serde(rename_all = "camelCase"))]
ClearAll { deleted_documents: Option<u64> },
#[cfg_attr(test, serde(rename_all = "camelCase"))]

View File

@ -1,19 +1,15 @@
use std::{
fs::{self, File},
io::{BufRead, BufReader},
path::Path,
str::FromStr,
};
use std::fs::{self, File};
use std::io::{BufRead, BufReader};
use std::path::Path;
use std::str::FromStr;
pub use meilisearch_types::milli;
use tempfile::TempDir;
use time::OffsetDateTime;
use uuid::Uuid;
use crate::{Error, IndexMetadata, Result, Version};
pub use meilisearch_types::milli;
use super::Document;
use crate::{Error, IndexMetadata, Result, Version};
pub type Metadata = crate::Metadata;
@ -89,11 +85,7 @@ impl V6Reader {
let entry = entry?;
if entry.file_type()?.is_dir() {
let index = V6IndexReader::new(
entry
.file_name()
.to_str()
.ok_or(Error::BadIndexName)?
.to_string(),
entry.file_name().to_str().ok_or(Error::BadIndexName)?.to_string(),
&entry.path(),
)?;
Ok(Some(index))
@ -132,9 +124,7 @@ impl V6Reader {
pub fn keys(&mut self) -> Box<dyn Iterator<Item = Result<Key>> + '_> {
Box::new(
(&mut self.keys)
.lines()
.map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }),
(&mut self.keys).lines().map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }),
)
}
}
@ -145,9 +135,7 @@ pub struct UpdateFile {
impl UpdateFile {
fn new(path: &Path) -> Result<Self> {
Ok(UpdateFile {
reader: BufReader::new(File::open(path)?),
})
Ok(UpdateFile { reader: BufReader::new(File::open(path)?) })
}
}

View File

@ -1,20 +1,18 @@
use std::{
fs::{self, File},
io::{BufWriter, Write},
path::PathBuf,
};
use std::fs::{self, File};
use std::io::{BufWriter, Write};
use std::path::PathBuf;
use flate2::{write::GzEncoder, Compression};
use meilisearch_types::{
keys::Key,
settings::{Checked, Settings},
};
use flate2::write::GzEncoder;
use flate2::Compression;
use meilisearch_types::keys::Key;
use meilisearch_types::settings::{Checked, Settings};
use serde_json::{Map, Value};
use tempfile::TempDir;
use time::OffsetDateTime;
use uuid::Uuid;
use crate::{reader::Document, IndexMetadata, Metadata, Result, TaskDump, CURRENT_DUMP_VERSION};
use crate::reader::Document;
use crate::{IndexMetadata, Metadata, Result, TaskDump, CURRENT_DUMP_VERSION};
pub struct DumpWriter {
dir: TempDir,
@ -36,10 +34,7 @@ impl DumpWriter {
db_version: env!("CARGO_PKG_VERSION").to_string(),
dump_date: OffsetDateTime::now_utc(),
};
fs::write(
dir.path().join("metadata.json"),
serde_json::to_string(&metadata)?,
)?;
fs::write(dir.path().join("metadata.json"), serde_json::to_string(&metadata)?)?;
std::fs::create_dir(&dir.path().join("indexes"))?;
@ -77,9 +72,7 @@ pub struct KeyWriter {
impl KeyWriter {
pub(crate) fn new(path: PathBuf) -> Result<Self> {
let keys = File::create(path.join("keys.jsonl"))?;
Ok(KeyWriter {
keys: BufWriter::new(keys),
})
Ok(KeyWriter { keys: BufWriter::new(keys) })
}
pub fn push_key(&mut self, key: &Key) -> Result<()> {
@ -107,10 +100,7 @@ impl TaskWriter {
let update_files = path.join("update_files");
std::fs::create_dir(&update_files)?;
Ok(TaskWriter {
queue: BufWriter::new(queue),
update_files,
})
Ok(TaskWriter { queue: BufWriter::new(queue), update_files })
}
/// Pushes tasks into the dump.
@ -119,9 +109,7 @@ impl TaskWriter {
self.queue.write_all(&serde_json::to_vec(task)?)?;
self.queue.write_all(b"\n")?;
Ok(UpdateFile::new(
self.update_files.join(format!("{}.jsonl", task.uid)),
))
Ok(UpdateFile::new(self.update_files.join(format!("{}.jsonl", task.uid))))
}
pub fn flush(mut self) -> Result<()> {
@ -175,10 +163,7 @@ impl IndexWriter {
let documents = File::create(path.join("documents.jsonl"))?;
let settings = File::create(path.join("settings.json"))?;
Ok(IndexWriter {
documents: BufWriter::new(documents),
settings,
})
Ok(IndexWriter { documents: BufWriter::new(documents), settings })
}
pub fn push_document(&mut self, document: &Map<String, Value>) -> Result<()> {
@ -200,20 +185,20 @@ impl IndexWriter {
#[cfg(test)]
pub(crate) mod test {
use std::{fmt::Write, io::BufReader, path::Path, str::FromStr};
use std::fmt::Write;
use std::io::BufReader;
use std::path::Path;
use std::str::FromStr;
use flate2::bufread::GzDecoder;
use meilisearch_types::settings::Unchecked;
use crate::{
reader::Document,
test::{
create_test_api_keys, create_test_documents, create_test_dump,
create_test_instance_uid, create_test_settings, create_test_tasks,
},
};
use super::*;
use crate::reader::Document;
use crate::test::{
create_test_api_keys, create_test_documents, create_test_dump, create_test_instance_uid,
create_test_settings, create_test_tasks,
};
fn create_directory_hierarchy(dir: &Path) -> String {
let mut ret = String::new();
@ -226,10 +211,8 @@ pub(crate) mod test {
let mut ret = String::new();
// the entries are not guaranteed to be returned in the same order, thus we need to sort them.
let mut entries = fs::read_dir(dir)
.unwrap()
.collect::<std::result::Result<Vec<_>, _>>()
.unwrap();
let mut entries =
fs::read_dir(dir).unwrap().collect::<std::result::Result<Vec<_>, _>>().unwrap();
// I want the directories first, and then everything sorted by name.
entries.sort_by(|a, b| {
@ -317,18 +300,12 @@ pub(crate) mod test {
"###);
let instance_uid = fs::read_to_string(dump_path.join("instance_uid.uuid")).unwrap();
assert_eq!(
Uuid::from_str(&instance_uid).unwrap(),
create_test_instance_uid()
);
assert_eq!(Uuid::from_str(&instance_uid).unwrap(), create_test_instance_uid());
// ==== checking the index
let docs = fs::read_to_string(dump_path.join("indexes/doggos/documents.jsonl")).unwrap();
for (document, expected) in docs.lines().zip(create_test_documents()) {
assert_eq!(
serde_json::from_str::<Map<String, Value>>(document).unwrap(),
expected
);
assert_eq!(serde_json::from_str::<Map<String, Value>>(document).unwrap(), expected);
}
let test_settings =
fs::read_to_string(dump_path.join("indexes/doggos/settings.json")).unwrap();
@ -356,10 +333,8 @@ pub(crate) mod test {
let path = dump_path.join(format!("tasks/update_files/{}.jsonl", expected.0.uid));
println!("trying to open {}", path.display());
let update = fs::read_to_string(path).unwrap();
let documents: Vec<Document> = update
.lines()
.map(|line| serde_json::from_str(line).unwrap())
.collect();
let documents: Vec<Document> =
update.lines().map(|line| serde_json::from_str(line).unwrap()).collect();
assert_eq!(documents, expected_update);
}
}

View File

@ -5,11 +5,12 @@ tasks affecting a single index into a [batch](crate::batch::Batch).
The main function of the autobatcher is [`next_autobatch`].
*/
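To make the grouping concrete, here is a minimal standalone sketch of the accumulation idea (hypothetical, simplified types — not this module's real API): document tasks on one index keep joining the open batch, while an index deletion absorbs everything accumulated so far and closes it.

use std::ops::ControlFlow::{self, Break, Continue};

#[derive(Debug)]
enum MiniBatch {
    DocumentImport { ids: Vec<u32> },
    IndexDeletion { ids: Vec<u32> },
}

// A further import joins the open batch (Continue); an index deletion
// swallows the accumulated task ids and ends the batch (Break).
fn accumulate(batch: MiniBatch, id: u32, is_deletion: bool) -> ControlFlow<MiniBatch, MiniBatch> {
    match (batch, is_deletion) {
        (MiniBatch::DocumentImport { mut ids }, false) => {
            ids.push(id);
            Continue(MiniBatch::DocumentImport { ids })
        }
        (MiniBatch::DocumentImport { mut ids }, true) => {
            ids.push(id);
            Break(MiniBatch::IndexDeletion { ids })
        }
        (batch, _) => Break(batch),
    }
}

fn main() {
    let step = accumulate(MiniBatch::DocumentImport { ids: vec![0] }, 1, false);
    println!("{step:?}"); // Continue(DocumentImport { ids: [0, 1] })
}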
use std::ops::ControlFlow::{self, Break, Continue};
use meilisearch_types::milli::update::IndexDocumentsMethod::{
self, ReplaceDocuments, UpdateDocuments,
};
use meilisearch_types::tasks::TaskId;
use std::ops::ControlFlow::{self, Break, Continue};
use crate::KindWithContent;
@ -18,15 +19,10 @@ use crate::KindWithContent;
///
/// Only the non-prioritised tasks that can be grouped in a batch have a corresponding [`AutobatchKind`]
enum AutobatchKind {
DocumentImport {
method: IndexDocumentsMethod,
allow_index_creation: bool,
},
DocumentImport { method: IndexDocumentsMethod, allow_index_creation: bool },
DocumentDeletion,
DocumentClear,
Settings {
allow_index_creation: bool,
},
Settings { allow_index_creation: bool },
IndexCreation,
IndexDeletion,
IndexUpdate,
@ -47,23 +43,16 @@ impl AutobatchKind {
impl From<KindWithContent> for AutobatchKind {
fn from(kind: KindWithContent) -> Self {
match kind {
KindWithContent::DocumentImport {
method,
allow_index_creation,
..
} => AutobatchKind::DocumentImport {
method,
allow_index_creation,
},
KindWithContent::DocumentImport { method, allow_index_creation, .. } => {
AutobatchKind::DocumentImport { method, allow_index_creation }
}
KindWithContent::DocumentDeletion { .. } => AutobatchKind::DocumentDeletion,
KindWithContent::DocumentClear { .. } => AutobatchKind::DocumentClear,
KindWithContent::Settings {
allow_index_creation,
is_deletion,
..
} => AutobatchKind::Settings {
KindWithContent::Settings { allow_index_creation, is_deletion, .. } => {
AutobatchKind::Settings {
allow_index_creation: allow_index_creation && !is_deletion,
},
}
}
KindWithContent::IndexDeletion { .. } => AutobatchKind::IndexDeletion,
KindWithContent::IndexCreation { .. } => AutobatchKind::IndexCreation,
KindWithContent::IndexUpdate { .. } => AutobatchKind::IndexUpdate,
@ -147,20 +136,11 @@ impl BatchKind {
match AutobatchKind::from(kind) {
K::IndexCreation => (Break(BatchKind::IndexCreation { id: task_id }), true),
K::IndexDeletion => (
Break(BatchKind::IndexDeletion { ids: vec![task_id] }),
false,
),
K::IndexDeletion => (Break(BatchKind::IndexDeletion { ids: vec![task_id] }), false),
K::IndexUpdate => (Break(BatchKind::IndexUpdate { id: task_id }), false),
K::IndexSwap => (Break(BatchKind::IndexSwap { id: task_id }), false),
K::DocumentClear => (
Continue(BatchKind::DocumentClear { ids: vec![task_id] }),
false,
),
K::DocumentImport {
method,
allow_index_creation,
} => (
K::DocumentClear => (Continue(BatchKind::DocumentClear { ids: vec![task_id] }), false),
K::DocumentImport { method, allow_index_creation } => (
Continue(BatchKind::DocumentImport {
method,
allow_index_creation,
@ -168,19 +148,11 @@ impl BatchKind {
}),
allow_index_creation,
),
K::DocumentDeletion => (
Continue(BatchKind::DocumentDeletion {
deletion_ids: vec![task_id],
}),
false,
),
K::Settings {
allow_index_creation,
} => (
Continue(BatchKind::Settings {
allow_index_creation,
settings_ids: vec![task_id],
}),
K::DocumentDeletion => {
(Continue(BatchKind::DocumentDeletion { deletion_ids: vec![task_id] }), false)
}
K::Settings { allow_index_creation } => (
Continue(BatchKind::Settings { allow_index_creation, settings_ids: vec![task_id] }),
allow_index_creation,
),
}
@ -461,21 +433,17 @@ pub fn autobatch(
#[cfg(test)]
mod tests {
use crate::debug_snapshot;
use uuid::Uuid;
use super::*;
use uuid::Uuid;
use crate::debug_snapshot;
fn autobatch_from(
index_already_exists: bool,
input: impl IntoIterator<Item = KindWithContent>,
) -> Option<(BatchKind, bool)> {
autobatch(
input
.into_iter()
.enumerate()
.map(|(id, kind)| (id as TaskId, kind.into()))
.collect(),
input.into_iter().enumerate().map(|(id, kind)| (id as TaskId, kind.into())).collect(),
index_already_exists,
)
}
@ -499,9 +467,7 @@ mod tests {
}
fn doc_clr() -> KindWithContent {
KindWithContent::DocumentClear {
index_uid: String::from("doggo"),
}
KindWithContent::DocumentClear { index_uid: String::from("doggo") }
}
fn settings(allow_index_creation: bool) -> KindWithContent {
@ -514,29 +480,19 @@ mod tests {
}
fn idx_create() -> KindWithContent {
KindWithContent::IndexCreation {
index_uid: String::from("doggo"),
primary_key: None,
}
KindWithContent::IndexCreation { index_uid: String::from("doggo"), primary_key: None }
}
fn idx_update() -> KindWithContent {
KindWithContent::IndexUpdate {
index_uid: String::from("doggo"),
primary_key: None,
}
KindWithContent::IndexUpdate { index_uid: String::from("doggo"), primary_key: None }
}
fn idx_del() -> KindWithContent {
KindWithContent::IndexDeletion {
index_uid: String::from("doggo"),
}
KindWithContent::IndexDeletion { index_uid: String::from("doggo") }
}
fn idx_swap() -> KindWithContent {
KindWithContent::IndexSwap {
swaps: vec![(String::from("doggo"), String::from("catto"))],
}
KindWithContent::IndexSwap { swaps: vec![(String::from("doggo"), String::from("catto"))] }
}
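Taken together with `autobatch_from` above, these helpers keep the expectations short. As an illustrative case (hedged: the real suite asserts through `debug_snapshot!` snapshots, whose exact snapshot text may differ), a lone index creation breaks immediately into its own batch and reports that it is allowed to create the index, per the `K::IndexCreation` arm shown earlier:

debug_snapshot!(autobatch_from(false, [idx_create()]), @"Some((IndexCreation { id: 0 }, true))");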
#[test]

View File

@ -21,31 +21,26 @@ use std::collections::HashSet;
use std::fs::File;
use std::io::BufWriter;
use crate::utils::{self, swap_index_uid_in_task};
use crate::Query;
use crate::{autobatcher::BatchKind, Error, IndexScheduler, Result, TaskId};
use dump::IndexMetadata;
use log::{debug, error, info};
use meilisearch_types::milli::documents::obkv_to_object;
use meilisearch_types::milli::update::IndexDocumentsConfig;
use meilisearch_types::heed::{RoTxn, RwTxn};
use meilisearch_types::milli::documents::{obkv_to_object, DocumentsBatchReader};
use meilisearch_types::milli::update::{
DocumentAdditionResult, DocumentDeletionResult, IndexDocumentsMethod,
};
use meilisearch_types::milli::{
self, documents::DocumentsBatchReader, update::Settings as MilliSettings, BEU32,
DocumentAdditionResult, DocumentDeletionResult, IndexDocumentsConfig, IndexDocumentsMethod,
Settings as MilliSettings,
};
use meilisearch_types::milli::{self, BEU32};
use meilisearch_types::settings::{apply_settings_to_builder, Settings, Unchecked};
use meilisearch_types::tasks::{Details, Kind, KindWithContent, Status, Task};
use meilisearch_types::{
heed::{RoTxn, RwTxn},
Index,
};
use meilisearch_types::Index;
use roaring::RoaringBitmap;
use time::OffsetDateTime;
use uuid::Uuid;
use crate::autobatcher::BatchKind;
use crate::utils::{self, swap_index_uid_in_task};
use crate::{Error, IndexScheduler, Query, Result, TaskId};
/// Represents a combination of tasks that can all be processed at the same time.
///
/// A batch contains the set of tasks that it represents (accessible through
@ -57,28 +52,11 @@ pub(crate) enum Batch {
TaskDeletion(Task),
Snapshot(Vec<Task>),
Dump(Task),
IndexOperation {
op: IndexOperation,
must_create_index: bool,
},
IndexCreation {
index_uid: String,
primary_key: Option<String>,
task: Task,
},
IndexUpdate {
index_uid: String,
primary_key: Option<String>,
task: Task,
},
IndexDeletion {
index_uid: String,
tasks: Vec<Task>,
index_has_been_created: bool,
},
IndexSwap {
task: Task,
},
IndexOperation { op: IndexOperation, must_create_index: bool },
IndexCreation { index_uid: String, primary_key: Option<String>, task: Task },
IndexUpdate { index_uid: String, primary_key: Option<String>, task: Task },
IndexDeletion { index_uid: String, tasks: Vec<Task>, index_has_been_created: bool },
IndexSwap { task: Task },
}
/// A [batch](Batch) that combines multiple tasks operating on an index.
@ -212,9 +190,7 @@ impl IndexScheduler {
for task in &tasks {
match task.kind {
KindWithContent::DocumentImport {
content_file,
documents_count,
..
content_file, documents_count, ..
} => {
documents_counts.push(documents_count);
content_files.push(content_file);
@ -241,19 +217,15 @@ impl IndexScheduler {
let mut documents = Vec::new();
for task in &tasks {
match task.kind {
KindWithContent::DocumentDeletion {
ref documents_ids, ..
} => documents.extend_from_slice(documents_ids),
KindWithContent::DocumentDeletion { ref documents_ids, .. } => {
documents.extend_from_slice(documents_ids)
}
_ => unreachable!(),
}
}
Ok(Some(Batch::IndexOperation {
op: IndexOperation::DocumentDeletion {
index_uid,
documents,
tasks,
},
op: IndexOperation::DocumentDeletion { index_uid, documents, tasks },
must_create_index,
}))
}
@ -263,49 +235,30 @@ impl IndexScheduler {
let mut settings = Vec::new();
for task in &tasks {
match task.kind {
KindWithContent::Settings {
ref new_settings,
is_deletion,
..
} => settings.push((is_deletion, new_settings.clone())),
KindWithContent::Settings { ref new_settings, is_deletion, .. } => {
settings.push((is_deletion, new_settings.clone()))
}
_ => unreachable!(),
}
}
Ok(Some(Batch::IndexOperation {
op: IndexOperation::Settings {
index_uid,
settings,
tasks,
},
op: IndexOperation::Settings { index_uid, settings, tasks },
must_create_index,
}))
}
BatchKind::ClearAndSettings {
other,
settings_ids,
allow_index_creation,
} => {
BatchKind::ClearAndSettings { other, settings_ids, allow_index_creation } => {
let (index_uid, settings, settings_tasks) = match self
.create_next_batch_index(
rtxn,
index_uid,
BatchKind::Settings {
settings_ids,
allow_index_creation,
},
BatchKind::Settings { settings_ids, allow_index_creation },
must_create_index,
)?
.unwrap()
{
Batch::IndexOperation {
op:
IndexOperation::Settings {
index_uid,
settings,
tasks,
..
},
op: IndexOperation::Settings { index_uid, settings, tasks, .. },
..
} => (index_uid, settings, tasks),
_ => unreachable!(),
@ -345,21 +298,14 @@ impl IndexScheduler {
let settings = self.create_next_batch_index(
rtxn,
index_uid.clone(),
BatchKind::Settings {
settings_ids,
allow_index_creation,
},
BatchKind::Settings { settings_ids, allow_index_creation },
must_create_index,
)?;
let document_import = self.create_next_batch_index(
rtxn,
index_uid.clone(),
BatchKind::DocumentImport {
method,
allow_index_creation,
import_ids,
},
BatchKind::DocumentImport { method, allow_index_creation, import_ids },
must_create_index,
)?;
@ -377,12 +323,7 @@ impl IndexScheduler {
..
}),
Some(Batch::IndexOperation {
op:
IndexOperation::Settings {
settings,
tasks: settings_tasks,
..
},
op: IndexOperation::Settings { settings, tasks: settings_tasks, .. },
..
}),
) => Ok(Some(Batch::IndexOperation {
@ -404,17 +345,12 @@ impl IndexScheduler {
BatchKind::IndexCreation { id } => {
let task = self.get_task(rtxn, id)?.ok_or(Error::CorruptedTaskQueue)?;
let (index_uid, primary_key) = match &task.kind {
KindWithContent::IndexCreation {
index_uid,
primary_key,
} => (index_uid.clone(), primary_key.clone()),
KindWithContent::IndexCreation { index_uid, primary_key } => {
(index_uid.clone(), primary_key.clone())
}
_ => unreachable!(),
};
Ok(Some(Batch::IndexCreation {
index_uid,
primary_key,
task,
}))
Ok(Some(Batch::IndexCreation { index_uid, primary_key, task }))
}
BatchKind::IndexUpdate { id } => {
let task = self.get_task(rtxn, id)?.ok_or(Error::CorruptedTaskQueue)?;
@ -422,11 +358,7 @@ impl IndexScheduler {
KindWithContent::IndexUpdate { primary_key, .. } => primary_key.clone(),
_ => unreachable!(),
};
Ok(Some(Batch::IndexUpdate {
index_uid,
primary_key,
task,
}))
Ok(Some(Batch::IndexUpdate { index_uid, primary_key, task }))
}
BatchKind::IndexDeletion { ids } => Ok(Some(Batch::IndexDeletion {
index_uid,
@ -453,17 +385,14 @@ impl IndexScheduler {
// 1. we get the last task to cancel.
if let Some(task_id) = to_cancel.max() {
return Ok(Some(Batch::TaskCancelation(
self.get_task(rtxn, task_id)?
.ok_or(Error::CorruptedTaskQueue)?,
self.get_task(rtxn, task_id)?.ok_or(Error::CorruptedTaskQueue)?,
)));
}
// 2. we get the next task to delete
let to_delete = self.get_kind(rtxn, Kind::TaskDeletion)? & enqueued;
if let Some(task_id) = to_delete.min() {
let task = self
.get_task(rtxn, task_id)?
.ok_or(Error::CorruptedTaskQueue)?;
let task = self.get_task(rtxn, task_id)?.ok_or(Error::CorruptedTaskQueue)?;
return Ok(Some(Batch::TaskDeletion(task)));
}
@ -471,25 +400,20 @@ impl IndexScheduler {
// 3. we batch the snapshot.
let to_snapshot = self.get_kind(rtxn, Kind::Snapshot)? & enqueued;
if !to_snapshot.is_empty() {
return Ok(Some(Batch::Snapshot(
self.get_existing_tasks(rtxn, to_snapshot)?,
)));
return Ok(Some(Batch::Snapshot(self.get_existing_tasks(rtxn, to_snapshot)?)));
}
// 4. we batch the dumps.
let to_dump = self.get_kind(rtxn, Kind::DumpExport)? & enqueued;
if let Some(to_dump) = to_dump.min() {
return Ok(Some(Batch::Dump(
self.get_task(rtxn, to_dump)?
.ok_or(Error::CorruptedTaskQueue)?,
self.get_task(rtxn, to_dump)?.ok_or(Error::CorruptedTaskQueue)?,
)));
}
// 5. We take the next task and try to batch all the tasks associated with this index.
if let Some(task_id) = enqueued.min() {
let task = self
.get_task(rtxn, task_id)?
.ok_or(Error::CorruptedTaskQueue)?;
let task = self.get_task(rtxn, task_id)?.ok_or(Error::CorruptedTaskQueue)?;
// This is safe because all the remaining tasks are associated with
// AT LEAST one index. We can use the right or left one, it doesn't
@ -500,11 +424,7 @@ impl IndexScheduler {
let index_tasks = self.index_tasks(rtxn, index_name)? & enqueued;
// If autobatching is disabled we only take one task at a time.
let tasks_limit = if self.autobatching_enabled {
usize::MAX
} else {
1
};
let tasks_limit = if self.autobatching_enabled { usize::MAX } else { 1 };
let enqueued = index_tasks
.into_iter()
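The numbered comments above describe a strict priority order for building the next batch: cancelations first, then task deletions, snapshots, dumps, and finally the regular index tasks. A free-standing sketch of that selection logic (simplified, hypothetical types):

enum NextBatch { Cancelation, Deletion, Snapshot, Dump, IndexTasks }

// Mirrors steps 1-5 above: the first non-empty category wins.
fn pick(cancel: bool, delete: bool, snapshot: bool, dump: bool, index: bool) -> Option<NextBatch> {
    if cancel {
        Some(NextBatch::Cancelation)
    } else if delete {
        Some(NextBatch::Deletion)
    } else if snapshot {
        Some(NextBatch::Snapshot)
    } else if dump {
        Some(NextBatch::Dump)
    } else if index {
        Some(NextBatch::IndexTasks)
    } else {
        None // nothing enqueued, no batch to build
    }
}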
@ -716,10 +636,7 @@ impl IndexScheduler {
task.status = Status::Succeeded;
Ok(vec![task])
}
Batch::IndexOperation {
op,
must_create_index,
} => {
Batch::IndexOperation { op, must_create_index } => {
let index_uid = op.index_uid();
let index = if must_create_index {
// create the index if it doesn't already exist
@ -738,26 +655,14 @@ impl IndexScheduler {
Ok(tasks)
}
Batch::IndexCreation {
index_uid,
primary_key,
task,
} => {
Batch::IndexCreation { index_uid, primary_key, task } => {
let mut wtxn = self.env.write_txn()?;
self.index_mapper.create_index(&mut wtxn, &index_uid)?;
wtxn.commit()?;
self.process_batch(Batch::IndexUpdate {
index_uid,
primary_key,
task,
})
self.process_batch(Batch::IndexUpdate { index_uid, primary_key, task })
}
Batch::IndexUpdate {
index_uid,
primary_key,
mut task,
} => {
Batch::IndexUpdate { index_uid, primary_key, mut task } => {
let rtxn = self.env.read_txn()?;
let index = self.index_mapper.index(&rtxn, &index_uid)?;
@ -781,11 +686,7 @@ impl IndexScheduler {
Ok(vec![task])
}
Batch::IndexDeletion {
index_uid,
index_has_been_created,
mut tasks,
} => {
Batch::IndexDeletion { index_uid, index_has_been_created, mut tasks } => {
let wtxn = self.env.write_txn()?;
// it's possible that the index doesn't exist
@ -807,9 +708,9 @@ impl IndexScheduler {
for task in &mut tasks {
task.status = Status::Succeeded;
task.details = match &task.kind {
KindWithContent::IndexDeletion { .. } => Some(Details::ClearAll {
deleted_documents: Some(number_of_documents),
}),
KindWithContent::IndexDeletion { .. } => {
Some(Details::ClearAll { deleted_documents: Some(number_of_documents) })
}
otherwise => otherwise.default_finished_details(),
};
}
@ -855,9 +756,7 @@ impl IndexScheduler {
// 3. before_name -> new_name in the task's KindWithContent
for task_id in &index_lhs_task_ids | &index_rhs_task_ids {
let mut task = self
.get_task(&wtxn, task_id)?
.ok_or(Error::CorruptedTaskQueue)?;
let mut task = self.get_task(&wtxn, task_id)?.ok_or(Error::CorruptedTaskQueue)?;
swap_index_uid_in_task(&mut task, (lhs, rhs));
self.all_tasks.put(wtxn, &BEU32::new(task_id), &task)?;
}
@ -902,9 +801,7 @@ impl IndexScheduler {
KindWithContent::DocumentClear { .. } => {
let count = if first_clear_found { 0 } else { count };
first_clear_found = true;
Some(Details::ClearAll {
deleted_documents: Some(count),
})
Some(Details::ClearAll { deleted_documents: Some(count) })
}
otherwise => otherwise.default_details(),
};
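One detail worth spelling out: when several DocumentClear tasks end up in the same batch, only the first one reports the real count, because the index is already empty by the time the following clears run. A standalone sketch of that accounting (illustrative only):

fn main() {
    let total_documents = 42u64;
    // the first clear deletes everything; later clears in the batch find nothing
    let reported: Vec<u64> =
        (0..3).map(|i| if i == 0 { total_documents } else { 0 }).collect();
    assert_eq!(reported, vec![42, 0, 0]);
}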
@ -935,10 +832,7 @@ impl IndexScheduler {
}
}
let config = IndexDocumentsConfig {
update_method: method,
..Default::default()
};
let config = IndexDocumentsConfig { update_method: method, ..Default::default() };
let mut builder = milli::update::IndexDocuments::new(
index_wtxn,
@ -973,15 +867,11 @@ impl IndexScheduler {
info!("document addition done: {:?}", addition);
}
for (task, (ret, count)) in tasks
.iter_mut()
.zip(results.into_iter().zip(documents_counts))
for (task, (ret, count)) in
tasks.iter_mut().zip(results.into_iter().zip(documents_counts))
{
match ret {
Ok(DocumentAdditionResult {
indexed_documents,
number_of_documents,
}) => {
Ok(DocumentAdditionResult { indexed_documents, number_of_documents }) => {
task.status = Status::Succeeded;
task.details = Some(Details::DocumentAddition {
received_documents: number_of_documents,
@ -1001,19 +891,13 @@ impl IndexScheduler {
Ok(tasks)
}
IndexOperation::DocumentDeletion {
index_uid: _,
documents,
mut tasks,
} => {
IndexOperation::DocumentDeletion { index_uid: _, documents, mut tasks } => {
let mut builder = milli::update::DeleteDocuments::new(index_wtxn, index)?;
documents.iter().for_each(|id| {
builder.delete_external_id(id);
});
let DocumentDeletionResult {
deleted_documents, ..
} = builder.execute()?;
let DocumentDeletionResult { deleted_documents, .. } = builder.execute()?;
for (task, documents) in tasks.iter_mut().zip(documents) {
task.status = Status::Succeeded;
@ -1025,11 +909,7 @@ impl IndexScheduler {
Ok(tasks)
}
IndexOperation::Settings {
index_uid: _,
settings,
mut tasks,
} => {
IndexOperation::Settings { index_uid: _, settings, mut tasks } => {
let indexer_config = self.index_mapper.indexer_config();
// TODO merge the settings to only do *one* reindexation.
for (task, (_, settings)) in tasks.iter_mut().zip(settings) {
@ -1105,11 +985,7 @@ impl IndexScheduler {
let settings_tasks = self.apply_index_operation(
index_wtxn,
index,
IndexOperation::Settings {
index_uid,
settings,
tasks: settings_tasks,
},
IndexOperation::Settings { index_uid, settings, tasks: settings_tasks },
)?;
let mut tasks = settings_tasks;
@ -1139,9 +1015,7 @@ impl IndexScheduler {
let mut affected_kinds = HashSet::new();
for task_id in to_delete_tasks.iter() {
let task = self
.get_task(wtxn, task_id)?
.ok_or(Error::CorruptedTaskQueue)?;
let task = self.get_task(wtxn, task_id)?.ok_or(Error::CorruptedTaskQueue)?;
if let Some(task_indexes) = task.indexes() {
affected_indexes.extend(task_indexes.into_iter().map(|x| x.to_owned()));
}

View File

@ -1,6 +1,5 @@
use meilisearch_types::error::{Code, ErrorCode};
use meilisearch_types::heed;
use meilisearch_types::milli;
use meilisearch_types::{heed, milli};
use thiserror::Error;
use crate::TaskId;

View File

@ -122,10 +122,7 @@ impl IndexMapper {
}
// Finally we remove the entry from the index map.
assert!(matches!(
index_map.write().unwrap().remove(&uuid),
Some(BeingDeleted)
));
assert!(matches!(index_map.write().unwrap().remove(&uuid), Some(BeingDeleted)));
});
Ok(())
@ -183,8 +180,7 @@ impl IndexMapper {
.iter(rtxn)?
.map(|ret| {
ret.map_err(Error::from).and_then(|(name, _)| {
self.index(rtxn, name)
.map(|index| (name.to_string(), index))
self.index(rtxn, name).map(|index| (name.to_string(), index))
})
})
.collect()

View File

@ -29,29 +29,28 @@ mod utils;
pub type Result<T> = std::result::Result<T, Error>;
pub type TaskId = u32;
use dump::{KindDump, TaskDump, UpdateFile};
pub use error::Error;
use meilisearch_types::milli::documents::DocumentsBatchBuilder;
use meilisearch_types::tasks::{Kind, KindWithContent, Status, Task};
use utils::keep_tasks_within_datetimes;
use std::path::PathBuf;
use std::sync::atomic::{AtomicBool, Ordering::Relaxed};
use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering::Relaxed;
use std::sync::{Arc, RwLock};
use dump::{KindDump, TaskDump, UpdateFile};
pub use error::Error;
use file_store::FileStore;
use meilisearch_types::error::ResponseError;
use meilisearch_types::heed::types::{OwnedType, SerdeBincode, SerdeJson, Str};
use meilisearch_types::heed::{self, Database, Env};
use meilisearch_types::milli;
use meilisearch_types::milli::documents::DocumentsBatchBuilder;
use meilisearch_types::milli::update::IndexerConfig;
use meilisearch_types::milli::{CboRoaringBitmapCodec, Index, RoaringBitmapCodec, BEU32};
use meilisearch_types::tasks::{Kind, KindWithContent, Status, Task};
use roaring::RoaringBitmap;
use synchronoise::SignalEvent;
use time::OffsetDateTime;
use utils::keep_tasks_within_datetimes;
use uuid::Uuid;
use meilisearch_types::heed::types::{OwnedType, SerdeBincode, SerdeJson, Str};
use meilisearch_types::heed::{self, Database, Env};
use meilisearch_types::milli::update::IndexerConfig;
use meilisearch_types::milli::{CboRoaringBitmapCodec, Index, RoaringBitmapCodec, BEU32};
use crate::index_mapper::IndexMapper;
type BEI128 = meilisearch_types::heed::zerocopy::I128<meilisearch_types::heed::byteorder::BE>;
@ -124,10 +123,7 @@ impl Query {
pub fn with_index(self, index_uid: String) -> Self {
let mut index_vec = self.index_uid.unwrap_or_default();
index_vec.push(index_uid);
Self {
index_uid: Some(index_vec),
..self
}
Self { index_uid: Some(index_vec), ..self }
}
}
@ -142,10 +138,7 @@ struct ProcessingTasks {
impl ProcessingTasks {
/// Creates an empty `ProcessingTasks` struct.
fn new() -> ProcessingTasks {
ProcessingTasks {
started_at: OffsetDateTime::now_utc(),
processing: RoaringBitmap::new(),
}
ProcessingTasks { started_at: OffsetDateTime::now_utc(), processing: RoaringBitmap::new() }
}
/// Stores the currently processing tasks, and the date time at which it started.
@ -447,21 +440,11 @@ impl IndexScheduler {
let tasks = self.get_existing_tasks(
&rtxn,
tasks
.into_iter()
.rev()
.take(query.limit.unwrap_or(u32::MAX) as usize),
tasks.into_iter().rev().take(query.limit.unwrap_or(u32::MAX) as usize),
)?;
let ProcessingTasks {
started_at,
processing,
..
} = self
.processing_tasks
.read()
.map_err(|_| Error::CorruptedTaskQueue)?
.clone();
let ProcessingTasks { started_at, processing, .. } =
self.processing_tasks.read().map_err(|_| Error::CorruptedTaskQueue)?.clone();
let ret = tasks.into_iter();
if processing.is_empty() {
@ -469,11 +452,9 @@ impl IndexScheduler {
} else {
Ok(ret
.map(|task| match processing.contains(task.uid) {
true => Task {
status: Status::Processing,
started_at: Some(started_at),
..task
},
true => {
Task { status: Status::Processing, started_at: Some(started_at), ..task }
}
false => task,
})
.collect())
@ -497,8 +478,7 @@ impl IndexScheduler {
status: Status::Enqueued,
kind: kind.clone(),
};
self.all_tasks
.append(&mut wtxn, &BEU32::new(task.uid), &task)?;
self.all_tasks.append(&mut wtxn, &BEU32::new(task.uid), &task)?;
if let Some(indexes) = task.indexes() {
for index in indexes {
@ -527,11 +507,7 @@ impl IndexScheduler {
// we inform the processing tasks to stop (if necessary).
if let KindWithContent::TaskCancelation { tasks, .. } = kind {
let tasks_to_cancel = RoaringBitmap::from_iter(tasks);
if self
.processing_tasks
.read()
.unwrap()
.must_cancel_processing_tasks(&tasks_to_cancel)
if self.processing_tasks.read().unwrap().must_cancel_processing_tasks(&tasks_to_cancel)
{
self.must_stop_processing.must_stop();
}
@ -601,16 +577,14 @@ impl IndexScheduler {
KindDump::DocumentClear => KindWithContent::DocumentClear {
index_uid: task.index_uid.ok_or(Error::CorruptedDump)?,
},
KindDump::Settings {
settings,
is_deletion,
allow_index_creation,
} => KindWithContent::Settings {
KindDump::Settings { settings, is_deletion, allow_index_creation } => {
KindWithContent::Settings {
index_uid: task.index_uid.ok_or(Error::CorruptedDump)?,
new_settings: settings,
is_deletion,
allow_index_creation,
},
}
}
KindDump::IndexDeletion => KindWithContent::IndexDeletion {
index_uid: task.index_uid.ok_or(Error::CorruptedDump)?,
},
@ -629,21 +603,14 @@ impl IndexScheduler {
KindDump::TasksDeletion { query, tasks } => {
KindWithContent::TaskDeletion { query, tasks }
}
KindDump::DumpExport {
dump_uid,
keys,
instance_uid,
} => KindWithContent::DumpExport {
dump_uid,
keys,
instance_uid,
},
KindDump::DumpExport { dump_uid, keys, instance_uid } => {
KindWithContent::DumpExport { dump_uid, keys, instance_uid }
}
KindDump::Snapshot => KindWithContent::Snapshot,
},
};
self.all_tasks
.put(&mut wtxn, &BEU32::new(task.uid), &task)?;
self.all_tasks.put(&mut wtxn, &BEU32::new(task.uid), &task)?;
if let Some(indexes) = task.indexes() {
for index in indexes {
@ -729,19 +696,12 @@ impl IndexScheduler {
// We reset the must_stop flag to be sure that we don't stop processing tasks
self.must_stop_processing.reset();
self.processing_tasks
.write()
.unwrap()
.start_processing_at(started_at, processing_tasks);
self.processing_tasks.write().unwrap().start_processing_at(started_at, processing_tasks);
#[cfg(test)]
{
self.test_breakpoint_sdr
.send(Breakpoint::BatchCreated)
.unwrap();
self.test_breakpoint_sdr
.send(Breakpoint::BeforeProcessing)
.unwrap();
self.test_breakpoint_sdr.send(Breakpoint::BatchCreated).unwrap();
self.test_breakpoint_sdr.send(Breakpoint::BeforeProcessing).unwrap();
}
// 2. Process the tasks
@ -781,16 +741,11 @@ impl IndexScheduler {
}
}
}
self.processing_tasks
.write()
.unwrap()
.stop_processing_at(finished_at);
self.processing_tasks.write().unwrap().stop_processing_at(finished_at);
wtxn.commit()?;
#[cfg(test)]
self.test_breakpoint_sdr
.send(Breakpoint::AfterProcessing)
.unwrap();
self.test_breakpoint_sdr.send(Breakpoint::AfterProcessing).unwrap();
Ok(processed_tasks)
}
@ -812,16 +767,12 @@ mod tests {
use tempfile::TempDir;
use uuid::Uuid;
use crate::snapshot::snapshot_index_scheduler;
use super::*;
use crate::snapshot::snapshot_index_scheduler;
/// Return a `KindWithContent::IndexCreation` task
fn index_creation_task(index: &'static str, primary_key: &'static str) -> KindWithContent {
KindWithContent::IndexCreation {
index_uid: S(index),
primary_key: Some(S(primary_key)),
}
KindWithContent::IndexCreation { index_uid: S(index), primary_key: Some(S(primary_key)) }
}
/// Create a `KindWithContent::DocumentImport` task that imports documents.
///
@ -864,9 +815,7 @@ mod tests {
}}"#
);
let (_uuid, mut file) = index_scheduler
.create_update_file_with_uuid(file_uuid)
.unwrap();
let (_uuid, mut file) = index_scheduler.create_update_file_with_uuid(file_uuid).unwrap();
let documents_count =
meilisearch_types::document_formats::read_json(content.as_bytes(), file.as_file_mut())
.unwrap() as u64;
@ -890,10 +839,8 @@ mod tests {
)
.unwrap();
let index_scheduler_handle = IndexSchedulerHandle {
_tempdir: tempdir,
test_breakpoint_rcv: receiver,
};
let index_scheduler_handle =
IndexSchedulerHandle { _tempdir: tempdir, test_breakpoint_rcv: receiver };
(index_scheduler, index_scheduler_handle)
}
@ -952,18 +899,12 @@ mod tests {
fn insert_task_while_another_task_is_processing() {
let (index_scheduler, handle) = IndexScheduler::test(true);
index_scheduler
.register(index_creation_task("index_a", "id"))
.unwrap();
index_scheduler.register(index_creation_task("index_a", "id")).unwrap();
handle.wait_till(Breakpoint::BatchCreated);
// while the task is processing, can we register another task?
index_scheduler.register(index_creation_task("index_b", "id")).unwrap();
index_scheduler
.register(index_creation_task("index_b", "id"))
.unwrap();
index_scheduler
.register(KindWithContent::IndexDeletion {
index_uid: S("index_a"),
})
.register(KindWithContent::IndexDeletion { index_uid: S("index_a") })
.unwrap();
snapshot!(snapshot_index_scheduler(&index_scheduler));
@ -976,21 +917,13 @@ mod tests {
let (index_scheduler, handle) = IndexScheduler::test(true);
index_scheduler
.register(KindWithContent::IndexCreation {
index_uid: S("doggos"),
primary_key: None,
})
.register(KindWithContent::IndexCreation { index_uid: S("doggos"), primary_key: None })
.unwrap();
index_scheduler
.register(KindWithContent::IndexCreation {
index_uid: S("cattos"),
primary_key: None,
})
.register(KindWithContent::IndexCreation { index_uid: S("cattos"), primary_key: None })
.unwrap();
index_scheduler
.register(KindWithContent::IndexDeletion {
index_uid: S("doggos"),
})
.register(KindWithContent::IndexDeletion { index_uid: S("doggos") })
.unwrap();
handle.wait_till(Breakpoint::Start);
@ -1011,25 +944,16 @@ mod tests {
let (index_scheduler, handle) = IndexScheduler::test(false);
index_scheduler
.register(KindWithContent::IndexCreation {
index_uid: S("doggos"),
primary_key: None,
})
.register(KindWithContent::IndexCreation { index_uid: S("doggos"), primary_key: None })
.unwrap();
index_scheduler
.register(KindWithContent::DocumentClear {
index_uid: S("doggos"),
})
.register(KindWithContent::DocumentClear { index_uid: S("doggos") })
.unwrap();
index_scheduler
.register(KindWithContent::DocumentClear {
index_uid: S("doggos"),
})
.register(KindWithContent::DocumentClear { index_uid: S("doggos") })
.unwrap();
index_scheduler
.register(KindWithContent::DocumentClear {
index_uid: S("doggos"),
})
.register(KindWithContent::DocumentClear { index_uid: S("doggos") })
.unwrap();
handle.wait_till(Breakpoint::AfterProcessing);
@ -1211,10 +1135,7 @@ mod tests {
}"#;
index_scheduler
.register(KindWithContent::IndexCreation {
index_uid: S("doggos"),
primary_key: None,
})
.register(KindWithContent::IndexCreation { index_uid: S("doggos"), primary_key: None })
.unwrap();
let (uuid, mut file) = index_scheduler.create_update_file_with_uuid(0).unwrap();
@ -1233,9 +1154,7 @@ mod tests {
})
.unwrap();
index_scheduler
.register(KindWithContent::IndexDeletion {
index_uid: S("doggos"),
})
.register(KindWithContent::IndexDeletion { index_uid: S("doggos") })
.unwrap();
snapshot!(snapshot_index_scheduler(&index_scheduler));
@ -1263,9 +1182,7 @@ mod tests {
for name in index_names {
index_scheduler
.register(KindWithContent::DocumentClear {
index_uid: name.to_string(),
})
.register(KindWithContent::DocumentClear { index_uid: name.to_string() })
.unwrap();
}
@ -1308,10 +1225,7 @@ mod tests {
index_scheduler
.register(KindWithContent::IndexSwap {
swaps: vec![
("a".to_owned(), "b".to_owned()),
("c".to_owned(), "d".to_owned()),
],
swaps: vec![("a".to_owned(), "b".to_owned()), ("c".to_owned(), "d".to_owned())],
})
.unwrap();
@ -1319,9 +1233,7 @@ mod tests {
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "first_swap_processed");
index_scheduler
.register(KindWithContent::IndexSwap {
swaps: vec![("a".to_owned(), "c".to_owned())],
})
.register(KindWithContent::IndexSwap { swaps: vec![("a".to_owned(), "c".to_owned())] })
.unwrap();
handle.wait_till(Breakpoint::AfterProcessing);
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "second_swap_processed");
@ -1353,9 +1265,7 @@ mod tests {
})
.unwrap();
index_scheduler
.register(KindWithContent::IndexDeletion {
index_uid: S("doggos"),
})
.register(KindWithContent::IndexDeletion { index_uid: S("doggos") })
.unwrap();
snapshot!(snapshot_index_scheduler(&index_scheduler));

View File

@ -1,16 +1,11 @@
use meilisearch_types::heed::types::{OwnedType, SerdeBincode, SerdeJson, Str};
use meilisearch_types::heed::{Database, RoTxn};
use meilisearch_types::milli::{CboRoaringBitmapCodec, RoaringBitmapCodec, BEU32};
use meilisearch_types::tasks::Details;
use meilisearch_types::{
heed::{
types::{OwnedType, SerdeBincode, SerdeJson, Str},
Database, RoTxn,
},
tasks::Task,
};
use meilisearch_types::tasks::{Details, Task};
use roaring::RoaringBitmap;
use crate::BEI128;
use crate::{index_mapper::IndexMapper, IndexScheduler, Kind, Status};
use crate::index_mapper::IndexMapper;
use crate::{IndexScheduler, Kind, Status, BEI128};
pub fn snapshot_index_scheduler(scheduler: &IndexScheduler) -> String {
let IndexScheduler {
@ -37,9 +32,7 @@ pub fn snapshot_index_scheduler(scheduler: &IndexScheduler) -> String {
let mut snap = String::new();
let processing_tasks = processing_tasks.read().unwrap().processing.clone();
snap.push_str(&format!(
"### Autobatching Enabled = {autobatching_enabled}\n"
));
snap.push_str(&format!("### Autobatching Enabled = {autobatching_enabled}\n"));
snap.push_str("### Processing Tasks:\n");
snap.push_str(&snapshot_bitmap(&processing_tasks));
snap.push_str("\n----------------------------------------------------------------------\n");
@ -151,6 +144,7 @@ fn snapshot_task(task: &Task) -> String {
snap.push('}');
snap
}
fn snaphsot_details(d: &Details) -> String {
match d {
Details::DocumentAddition {
@ -191,8 +185,7 @@ fn snaphsot_details(d: &Details) -> String {
},
Details::IndexSwap { swaps } => {
format!("{{ indexes: {swaps:?} }}")
},
}
}
}
@ -205,6 +198,7 @@ fn snapshot_status(rtxn: &RoTxn, db: Database<SerdeBincode<Status>, RoaringBitma
}
snap
}
fn snapshot_kind(rtxn: &RoTxn, db: Database<SerdeBincode<Kind>, RoaringBitmapCodec>) -> String {
let mut snap = String::new();
let mut iter = db.iter(rtxn).unwrap();
@ -227,11 +221,6 @@ fn snapshot_index_tasks(rtxn: &RoTxn, db: Database<Str, RoaringBitmapCodec>) ->
}
fn snapshot_index_mapper(rtxn: &RoTxn, mapper: &IndexMapper) -> String {
let names = mapper
.indexes(rtxn)
.unwrap()
.into_iter()
.map(|(n, _)| n)
.collect::<Vec<_>>();
let names = mapper.indexes(rtxn).unwrap().into_iter().map(|(n, _)| n).collect::<Vec<_>>();
format!("{names:?}")
}

View File

@ -2,29 +2,22 @@
use std::ops::Bound;
use meilisearch_types::heed::types::OwnedType;
use meilisearch_types::heed::Database;
use meilisearch_types::heed::{types::DecodeIgnore, RoTxn, RwTxn};
use meilisearch_types::heed::types::{DecodeIgnore, OwnedType};
use meilisearch_types::heed::{Database, RoTxn, RwTxn};
use meilisearch_types::milli::{CboRoaringBitmapCodec, BEU32};
use meilisearch_types::tasks::{Kind, KindWithContent, Status};
use roaring::{MultiOps, RoaringBitmap};
use time::OffsetDateTime;
use crate::{Error, IndexScheduler, Result, Task, TaskId, BEI128};
use meilisearch_types::tasks::{Kind, KindWithContent, Status};
impl IndexScheduler {
pub(crate) fn all_task_ids(&self, rtxn: &RoTxn) -> Result<RoaringBitmap> {
enum_iterator::all()
.map(|s| self.get_status(&rtxn, s))
.union()
enum_iterator::all().map(|s| self.get_status(&rtxn, s)).union()
}
pub(crate) fn last_task_id(&self, rtxn: &RoTxn) -> Result<Option<TaskId>> {
Ok(self
.all_tasks
.remap_data_type::<DecodeIgnore>()
.last(rtxn)?
.map(|(k, _)| k.get() + 1))
Ok(self.all_tasks.remap_data_type::<DecodeIgnore>().last(rtxn)?.map(|(k, _)| k.get() + 1))
}
pub(crate) fn next_task_id(&self, rtxn: &RoTxn) -> Result<TaskId> {
@ -45,16 +38,13 @@ impl IndexScheduler {
tasks
.into_iter()
.map(|task_id| {
self.get_task(rtxn, task_id)
.and_then(|task| task.ok_or(Error::CorruptedTaskQueue))
self.get_task(rtxn, task_id).and_then(|task| task.ok_or(Error::CorruptedTaskQueue))
})
.collect::<Result<_>>()
}
pub(crate) fn update_task(&self, wtxn: &mut RwTxn, task: &Task) -> Result<()> {
let old_task = self
.get_task(wtxn, task.uid)?
.ok_or(Error::CorruptedTaskQueue)?;
let old_task = self.get_task(wtxn, task.uid)?.ok_or(Error::CorruptedTaskQueue)?;
debug_assert_eq!(old_task.uid, task.uid);
@ -85,19 +75,13 @@ impl IndexScheduler {
"Cannot update a task's enqueued_at time"
);
if old_task.started_at != task.started_at {
assert!(
old_task.started_at.is_none(),
"Cannot update a task's started_at time"
);
assert!(old_task.started_at.is_none(), "Cannot update a task's started_at time");
if let Some(started_at) = task.started_at {
insert_task_datetime(wtxn, self.started_at, started_at, task.uid)?;
}
}
if old_task.finished_at != task.finished_at {
assert!(
old_task.finished_at.is_none(),
"Cannot update a task's finished_at time"
);
assert!(old_task.finished_at.is_none(), "Cannot update a task's finished_at time");
if let Some(finished_at) = task.finished_at {
insert_task_datetime(wtxn, self.finished_at, finished_at, task.uid)?;
}
@ -269,7 +253,9 @@ pub fn swap_index_uid_in_task(task: &mut Task, swap: (&str, &str)) {
}
}
}
K::TaskCancelation { .. } | K::TaskDeletion { .. } | K::DumpExport { .. } | K::Snapshot => (),
K::TaskCancelation { .. } | K::TaskDeletion { .. } | K::DumpExport { .. } | K::Snapshot => {
()
}
};
for index_uid in index_uids {
if index_uid == &swap.0 {

View File

@ -1,10 +1,10 @@
use once_cell::sync::Lazy;
use std::borrow::Cow;
use std::path::PathBuf;
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::sync::Mutex;
use std::{collections::HashMap, path::Path};
pub use insta;
use once_cell::sync::Lazy;
static SNAPSHOT_NAMES: Lazy<Mutex<HashMap<PathBuf, usize>>> = Lazy::new(|| Mutex::default());
@ -23,18 +23,9 @@ pub fn default_snapshot_settings_for_test(name: Option<&str>) -> (insta::Setting
let filename = path.file_name().unwrap().to_str().unwrap();
settings.set_omit_expression(true);
let test_name = std::thread::current()
.name()
.unwrap()
.rsplit("::")
.next()
.unwrap()
.to_owned();
let test_name = std::thread::current().name().unwrap().rsplit("::").next().unwrap().to_owned();
let path = Path::new("snapshots")
.join(filename)
.join(&test_name)
.to_owned();
let path = Path::new("snapshots").join(filename).join(&test_name).to_owned();
settings.set_snapshot_path(path.clone());
let snap_name = if let Some(name) = name {
Cow::Borrowed(name)

View File

@ -1,10 +1,9 @@
use serde_json::Deserializer;
use std::fs::File;
use std::io::BufReader;
use std::io::Write;
use std::io::{BufReader, Write};
use std::path::Path;
use serde_json::Deserializer;
use crate::{AuthController, HeedAuthStore, Result};
const KEYS_PATH: &str = "keys";

View File

@ -7,18 +7,16 @@ use std::ops::Deref;
use std::path::Path;
use std::sync::Arc;
use error::{AuthControllerError, Result};
use meilisearch_types::keys::{Action, Key};
use meilisearch_types::star_or::StarOr;
use serde::{Deserialize, Serialize};
use serde_json::Value;
pub use store::open_auth_store_env;
use store::{generate_key_as_hexa, HeedAuthStore};
use time::OffsetDateTime;
use uuid::Uuid;
use error::{AuthControllerError, Result};
use meilisearch_types::star_or::StarOr;
use store::generate_key_as_hexa;
pub use store::open_auth_store_env;
use store::HeedAuthStore;
#[derive(Clone)]
pub struct AuthController {
store: Arc<HeedAuthStore>,
@ -33,18 +31,13 @@ impl AuthController {
generate_default_keys(&store)?;
}
Ok(Self {
store: Arc::new(store),
master_key: master_key.clone(),
})
Ok(Self { store: Arc::new(store), master_key: master_key.clone() })
}
pub fn create_key(&self, value: Value) -> Result<Key> {
let key = Key::create_from_value(value)?;
match self.store.get_api_key(key.uid)? {
Some(_) => Err(AuthControllerError::ApiKeyAlreadyExists(
key.uid.to_string(),
)),
Some(_) => Err(AuthControllerError::ApiKeyAlreadyExists(key.uid.to_string())),
None => self.store.put_api_key(key),
}
}
@ -63,9 +56,9 @@ impl AuthController {
pub fn get_optional_uid_from_encoded_key(&self, encoded_key: &[u8]) -> Result<Option<Uuid>> {
match &self.master_key {
Some(master_key) => self
.store
.get_uid_from_encoded_key(encoded_key, master_key.as_bytes()),
Some(master_key) => {
self.store.get_uid_from_encoded_key(encoded_key, master_key.as_bytes())
}
None => Ok(None),
}
}
@ -131,9 +124,7 @@ impl AuthController {
/// Generate a valid key from a key id using the current master key.
/// Returns None if no master key has been set.
pub fn generate_key(&self, uid: Uuid) -> Option<String> {
self.master_key
.as_ref()
.map(|master_key| generate_key_as_hexa(uid, master_key.as_bytes()))
self.master_key.as_ref().map(|master_key| generate_key_as_hexa(uid, master_key.as_bytes()))
}
/// Check if the provided key is authorized to make a specific action
@ -151,8 +142,7 @@ impl AuthController {
.or(match index {
// else check if the key has access to the requested index.
Some(index) => {
self.store
.get_expiration_date(uid, action, Some(index.as_bytes()))?
self.store.get_expiration_date(uid, action, Some(index.as_bytes()))?
}
// or to any index if no index has been requested.
None => self.store.prefix_first_expiration_date(uid, action)?,
@ -185,10 +175,7 @@ pub struct AuthFilter {
impl Default for AuthFilter {
fn default() -> Self {
Self {
search_rules: SearchRules::default(),
allow_index_creation: true,
}
Self { search_rules: SearchRules::default(), allow_index_creation: true }
}
}
@ -223,10 +210,9 @@ impl SearchRules {
None
}
}
Self::Map(map) => map
.get(index)
.or_else(|| map.get("*"))
.map(|isr| isr.clone().unwrap_or_default()),
Self::Map(map) => {
map.get(index).or_else(|| map.get("*")).map(|isr| isr.clone().unwrap_or_default())
}
}
}
}

View File

@ -1,8 +1,7 @@
use std::borrow::Cow;
use std::cmp::Reverse;
use std::collections::HashSet;
use std::convert::TryFrom;
use std::convert::TryInto;
use std::convert::{TryFrom, TryInto};
use std::fs::create_dir_all;
use std::ops::Deref;
use std::path::Path;
@ -59,12 +58,7 @@ impl HeedAuthStore {
let keys = env.create_database(Some(KEY_DB_NAME))?;
let action_keyid_index_expiration =
env.create_database(Some(KEY_ID_ACTION_INDEX_EXPIRATION_DB_NAME))?;
Ok(Self {
env,
keys,
action_keyid_index_expiration,
should_close_on_drop: true,
})
Ok(Self { env, keys, action_keyid_index_expiration, should_close_on_drop: true })
}
pub fn set_drop_on_close(&mut self, v: bool) {
@ -94,11 +88,7 @@ impl HeedAuthStore {
Action::All => actions.extend(enum_iterator::all::<Action>()),
Action::DocumentsAll => {
actions.extend(
[
Action::DocumentsGet,
Action::DocumentsDelete,
Action::DocumentsAdd,
]
[Action::DocumentsGet, Action::DocumentsDelete, Action::DocumentsAdd]
.iter(),
);
}

View File

@ -72,11 +72,8 @@ mod mini_dashboard {
resource_dir(&dashboard_dir).build()?;
// Write the sha1 for the dashboard back to file.
let mut file = OpenOptions::new()
.write(true)
.create(true)
.truncate(true)
.open(sha1_path)?;
let mut file =
OpenOptions::new().write(true).create(true).truncate(true).open(sha1_path)?;
file.write_all(sha1.as_bytes())?;
file.flush()?;

View File

@ -1,12 +1,13 @@
use std::{any::Any, sync::Arc};
use std::any::Any;
use std::sync::Arc;
use actix_web::HttpRequest;
use meilisearch_types::InstanceUid;
use serde_json::Value;
use crate::{routes::indexes::documents::UpdateDocumentsQuery, Opt};
use super::{find_user_id, Analytics};
use crate::routes::indexes::documents::UpdateDocumentsQuery;
use crate::Opt;
pub struct MockAnalytics {
instance_uid: Option<InstanceUid>,

View File

@ -9,14 +9,13 @@ use std::str::FromStr;
use actix_web::HttpRequest;
use meilisearch_types::InstanceUid;
pub use mock_analytics::MockAnalytics;
use once_cell::sync::Lazy;
use platform_dirs::AppDirs;
use serde_json::Value;
use crate::routes::indexes::documents::UpdateDocumentsQuery;
pub use mock_analytics::MockAnalytics;
// if we are in debug mode OR the analytics feature is disabled
// the `SegmentAnalytics` points to the mock instead of the real analytics
#[cfg(any(debug_assertions, not(feature = "analytics")))]
@ -42,12 +41,7 @@ fn config_user_id_path(db_path: &Path) -> Option<PathBuf> {
db_path
.canonicalize()
.ok()
.map(|path| {
path.join("instance-uid")
.display()
.to_string()
.replace('/', "-")
})
.map(|path| path.join("instance-uid").display().to_string().replace('/', "-"))
.zip(MEILISEARCH_CONFIG_PATH.as_ref())
.map(|(filename, config_path)| config_path.join(filename.trim_start_matches('-')))
}

View File

@ -21,6 +21,7 @@ use tokio::select;
use tokio::sync::mpsc::{self, Receiver, Sender};
use uuid::Uuid;
use super::{config_user_id_path, MEILISEARCH_CONFIG_PATH};
use crate::analytics::Analytics;
use crate::option::default_http_addr;
use crate::routes::indexes::documents::UpdateDocumentsQuery;
@ -31,16 +32,13 @@ use crate::search::{
};
use crate::Opt;
use super::{config_user_id_path, MEILISEARCH_CONFIG_PATH};
const ANALYTICS_HEADER: &str = "X-Meilisearch-Client";
/// Write the instance-uid in the `data.ms` and in `~/.config/MeiliSearch/path-to-db-instance-uid`. Ignore the errors.
fn write_user_id(db_path: &Path, user_id: &InstanceUid) {
let _ = fs::write(db_path.join("instance-uid"), user_id.as_bytes());
if let Some((meilisearch_config_path, user_id_path)) = MEILISEARCH_CONFIG_PATH
.as_ref()
.zip(config_user_id_path(db_path))
if let Some((meilisearch_config_path, user_id_path)) =
MEILISEARCH_CONFIG_PATH.as_ref().zip(config_user_id_path(db_path))
{
let _ = fs::create_dir_all(&meilisearch_config_path);
let _ = fs::write(user_id_path, user_id.to_string());
@ -84,22 +82,16 @@ impl SegmentAnalytics {
let instance_uid = instance_uid.unwrap_or_else(|| Uuid::new_v4());
write_user_id(&opt.db_path, &instance_uid);
let client = reqwest::Client::builder()
.connect_timeout(Duration::from_secs(10))
.build();
let client = reqwest::Client::builder().connect_timeout(Duration::from_secs(10)).build();
// if reqwest throws an error we won't be able to send analytics
if client.is_err() {
return super::MockAnalytics::new(opt);
}
let client = HttpClient::new(
client.unwrap(),
"https://telemetry.meilisearch.com".to_string(),
);
let user = User::UserId {
user_id: instance_uid.to_string(),
};
let client =
HttpClient::new(client.unwrap(), "https://telemetry.meilisearch.com".to_string());
let user = User::UserId { user_id: instance_uid.to_string() };
let mut batcher = AutoBatcher::new(client, Batcher::new(None), SEGMENT_API_KEY.to_string());
// If Meilisearch is Launched for the first time:
@ -108,9 +100,7 @@ impl SegmentAnalytics {
if first_time_run {
let _ = batcher
.push(Track {
user: User::UserId {
user_id: "total_launch".to_string(),
},
user: User::UserId { user_id: "total_launch".to_string() },
event: "Launched".to_string(),
..Default::default()
})
@ -139,11 +129,7 @@ impl SegmentAnalytics {
});
tokio::spawn(segment.run(index_scheduler.clone()));
let this = Self {
instance_uid,
sender,
user: user.clone(),
};
let this = Self { instance_uid, sender, user: user.clone() };
Arc::new(this)
}
@ -164,21 +150,15 @@ impl super::Analytics for SegmentAnalytics {
properties: send,
..Default::default()
};
let _ = self
.sender
.try_send(AnalyticsMsg::BatchMessage(event.into()));
let _ = self.sender.try_send(AnalyticsMsg::BatchMessage(event.into()));
}
fn get_search(&self, aggregate: SearchAggregator) {
let _ = self
.sender
.try_send(AnalyticsMsg::AggregateGetSearch(aggregate));
let _ = self.sender.try_send(AnalyticsMsg::AggregateGetSearch(aggregate));
}
fn post_search(&self, aggregate: SearchAggregator) {
let _ = self
.sender
.try_send(AnalyticsMsg::AggregatePostSearch(aggregate));
let _ = self.sender.try_send(AnalyticsMsg::AggregatePostSearch(aggregate));
}
fn add_documents(
@ -188,9 +168,7 @@ impl super::Analytics for SegmentAnalytics {
request: &HttpRequest,
) {
let aggregate = DocumentsAggregator::from_query(documents_query, index_creation, request);
let _ = self
.sender
.try_send(AnalyticsMsg::AggregateAddDocuments(aggregate));
let _ = self.sender.try_send(AnalyticsMsg::AggregateAddDocuments(aggregate));
}
fn update_documents(
@ -200,9 +178,7 @@ impl super::Analytics for SegmentAnalytics {
request: &HttpRequest,
) {
let aggregate = DocumentsAggregator::from_query(documents_query, index_creation, request);
let _ = self
.sender
.try_send(AnalyticsMsg::AggregateUpdateDocuments(aggregate));
let _ = self.sender.try_send(AnalyticsMsg::AggregateUpdateDocuments(aggregate));
}
}
@ -261,11 +237,8 @@ impl Segment {
infos
};
let number_of_documents = stats
.indexes
.values()
.map(|index| index.number_of_documents)
.collect::<Vec<u64>>();
let number_of_documents =
stats.indexes.values().map(|index| index.number_of_documents).collect::<Vec<u64>>();
json!({
"start_since_days": FIRST_START_TIMESTAMP.elapsed().as_secs() / (60 * 60 * 24), // one day
@ -413,11 +386,7 @@ impl SearchAggregator {
let syntax = match filter {
Value::String(_) => "string".to_string(),
Value::Array(values) => {
if values
.iter()
.map(|v| v.to_string())
.any(|s| RE.is_match(&s))
{
if values.iter().map(|v| v.to_string()).any(|s| RE.is_match(&s)) {
"mixed".to_string()
} else {
"array".to_string()
@ -448,8 +417,7 @@ impl SearchAggregator {
ret.finite_pagination = 0;
}
ret.matching_strategy
.insert(format!("{:?}", query.matching_strategy), 1);
ret.matching_strategy.insert(format!("{:?}", query.matching_strategy), 1);
ret.highlight_pre_tag = query.highlight_pre_tag != DEFAULT_HIGHLIGHT_PRE_TAG();
ret.highlight_post_tag = query.highlight_post_tag != DEFAULT_HIGHLIGHT_POST_TAG();
@ -481,17 +449,14 @@ impl SearchAggregator {
self.time_spent.append(&mut other.time_spent);
// sort
self.sort_with_geo_point |= other.sort_with_geo_point;
self.sort_sum_of_criteria_terms = self
.sort_sum_of_criteria_terms
.saturating_add(other.sort_sum_of_criteria_terms);
self.sort_total_number_of_criteria = self
.sort_total_number_of_criteria
.saturating_add(other.sort_total_number_of_criteria);
self.sort_sum_of_criteria_terms =
self.sort_sum_of_criteria_terms.saturating_add(other.sort_sum_of_criteria_terms);
self.sort_total_number_of_criteria =
self.sort_total_number_of_criteria.saturating_add(other.sort_total_number_of_criteria);
// filter
self.filter_with_geo_radius |= other.filter_with_geo_radius;
self.filter_sum_of_criteria_terms = self
.filter_sum_of_criteria_terms
.saturating_add(other.filter_sum_of_criteria_terms);
self.filter_sum_of_criteria_terms =
self.filter_sum_of_criteria_terms.saturating_add(other.filter_sum_of_criteria_terms);
self.filter_total_number_of_criteria = self
.filter_total_number_of_criteria
.saturating_add(other.filter_total_number_of_criteria);

View File

@ -33,11 +33,7 @@ impl<P, D> GuardedData<P, D> {
{
match Self::authenticate(auth, token, index).await? {
Some(filters) => match data {
Some(data) => Ok(Self {
data,
filters,
_marker: PhantomData,
}),
Some(data) => Ok(Self { data, filters, _marker: PhantomData }),
None => Err(AuthenticationError::IrretrievableState.into()),
},
None => Err(AuthenticationError::InvalidToken.into()),
@ -52,12 +48,7 @@ impl<P, D> GuardedData<P, D> {
match Self::authenticate(auth, String::new(), None).await? {
Some(filters) => match data {
Some(data) => Ok(Self {
data,
filters,
_marker: PhantomData,
}),
Some(data) => Ok(Self { data, filters, _marker: PhantomData }),
None => Err(AuthenticationError::IrretrievableState.into()),
},
None if missing_master_key => Err(AuthenticationError::MissingMasterKey.into()),
@ -133,14 +124,14 @@ pub trait Policy {
pub mod policies {
use jsonwebtoken::{decode, Algorithm, DecodingKey, Validation};
use meilisearch_auth::{AuthController, AuthFilter, SearchRules};
// reexport actions in policies in order to be used in routes configuration.
pub use meilisearch_types::keys::{actions, Action};
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use uuid::Uuid;
use crate::extractors::authentication::Policy;
use meilisearch_auth::{AuthController, AuthFilter, SearchRules};
// reexport actions in policies in order to be used in routes configuration.
pub use meilisearch_types::keys::{actions, Action};
fn tenant_token_validation() -> Validation {
let mut validation = Validation::default();
@ -178,10 +169,7 @@ pub mod policies {
// authenticate if token is the master key.
// master key can only have access to keys routes.
// if master key is None only keys routes are inaccessible.
if auth
.get_master_key()
.map_or_else(|| !is_keys_action(A), |mk| mk == token)
{
if auth.get_master_key().map_or_else(|| !is_keys_action(A), |mk| mk == token) {
return Some(AuthFilter::default());
}
@ -239,9 +227,7 @@ pub mod policies {
}
}
return auth
.get_key_filters(uid, Some(data.claims.search_rules))
.ok();
return auth.get_key_filters(uid, Some(data.claims.search_rules)).ok();
}
None

View File

@ -1,7 +1,10 @@
#![allow(non_snake_case)]
use std::{future::Future, pin::Pin, task::Poll};
use std::future::Future;
use std::pin::Pin;
use std::task::Poll;
use actix_web::{dev::Payload, FromRequest, Handler, HttpRequest};
use actix_web::dev::Payload;
use actix_web::{FromRequest, Handler, HttpRequest};
use pin_project_lite::pin_project;
/// `SeqHandler` is an actix `Handler` that enforces that extractor errors are returned in the

View File

@ -13,37 +13,32 @@ pub mod metrics;
#[cfg(feature = "metrics")]
pub mod route_metrics;
use std::{
fs::File,
io::{BufReader, BufWriter},
path::Path,
sync::{atomic::AtomicBool, Arc},
};
use std::fs::File;
use std::io::{BufReader, BufWriter};
use std::path::Path;
use std::sync::atomic::AtomicBool;
use std::sync::Arc;
use crate::error::MeilisearchHttpError;
use actix_cors::Cors;
use actix_http::body::MessageBody;
use actix_web::{dev::ServiceFactory, error::JsonPayloadError, middleware};
use actix_web::{dev::ServiceResponse, web::Data};
use actix_web::dev::{ServiceFactory, ServiceResponse};
use actix_web::error::JsonPayloadError;
use actix_web::web::Data;
use actix_web::{middleware, web, HttpRequest};
use analytics::Analytics;
use anyhow::bail;
use error::PayloadError;
use http::header::CONTENT_TYPE;
use meilisearch_types::{
milli::{
self,
documents::{DocumentsBatchBuilder, DocumentsBatchReader},
update::{IndexDocumentsConfig, IndexDocumentsMethod},
},
settings::apply_settings_to_builder,
};
pub use option::Opt;
use actix_web::{web, HttpRequest};
use extractors::payload::PayloadConfig;
use http::header::CONTENT_TYPE;
use index_scheduler::IndexScheduler;
use meilisearch_auth::AuthController;
use meilisearch_types::milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader};
use meilisearch_types::milli::update::{IndexDocumentsConfig, IndexDocumentsMethod};
use meilisearch_types::milli::{self};
use meilisearch_types::settings::apply_settings_to_builder;
pub use option::Opt;
use crate::error::MeilisearchHttpError;
pub static AUTOBATCHING_ENABLED: AtomicBool = AtomicBool::new(false);
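Note: the import rewrite above reflects rustfmt's `imports_granularity = "Module"` option, which splits nested `use` trees so that each module gets its own `use` line. A std-only sketch (the file path is just an example):

```rust
// Before (one nested tree):
//     use std::{fs::File, io::{BufReader, Read}, path::Path};
// After `imports_granularity = "Module"` (one `use` per module):
use std::fs::File;
use std::io::{BufReader, Read};
use std::path::Path;

fn main() -> std::io::Result<()> {
    // Any readable file works here; "Cargo.toml" is an arbitrary choice.
    let path = Path::new("Cargo.toml");
    let mut contents = String::new();
    BufReader::new(File::open(path)?).read_to_string(&mut contents)?;
    println!("read {} bytes", contents.len());
    Ok(())
}
```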
@ -103,14 +98,9 @@ pub fn create_app(
)
.wrap(middleware::Logger::default())
.wrap(middleware::Compress::default())
.wrap(middleware::NormalizePath::new(
middleware::TrailingSlash::Trim,
));
.wrap(middleware::NormalizePath::new(middleware::TrailingSlash::Trim));
#[cfg(feature = "metrics")]
let app = app.wrap(Condition::new(
opt.enable_metrics_route,
route_metrics::RouteMetrics,
));
let app = app.wrap(Condition::new(opt.enable_metrics_route, route_metrics::RouteMetrics));
app
}
@ -154,30 +144,18 @@ pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<(IndexScheduler, AuthContr
if empty_db && src_path_exists {
let (mut index_scheduler, mut auth_controller) = meilisearch_builder()?;
import_dump(
&opt.db_path,
path,
&mut index_scheduler,
&mut auth_controller,
)?;
import_dump(&opt.db_path, path, &mut index_scheduler, &mut auth_controller)?;
(index_scheduler, auth_controller)
} else if !empty_db && !opt.ignore_dump_if_db_exists {
bail!(
"database already exists at {:?}, try to delete it or rename it",
opt.db_path
.canonicalize()
.unwrap_or_else(|_| opt.db_path.to_owned())
opt.db_path.canonicalize().unwrap_or_else(|_| opt.db_path.to_owned())
)
} else if !src_path_exists && !opt.ignore_missing_dump {
bail!("dump doesn't exist at {:?}", path)
} else {
let (mut index_scheduler, mut auth_controller) = meilisearch_builder()?;
import_dump(
&opt.db_path,
path,
&mut index_scheduler,
&mut auth_controller,
)?;
import_dump(&opt.db_path, path, &mut index_scheduler, &mut auth_controller)?;
(index_scheduler, auth_controller)
}
} else {
@ -232,10 +210,7 @@ fn import_dump(
// 1. Import the instance-uid.
if let Some(ref instance_uid) = instance_uid {
// we don't want to panic if there is an error with the instance-uid.
let _ = std::fs::write(
db_path.join("instance-uid"),
instance_uid.to_string().as_bytes(),
);
let _ = std::fs::write(db_path.join("instance-uid"), instance_uid.to_string().as_bytes());
};
// 2. Import the `Key`s.
@ -271,10 +246,7 @@ fn import_dump(
log::info!("Importing the settings.");
let settings = index_reader.settings()?;
apply_settings_to_builder(&settings, &mut builder);
builder.execute(
|indexing_step| log::debug!("update: {:?}", indexing_step),
|| false,
)?;
builder.execute(|indexing_step| log::debug!("update: {:?}", indexing_step), || false)?;
// 3.3 Import the documents.
// 3.3.1 We need to recreate the grenad+obkv format accepted by the index.
@ -368,9 +340,7 @@ pub fn dashboard(config: &mut web::ServiceConfig, enable_frontend: bool) {
let generated = generated::generate();
// Generate routes for mini-dashboard assets
for (path, resource) in generated.into_iter() {
let Resource {
mime_type, data, ..
} = resource;
let Resource { mime_type, data, .. } = resource;
// Redirect index.html to /
if path == "index.html" {
config.service(web::resource("/").route(web::get().to(move || async move {

View File

@ -8,8 +8,7 @@ use actix_web::HttpServer;
use index_scheduler::IndexScheduler;
use meilisearch_auth::AuthController;
use meilisearch_http::analytics::Analytics;
use meilisearch_http::{analytics, create_app};
use meilisearch_http::{setup_meilisearch, Opt};
use meilisearch_http::{analytics, create_app, setup_meilisearch, Opt};
#[global_allocator]
static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
@ -89,24 +88,22 @@ async fn run_http(
.keep_alive(KeepAlive::Os);
if let Some(config) = opt_clone.get_ssl_config()? {
http_server
.bind_rustls(opt_clone.http_addr, config)?
.run()
.await?;
http_server.bind_rustls(opt_clone.http_addr, config)?.run().await?;
} else {
http_server.bind(&opt_clone.http_addr)?.run().await?;
}
Ok(())
}
pub fn print_launch_resume(opt: &Opt, analytics: Arc<dyn Analytics>, config_read_from: Option<PathBuf>) {
pub fn print_launch_resume(
opt: &Opt,
analytics: Arc<dyn Analytics>,
config_read_from: Option<PathBuf>,
) {
let commit_sha = option_env!("VERGEN_GIT_SHA").unwrap_or("unknown");
let commit_date = option_env!("VERGEN_GIT_COMMIT_TIMESTAMP").unwrap_or("unknown");
let protocol = if opt.ssl_cert_path.is_some() && opt.ssl_key_path.is_some() {
"https"
} else {
"http"
};
let protocol =
if opt.ssl_cert_path.is_some() && opt.ssl_key_path.is_some() { "https" } else { "http" };
let ascii_name = r#"
888b d888 d8b 888 d8b 888
8888b d8888 Y8P 888 Y8P 888
@ -131,10 +128,7 @@ pub fn print_launch_resume(opt: &Opt, analytics: Arc<dyn Analytics>, config_read
eprintln!("Environment:\t\t{:?}", opt.env);
eprintln!("Commit SHA:\t\t{:?}", commit_sha.to_string());
eprintln!("Commit date:\t\t{:?}", commit_date.to_string());
eprintln!(
"Package version:\t{:?}",
env!("CARGO_PKG_VERSION").to_string()
);
eprintln!("Package version:\t{:?}", env!("CARGO_PKG_VERSION").to_string());
#[cfg(all(not(debug_assertions), feature = "analytics"))]
{

View File

@ -1,9 +1,8 @@
use lazy_static::lazy_static;
use prometheus::{
opts, register_histogram_vec, register_int_counter_vec, register_int_gauge,
register_int_gauge_vec,
register_int_gauge_vec, HistogramVec, IntCounterVec, IntGauge, IntGaugeVec,
};
use prometheus::{HistogramVec, IntCounterVec, IntGauge, IntGaugeVec};
const HTTP_RESPONSE_TIME_CUSTOM_BUCKETS: &[f64; 14] = &[
0.0005, 0.0008, 0.00085, 0.0009, 0.00095, 0.001, 0.00105, 0.0011, 0.00115, 0.0012, 0.0015,
@ -16,19 +15,14 @@ lazy_static! {
&["method", "path"]
)
.expect("Can't create a metric");
pub static ref MEILISEARCH_DB_SIZE_BYTES: IntGauge = register_int_gauge!(opts!(
"meilisearch_db_size_bytes",
"Meilisearch Db Size In Bytes"
))
pub static ref MEILISEARCH_DB_SIZE_BYTES: IntGauge =
register_int_gauge!(opts!("meilisearch_db_size_bytes", "Meilisearch Db Size In Bytes"))
.expect("Can't create a metric");
pub static ref MEILISEARCH_INDEX_COUNT: IntGauge =
register_int_gauge!(opts!("meilisearch_index_count", "Meilisearch Index Count"))
.expect("Can't create a metric");
pub static ref MEILISEARCH_INDEX_DOCS_COUNT: IntGaugeVec = register_int_gauge_vec!(
opts!(
"meilisearch_index_docs_count",
"Meilisearch Index Docs Count"
),
opts!("meilisearch_index_docs_count", "Meilisearch Index Docs Count"),
&["index"]
)
.expect("Can't create a metric");

View File

@ -1,24 +1,21 @@
use std::convert::TryFrom;
use std::env::VarError;
use std::ffi::OsStr;
use std::io::{BufReader, Read};
use std::num::ParseIntError;
use std::ops::Deref;
use std::path::PathBuf;
use std::str::FromStr;
use std::ffi::OsStr;
use std::env::VarError;
use std::sync::Arc;
use std::{env, fmt, fs};
use byte_unit::{Byte, ByteError};
use clap::Parser;
use meilisearch_types::milli::update::IndexerConfig;
use rustls::{
server::{
AllowAnyAnonymousOrAuthenticatedClient, AllowAnyAuthenticatedClient,
ServerSessionMemoryCache,
},
RootCertStore,
use rustls::server::{
AllowAnyAnonymousOrAuthenticatedClient, AllowAnyAuthenticatedClient, ServerSessionMemoryCache,
};
use rustls::RootCertStore;
use rustls_pemfile::{certs, pkcs8_private_keys, rsa_private_keys};
use serde::{Deserialize, Serialize};
use sysinfo::{RefreshKind, System, SystemExt};
@ -502,9 +499,7 @@ pub struct SchedulerConfig {
impl SchedulerConfig {
pub fn export_to_env(self) {
let SchedulerConfig {
disable_auto_batching,
} = self;
let SchedulerConfig { disable_auto_batching } = self;
export_to_env_if_not_present(DISABLE_AUTO_BATCHING, disable_auto_batching.to_string());
}
}
@ -513,9 +508,8 @@ impl TryFrom<&IndexerOpts> for IndexerConfig {
type Error = anyhow::Error;
fn try_from(other: &IndexerOpts) -> Result<Self, Self::Error> {
let thread_pool = rayon::ThreadPoolBuilder::new()
.num_threads(*other.max_indexing_threads)
.build()?;
let thread_pool =
rayon::ThreadPoolBuilder::new().num_threads(*other.max_indexing_threads).build()?;
Ok(Self {
log_every_n: Some(other.log_every_n),
@ -553,11 +547,7 @@ impl FromStr for MaxMemory {
impl Default for MaxMemory {
fn default() -> MaxMemory {
MaxMemory(
total_memory_bytes()
.map(|bytes| bytes * 2 / 3)
.map(Byte::from_bytes),
)
MaxMemory(total_memory_bytes().map(|bytes| bytes * 2 / 3).map(Byte::from_bytes))
}
}
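Note: independent of the formatting change, the default above budgets two thirds of the detected total memory, or no cap at all when the total is unknown. A small sketch of that arithmetic with a hypothetical helper (the `Byte` wrapping is omitted):

```rust
// Two thirds of total memory, or no cap when the total is unknown.
fn default_memory_budget(total_memory_bytes: Option<u64>) -> Option<u64> {
    total_memory_bytes.map(|bytes| bytes * 2 / 3)
}

fn main() {
    assert_eq!(default_memory_budget(Some(3_000_000_000)), Some(2_000_000_000));
    assert_eq!(default_memory_budget(None), None);
}
```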
@ -757,9 +747,7 @@ mod test {
#[test]
fn test_meilli_config_file_path_invalid() {
temp_env::with_vars(
vec![("MEILI_CONFIG_FILE_PATH", Some("../configgg.toml"))],
|| {
temp_env::with_vars(vec![("MEILI_CONFIG_FILE_PATH", Some("../configgg.toml"))], || {
let possible_error_messages = [
"unable to open or read the \"../configgg.toml\" configuration file: No such file or directory (os error 2).",
"unable to open or read the \"../configgg.toml\" configuration file: The system cannot find the file specified. (os error 2).", // Windows
@ -771,7 +759,6 @@ mod test {
possible_error_messages,
error_message
);
},
);
});
}
}

View File

@ -1,17 +1,13 @@
use std::future::{ready, Ready};
use actix_web::dev::{self, Service, ServiceRequest, ServiceResponse, Transform};
use actix_web::http::header;
use actix_web::HttpResponse;
use actix_web::{
dev::{self, Service, ServiceRequest, ServiceResponse, Transform},
Error,
};
use actix_web::{Error, HttpResponse};
use futures_util::future::LocalBoxFuture;
use meilisearch_auth::actions;
use meilisearch_lib::MeiliSearch;
use meilisearch_types::error::ResponseError;
use prometheus::HistogramTimer;
use prometheus::{Encoder, TextEncoder};
use prometheus::{Encoder, HistogramTimer, TextEncoder};
use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::GuardedData;
@ -33,15 +29,11 @@ pub async fn get_metrics(
let encoder = TextEncoder::new();
let mut buffer = vec![];
encoder
.encode(&prometheus::gather(), &mut buffer)
.expect("Failed to encode metrics");
encoder.encode(&prometheus::gather(), &mut buffer).expect("Failed to encode metrics");
let response = String::from_utf8(buffer).expect("Failed to convert bytes to string");
Ok(HttpResponse::Ok()
.insert_header(header::ContentType(mime::TEXT_PLAIN))
.body(response))
Ok(HttpResponse::Ok().insert_header(header::ContentType(mime::TEXT_PLAIN)).body(response))
}
pub struct RouteMetrics;

View File

@ -1,19 +1,18 @@
use std::str;
use actix_web::{web, HttpRequest, HttpResponse};
use meilisearch_auth::error::AuthControllerError;
use meilisearch_auth::AuthController;
use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::keys::{Action, Key};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use time::OffsetDateTime;
use uuid::Uuid;
use meilisearch_auth::{error::AuthControllerError, AuthController};
use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::keys::{Action, Key};
use crate::extractors::{
authentication::{policies::*, GuardedData},
sequential_extractor::SeqHandler,
};
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::Pagination;
pub fn configure(cfg: &mut web::ServiceConfig) {
@ -52,10 +51,8 @@ pub async fn list_api_keys(
) -> Result<HttpResponse, ResponseError> {
let page_view = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
let keys = auth_controller.list_keys()?;
let page_view = paginate.auto_paginate_sized(
keys.into_iter()
.map(|k| KeyView::from_key(k, &auth_controller)),
);
let page_view = paginate
.auto_paginate_sized(keys.into_iter().map(|k| KeyView::from_key(k, &auth_controller)));
Ok(page_view)
})
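Note: the reordered `use` block above follows rustfmt's `group_imports = "StdExternalCrate"`: standard-library imports first, then external crates, then `crate`/`super`/`self` imports, each group separated by a blank line. A compilable sketch using a local module in place of a real `crate::` path; the external-crate group is shown only in a comment so the example builds with std alone:

```rust
// use serde::Serialize;   // an external-crate import would sit in this middle group
use std::collections::HashMap;

use self::util::describe; // stands in for a `crate::` import in this sketch

mod util {
    use std::collections::HashMap;

    pub fn describe(map: &HashMap<String, u32>) {
        println!("{} entries", map.len());
    }
}

fn main() {
    let map = HashMap::from([(String::from("dogs"), 2u32)]);
    describe(&map);
}
```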

View File

@ -10,7 +10,8 @@ use time::macros::format_description;
use time::OffsetDateTime;
use crate::analytics::Analytics;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::SummarizedTaskView;
@ -38,9 +39,7 @@ pub async fn create_dump(
dump_uid,
};
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task))
.await??
.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))
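Note: the now one-line `.await??` pattern above chains two error conversions: the first `?` handles the `JoinError` from `tokio::task::spawn_blocking`, the second handles the error returned by the closure itself. A sketch with placeholder types (`register` and `SummarizedTaskView` here are stand-ins, not the scheduler's real API):

```rust
use tokio::task;

#[derive(Debug)]
struct SummarizedTaskView(u32);

// Stand-in for `index_scheduler.register(task)`: any Result-returning blocking call.
fn register() -> Result<u32, std::io::Error> {
    Ok(42)
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // First `?`: the JoinError if the blocking task panicked or was cancelled.
    // Second `?`: the error returned by `register` itself.
    let task: SummarizedTaskView =
        task::spawn_blocking(|| register().map(SummarizedTaskView)).await??;
    println!("{task:?}");
    Ok(())
}
```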

View File

@ -2,8 +2,7 @@ use std::io::Cursor;
use actix_web::http::header::CONTENT_TYPE;
use actix_web::web::Data;
use actix_web::HttpMessage;
use actix_web::{web, HttpRequest, HttpResponse};
use actix_web::{web, HttpMessage, HttpRequest, HttpResponse};
use bstr::ByteSlice;
use futures::StreamExt;
use index_scheduler::IndexScheduler;
@ -23,17 +22,14 @@ use serde_json::Value;
use crate::analytics::Analytics;
use crate::error::MeilisearchHttpError;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::payload::Payload;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::{fold_star_or, PaginationView, SummarizedTaskView};
static ACCEPTED_CONTENT_TYPE: Lazy<Vec<String>> = Lazy::new(|| {
vec![
"application/json".to_string(),
"application/x-ndjson".to_string(),
"text/csv".to_string(),
]
vec!["application/json".to_string(), "application/x-ndjson".to_string(), "text/csv".to_string()]
});
/// Extracts the mime type from the content type and returns
@ -47,9 +43,7 @@ fn extract_mime_type(req: &HttpRequest) -> Result<Option<Mime>, MeilisearchHttpE
content_type.as_bytes().as_bstr().to_string(),
ACCEPTED_CONTENT_TYPE.clone(),
)),
None => Err(MeilisearchHttpError::MissingContentType(
ACCEPTED_CONTENT_TYPE.clone(),
)),
None => Err(MeilisearchHttpError::MissingContentType(ACCEPTED_CONTENT_TYPE.clone())),
},
}
}
@ -101,18 +95,10 @@ pub async fn delete_document(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, Data<IndexScheduler>>,
path: web::Path<DocumentParam>,
) -> Result<HttpResponse, ResponseError> {
let DocumentParam {
document_id,
index_uid,
} = path.into_inner();
let task = KindWithContent::DocumentDeletion {
index_uid,
documents_ids: vec![document_id],
};
let DocumentParam { document_id, index_uid } = path.into_inner();
let task = KindWithContent::DocumentDeletion { index_uid, documents_ids: vec![document_id] };
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task))
.await??
.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))
}
@ -133,11 +119,7 @@ pub async fn get_all_documents(
params: web::Query<BrowseQuery>,
) -> Result<HttpResponse, ResponseError> {
debug!("called with params: {:?}", params);
let BrowseQuery {
limit,
offset,
fields,
} = params.into_inner();
let BrowseQuery { limit, offset, fields } = params.into_inner();
let attributes_to_retrieve = fields.and_then(fold_star_or);
let index = index_scheduler.index(&index_uid)?;
@ -220,10 +202,7 @@ async fn document_addition(
method: IndexDocumentsMethod,
allow_index_creation: bool,
) -> Result<SummarizedTaskView, MeilisearchHttpError> {
let format = match mime_type
.as_ref()
.map(|m| (m.type_().as_str(), m.subtype().as_str()))
{
let format = match mime_type.as_ref().map(|m| (m.type_().as_str(), m.subtype().as_str())) {
Some(("application", "json")) => PayloadType::Json,
Some(("application", "x-ndjson")) => PayloadType::Ndjson,
Some(("text", "csv")) => PayloadType::Csv,
@ -234,9 +213,7 @@ async fn document_addition(
))
}
None => {
return Err(MeilisearchHttpError::MissingContentType(
ACCEPTED_CONTENT_TYPE.clone(),
))
return Err(MeilisearchHttpError::MissingContentType(ACCEPTED_CONTENT_TYPE.clone()))
}
};
@ -308,21 +285,13 @@ pub async fn delete_documents(
debug!("called with params: {:?}", body);
let ids = body
.iter()
.map(|v| {
v.as_str()
.map(String::from)
.unwrap_or_else(|| v.to_string())
})
.map(|v| v.as_str().map(String::from).unwrap_or_else(|| v.to_string()))
.collect();
let task = KindWithContent::DocumentDeletion {
index_uid: path.into_inner(),
documents_ids: ids,
};
let task =
KindWithContent::DocumentDeletion { index_uid: path.into_inner(), documents_ids: ids };
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task))
.await??
.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))
@ -332,13 +301,9 @@ pub async fn clear_all_documents(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, Data<IndexScheduler>>,
path: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
let task = KindWithContent::DocumentClear {
index_uid: path.into_inner(),
};
let task = KindWithContent::DocumentClear { index_uid: path.into_inner() };
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task))
.await??
.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))
@ -352,8 +317,7 @@ fn all_documents<'a>(
let all_fields: Vec<_> = fields_ids_map.iter().map(|(id, _)| id).collect();
Ok(index.all_documents(rtxn)?.map(move |ret| {
ret.map_err(ResponseError::from)
.and_then(|(_key, document)| -> Result<_, ResponseError> {
ret.map_err(ResponseError::from).and_then(|(_key, document)| -> Result<_, ResponseError> {
Ok(milli::obkv_to_json(&all_fields, &fields_ids_map, document)?)
})
}))
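Note: the content-type dispatch above matches on a `(type, subtype)` string pair extracted from a `mime::Mime`. A sketch under that assumption, with an illustrative `payload_type` helper:

```rust
use mime::Mime;

// Illustrative helper mirroring the dispatch in the route above.
fn payload_type(mime_type: Option<&Mime>) -> Result<&'static str, String> {
    match mime_type.map(|m| (m.type_().as_str(), m.subtype().as_str())) {
        Some(("application", "json")) => Ok("json"),
        Some(("application", "x-ndjson")) => Ok("ndjson"),
        Some(("text", "csv")) => Ok("csv"),
        Some((ty, sub)) => Err(format!("unsupported content type: {ty}/{sub}")),
        None => Err("missing content type".to_string()),
    }
}

fn main() {
    let json: Mime = "application/json".parse().unwrap();
    assert_eq!(payload_type(Some(&json)), Ok("json"));
    assert!(payload_type(None).is_err());
}
```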

View File

@ -9,11 +9,11 @@ use serde::{Deserialize, Serialize};
use serde_json::json;
use time::OffsetDateTime;
use crate::analytics::Analytics;
use crate::extractors::authentication::{policies::*, AuthenticationError, GuardedData};
use crate::extractors::sequential_extractor::SeqHandler;
use super::{Pagination, SummarizedTaskView};
use crate::analytics::Analytics;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::{AuthenticationError, GuardedData};
use crate::extractors::sequential_extractor::SeqHandler;
pub mod documents;
pub mod search;
@ -104,14 +104,9 @@ pub async fn create_index(
Some(&req),
);
let task = KindWithContent::IndexCreation {
index_uid: uid,
primary_key,
};
let task = KindWithContent::IndexCreation { index_uid: uid, primary_key };
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task))
.await??
.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
Ok(HttpResponse::Accepted().json(task))
} else {
@ -160,9 +155,7 @@ pub async fn update_index(
};
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task))
.await??
.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))
@ -172,13 +165,9 @@ pub async fn delete_index(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_DELETE }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
let task = KindWithContent::IndexDeletion {
index_uid: index_uid.into_inner(),
};
let task = KindWithContent::IndexDeletion { index_uid: index_uid.into_inner() };
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task))
.await??
.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
Ok(HttpResponse::Accepted().json(task))
}
@ -189,11 +178,7 @@ pub async fn get_index_stats(
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
analytics.publish(
"Stats Seen".to_string(),
json!({ "per_index_uid": true }),
Some(&req),
);
analytics.publish("Stats Seen".to_string(), json!({ "per_index_uid": true }), Some(&req));
let stats = IndexStats::new((*index_scheduler).clone(), index_uid.into_inner());

View File

@ -9,7 +9,8 @@ use serde_cs::vec::CS;
use serde_json::Value;
use crate::analytics::{Analytics, SearchAggregator};
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::search::{
perform_search, MatchingStrategy, SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
@ -76,9 +77,7 @@ impl From<SearchQueryGet> for SearchQuery {
.map(|o| o.into_iter().collect()),
attributes_to_crop: other.attributes_to_crop.map(|o| o.into_iter().collect()),
crop_length: other.crop_length,
attributes_to_highlight: other
.attributes_to_highlight
.map(|o| o.into_iter().collect()),
attributes_to_highlight: other.attributes_to_highlight.map(|o| o.into_iter().collect()),
filter,
sort: other.sort.map(|attr| fix_sort_query_parameters(&attr)),
show_matches_position: other.show_matches_position,
@ -147,10 +146,8 @@ pub async fn search_with_url_query(
let mut query: SearchQuery = params.into_inner().into();
// Tenant token search_rules.
if let Some(search_rules) = index_scheduler
.filters()
.search_rules
.get_index_search_rules(&index_uid)
if let Some(search_rules) =
index_scheduler.filters().search_rules.get_index_search_rules(&index_uid)
{
add_search_rules(&mut query, search_rules);
}
@ -181,10 +178,8 @@ pub async fn search_with_post(
debug!("search called with params: {:?}", query);
// Tenant token search_rules.
if let Some(search_rules) = index_scheduler
.filters()
.search_rules
.get_index_search_rules(&index_uid)
if let Some(search_rules) =
index_scheduler.filters().search_rules.get_index_search_rules(&index_uid)
{
add_search_rules(&mut query, search_rules);
}
@ -213,13 +208,7 @@ mod test {
let sort = fix_sort_query_parameters("_geoPoint(12, 13):asc");
assert_eq!(sort, vec!["_geoPoint(12,13):asc".to_string()]);
let sort = fix_sort_query_parameters("doggo:asc,_geoPoint(12.45,13.56):desc");
assert_eq!(
sort,
vec![
"doggo:asc".to_string(),
"_geoPoint(12.45,13.56):desc".to_string(),
]
);
assert_eq!(sort, vec!["doggo:asc".to_string(), "_geoPoint(12.45,13.56):desc".to_string(),]);
let sort = fix_sort_query_parameters(
"doggo:asc , _geoPoint(12.45, 13.56, 2590352):desc , catto:desc",
);
@ -233,12 +222,6 @@ mod test {
);
let sort = fix_sort_query_parameters("doggo:asc , _geoPoint(1, 2), catto:desc");
// This is ugly but eh, I don't want to write a full parser just for this unused route
assert_eq!(
sort,
vec![
"doggo:asc".to_string(),
"_geoPoint(1,2),catto:desc".to_string(),
]
);
assert_eq!(sort, vec!["doggo:asc".to_string(), "_geoPoint(1,2),catto:desc".to_string(),]);
}
}

View File

@ -1,15 +1,15 @@
use actix_web::web::Data;
use log::debug;
use actix_web::{web, HttpRequest, HttpResponse};
use index_scheduler::IndexScheduler;
use log::debug;
use meilisearch_types::error::ResponseError;
use meilisearch_types::settings::{settings, Settings, Unchecked};
use meilisearch_types::tasks::KindWithContent;
use serde_json::json;
use crate::analytics::Analytics;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::routes::SummarizedTaskView;
#[macro_export]
@ -18,16 +18,15 @@ macro_rules! make_setting_route {
pub mod $attr {
use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse, Resource};
use log::debug;
use index_scheduler::IndexScheduler;
use log::debug;
use meilisearch_types::error::ResponseError;
use meilisearch_types::milli::update::Setting;
use meilisearch_types::settings::{settings, Settings};
use meilisearch_types::tasks::KindWithContent;
use meilisearch_types::error::ResponseError;
use $crate::analytics::Analytics;
use $crate::extractors::authentication::{policies::*, GuardedData};
use $crate::extractors::authentication::policies::*;
use $crate::extractors::authentication::GuardedData;
use $crate::extractors::sequential_extractor::SeqHandler;
use $crate::routes::SummarizedTaskView;
@ -38,10 +37,7 @@ macro_rules! make_setting_route {
>,
index_uid: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
let new_settings = Settings {
$attr: Setting::Reset,
..Default::default()
};
let new_settings = Settings { $attr: Setting::Reset, ..Default::default() };
let allow_index_creation = index_scheduler.filters().allow_index_creation;
let task = KindWithContent::Settings {
@ -270,13 +266,7 @@ make_setting_route!(
"synonyms"
);
make_setting_route!(
"/distinct-attribute",
put,
String,
distinct_attribute,
"distinctAttribute"
);
make_setting_route!("/distinct-attribute", put, String, distinct_attribute, "distinctAttribute");
make_setting_route!(
"/ranking-rules",
@ -453,9 +443,7 @@ pub async fn update_all(
allow_index_creation,
};
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task))
.await??
.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))
@ -486,9 +474,7 @@ pub async fn delete_all(
allow_index_creation,
};
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task))
.await??
.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))

View File

@ -1,8 +1,5 @@
use std::collections::HashSet;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::tasks::TaskView;
use actix_web::web::Data;
use actix_web::{web, HttpResponse};
use index_scheduler::IndexScheduler;
@ -10,6 +7,11 @@ use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::tasks::KindWithContent;
use serde::Deserialize;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::tasks::TaskView;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::post().to(SeqHandler(indexes_swap))));
}
@ -33,10 +35,7 @@ pub async fn indexes_swap(
let mut swaps = vec![];
let mut indexes_set = HashSet::<String>::default();
for IndexesSwapPayload {
indexes: (lhs, rhs),
} in params.into_inner().into_iter()
{
for IndexesSwapPayload { indexes: (lhs, rhs) } in params.into_inner().into_iter() {
if !search_rules.is_index_authorized(&lhs) || !search_rules.is_index_authorized(&rhs) {
return Err(ResponseError::from_msg(
"TODO: error message when we swap with an index were not allowed to access"

View File

@ -12,10 +12,10 @@ use serde::{Deserialize, Serialize};
use serde_json::json;
use time::OffsetDateTime;
use crate::analytics::Analytics;
use crate::extractors::authentication::{policies::*, GuardedData};
use self::indexes::IndexStats;
use crate::analytics::Analytics;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
mod api_key;
mod dump;
@ -102,11 +102,7 @@ impl Pagination {
T: Serialize,
{
let total = content.len();
let content: Vec<_> = content
.into_iter()
.skip(self.offset)
.take(self.limit)
.collect();
let content: Vec<_> = content.into_iter().skip(self.offset).take(self.limit).collect();
self.format_with(total, content)
}
@ -119,11 +115,7 @@ impl Pagination {
where
T: Serialize,
{
let content: Vec<_> = content
.into_iter()
.skip(self.offset)
.take(self.limit)
.collect();
let content: Vec<_> = content.into_iter().skip(self.offset).take(self.limit).collect();
self.format_with(total, content)
}
@ -133,23 +125,13 @@ impl Pagination {
where
T: Serialize,
{
PaginationView {
results,
offset: self.offset,
limit: self.limit,
total,
}
PaginationView { results, offset: self.offset, limit: self.limit, total }
}
}
impl<T> PaginationView<T> {
pub fn new(offset: usize, limit: usize, total: usize, results: Vec<T>) -> Self {
Self {
offset,
limit,
results,
total,
}
Self { offset, limit, results, total }
}
}
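Note: both paginate helpers above reduce to the same iterator pipeline: skip `offset` items, keep at most `limit`. A stand-alone sketch:

```rust
// Skip `offset` items, keep at most `limit`.
fn paginate<T>(content: Vec<T>, offset: usize, limit: usize) -> Vec<T> {
    content.into_iter().skip(offset).take(limit).collect()
}

fn main() {
    let page = paginate((0..10).collect::<Vec<_>>(), 4, 3);
    assert_eq!(page, vec![4, 5, 6]);
}
```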
@ -211,10 +193,7 @@ pub struct EnqueuedUpdateResult {
pub update_type: UpdateType,
#[serde(with = "time::serde::rfc3339")]
pub enqueued_at: OffsetDateTime,
#[serde(
skip_serializing_if = "Option::is_none",
with = "time::serde::rfc3339::option"
)]
#[serde(skip_serializing_if = "Option::is_none", with = "time::serde::rfc3339::option")]
pub started_processing_at: Option<OffsetDateTime>,
}
@ -275,11 +254,7 @@ async fn get_stats(
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
analytics.publish(
"Stats Seen".to_string(),
json!({ "per_index_uid": false }),
Some(&req),
);
analytics.publish("Stats Seen".to_string(), json!({ "per_index_uid": false }), Some(&req));
let search_rules = &index_scheduler.filters().search_rules;
let stats = create_all_stats((*index_scheduler).clone(), search_rules)?;
@ -300,9 +275,7 @@ pub fn create_all_stats(
limit: Some(1),
..Query::default()
})?;
let processing_index = processing_task
.first()
.and_then(|task| task.index_uid().clone());
let processing_index = processing_task.first().and_then(|task| task.index_uid().clone());
for (name, index) in index_scheduler.indexes()? {
if !search_rules.is_index_authorized(&name) {
continue;
@ -313,9 +286,7 @@ pub fn create_all_stats(
let rtxn = index.read_txn()?;
let stats = IndexStats {
number_of_documents: index.number_of_documents(&rtxn)?,
is_indexing: processing_index
.as_deref()
.map_or(false, |index_name| name == index_name),
is_indexing: processing_index.as_deref().map_or(false, |index_name| name == index_name),
field_distribution: index.field_distribution(&rtxn)?,
};
@ -324,11 +295,7 @@ pub fn create_all_stats(
indexes.insert(name, stats);
}
let stats = Stats {
database_size,
last_update: last_task,
indexes,
};
let stats = Stats { database_size, last_update: last_task, indexes };
Ok(stats)
}

View File

@ -12,11 +12,11 @@ use serde_json::json;
use time::{Duration, OffsetDateTime};
use tokio::task::block_in_place;
use crate::analytics::Analytics;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::extractors::sequential_extractor::SeqHandler;
use super::fold_star_or;
use crate::analytics::Analytics;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
const DEFAULT_LIMIT: fn() -> u32 = || 20;
@ -80,10 +80,7 @@ impl TaskView {
canceled_by: task.canceled_by,
details: task.details.clone().map(DetailsView::from),
error: task.error.clone(),
duration: task
.started_at
.zip(task.finished_at)
.map(|(start, end)| end - start),
duration: task.started_at.zip(task.finished_at).map(|(start, end)| end - start),
enqueued_at: task.enqueued_at,
started_at: task.started_at,
finished_at: task.finished_at,
@ -124,62 +121,45 @@ pub struct DetailsView {
impl From<Details> for DetailsView {
fn from(details: Details) -> Self {
match details.clone() {
Details::DocumentAddition {
received_documents,
indexed_documents,
} => DetailsView {
Details::DocumentAddition { received_documents, indexed_documents } => DetailsView {
received_documents: Some(received_documents),
indexed_documents,
..DetailsView::default()
},
Details::Settings { settings } => DetailsView {
settings: Some(settings),
..DetailsView::default()
},
Details::IndexInfo { primary_key } => DetailsView {
primary_key: Some(primary_key),
..DetailsView::default()
},
Details::DocumentDeletion {
received_document_ids,
deleted_documents,
} => DetailsView {
Details::Settings { settings } => {
DetailsView { settings: Some(settings), ..DetailsView::default() }
}
Details::IndexInfo { primary_key } => {
DetailsView { primary_key: Some(primary_key), ..DetailsView::default() }
}
Details::DocumentDeletion { received_document_ids, deleted_documents } => DetailsView {
received_document_ids: Some(received_document_ids),
deleted_documents: Some(deleted_documents),
..DetailsView::default()
},
Details::ClearAll { deleted_documents } => DetailsView {
deleted_documents: Some(deleted_documents),
..DetailsView::default()
},
Details::TaskCancelation {
matched_tasks,
canceled_tasks,
original_query,
} => DetailsView {
Details::ClearAll { deleted_documents } => {
DetailsView { deleted_documents: Some(deleted_documents), ..DetailsView::default() }
}
Details::TaskCancelation { matched_tasks, canceled_tasks, original_query } => {
DetailsView {
matched_tasks: Some(matched_tasks),
canceled_tasks: Some(canceled_tasks),
original_query: Some(original_query),
..DetailsView::default()
},
Details::TaskDeletion {
matched_tasks,
deleted_tasks,
original_query,
} => DetailsView {
}
}
Details::TaskDeletion { matched_tasks, deleted_tasks, original_query } => DetailsView {
matched_tasks: Some(matched_tasks),
deleted_tasks: Some(deleted_tasks),
original_query: Some(original_query),
..DetailsView::default()
},
Details::Dump { dump_uid } => DetailsView {
dump_uid: Some(dump_uid),
..DetailsView::default()
},
Details::IndexSwap { swaps } => DetailsView {
indexes: Some(swaps),
..Default::default()
},
Details::Dump { dump_uid } => {
DetailsView { dump_uid: Some(dump_uid), ..DetailsView::default() }
}
Details::IndexSwap { swaps } => {
DetailsView { indexes: Some(swaps), ..Default::default() }
}
}
}
}
@ -318,10 +298,8 @@ async fn cancel_tasks(
let filtered_query = filter_out_inaccessible_indexes_from_query(&index_scheduler, &query);
let tasks = index_scheduler.get_task_ids(&filtered_query)?;
let task_cancelation = KindWithContent::TaskCancelation {
query: req.query_string().to_string(),
tasks,
};
let task_cancelation =
KindWithContent::TaskCancelation { query: req.query_string().to_string(), tasks };
let task = block_in_place(|| index_scheduler.register(task_cancelation))?;
let task_view = TaskView::from_task(&task);
@ -377,10 +355,8 @@ async fn delete_tasks(
let filtered_query = filter_out_inaccessible_indexes_from_query(&index_scheduler, &query);
let tasks = index_scheduler.get_task_ids(&filtered_query)?;
let task_deletion = KindWithContent::TaskDeletion {
query: req.query_string().to_string(),
tasks,
};
let task_deletion =
KindWithContent::TaskDeletion { query: req.query_string().to_string(), tasks };
let task = block_in_place(|| index_scheduler.register(task_deletion))?;
let task_view = TaskView::from_task(&task);
@ -448,11 +424,8 @@ async fn get_tasks(
};
let query = filter_out_inaccessible_indexes_from_query(&index_scheduler, &query);
let mut tasks_results: Vec<TaskView> = index_scheduler
.get_tasks(query)?
.into_iter()
.map(|t| TaskView::from_task(&t))
.collect();
let mut tasks_results: Vec<TaskView> =
index_scheduler.get_tasks(query)?.into_iter().map(|t| TaskView::from_task(&t)).collect();
// If we were able to fetch the number +1 tasks we asked
// it means that there is more to come.
@ -483,11 +456,7 @@ async fn get_task(
) -> Result<HttpResponse, ResponseError> {
let task_id = task_id.into_inner();
analytics.publish(
"Tasks Seen".to_string(),
json!({ "per_task_uid": true }),
Some(&req),
);
analytics.publish("Tasks Seen".to_string(), json!({ "per_task_uid": true }), Some(&req));
let search_rules = &index_scheduler.filters().search_rules;
let mut filters = index_scheduler::Query::default();
@ -541,10 +510,9 @@ fn filter_out_inaccessible_indexes_from_query<const ACTION: u8>(
}
pub(crate) mod date_deserializer {
use time::{
format_description::well_known::Rfc3339, macros::format_description, Date, Duration,
OffsetDateTime, Time,
};
use time::format_description::well_known::Rfc3339;
use time::macros::format_description;
use time::{Date, Duration, OffsetDateTime, Time};
enum DeserializeDateOption {
Before,
@ -586,10 +554,11 @@ pub(crate) mod date_deserializer {
/// Deserialize an upper bound datetime with RFC3339 or YYYY-MM-DD.
pub(crate) mod before {
use super::{deserialize_date, DeserializeDateOption};
use serde::Deserializer;
use time::OffsetDateTime;
use super::{deserialize_date, DeserializeDateOption};
/// Deserialize an [`Option<OffsetDateTime>`] from its ISO 8601 representation.
pub fn deserialize<'a, D: Deserializer<'a>>(
deserializer: D,
@ -638,10 +607,11 @@ pub(crate) mod date_deserializer {
///
/// If YYYY-MM-DD is used, the day is incremented by one.
pub(crate) mod after {
use super::{deserialize_date, DeserializeDateOption};
use serde::Deserializer;
use time::OffsetDateTime;
use super::{deserialize_date, DeserializeDateOption};
/// Deserialize an [`Option<OffsetDateTime>`] from its ISO 8601 representation.
pub fn deserialize<'a, D: Deserializer<'a>>(
deserializer: D,
@ -689,9 +659,10 @@ pub(crate) mod date_deserializer {
#[cfg(test)]
mod tests {
use crate::routes::tasks::TaskDeletionQuery;
use meili_snap::snapshot;
use crate::routes::tasks::TaskDeletionQuery;
#[test]
fn deserialize_task_deletion_query_datetime() {
{

View File

@ -145,12 +145,7 @@ pub fn perform_search(
search.sort_criteria(sort);
}
let milli::SearchResult {
documents_ids,
matching_words,
candidates,
..
} = search.execute()?;
let milli::SearchResult { documents_ids, matching_words, candidates, .. } = search.execute()?;
let fields_ids_map = index.fields_ids_map(&rtxn).unwrap();
@ -240,11 +235,7 @@ pub fn perform_search(
insert_geo_distance(sort, &mut document);
}
let hit = SearchHit {
document,
formatted,
matches_position,
};
let hit = SearchHit { document, formatted, matches_position };
documents.push(hit);
}
@ -289,10 +280,7 @@ fn insert_geo_distance(sorts: &[String], document: &mut Document) {
};
if let Some(capture_group) = sorts.iter().find_map(|sort| GEO_REGEX.captures(sort)) {
// TODO: TAMO: milli encountered an internal error, what do we want to do?
let base = [
capture_group[1].parse().unwrap(),
capture_group[2].parse().unwrap(),
];
let base = [capture_group[1].parse().unwrap(), capture_group[2].parse().unwrap()];
let geo_point = &document.get("_geo").unwrap_or(&json!(null));
if let Some((lat, lng)) = geo_point["lat"].as_f64().zip(geo_point["lng"].as_f64()) {
let distance = milli::distance_between_two_points(&base, &[lat, lng]);
@ -341,10 +329,7 @@ fn add_highlight_to_formatted_options(
displayed_ids: &BTreeSet<FieldId>,
) {
for attr in attr_to_highlight {
let new_format = FormatOptions {
highlight: true,
crop: None,
};
let new_format = FormatOptions { highlight: true, crop: None };
if attr == "*" {
for id in displayed_ids {
@ -383,10 +368,7 @@ fn add_crop_to_formatted_options(
formatted_options
.entry(*id)
.and_modify(|f| f.crop = Some(attr_len))
.or_insert(FormatOptions {
highlight: false,
crop: Some(attr_len),
});
.or_insert(FormatOptions { highlight: false, crop: Some(attr_len) });
}
}
@ -395,10 +377,7 @@ fn add_crop_to_formatted_options(
formatted_options
.entry(id)
.and_modify(|f| f.crop = Some(attr_len))
.or_insert(FormatOptions {
highlight: false,
crop: Some(attr_len),
});
.or_insert(FormatOptions { highlight: false, crop: Some(attr_len) });
}
}
}
@ -409,10 +388,7 @@ fn add_non_formatted_ids_to_formatted_options(
to_retrieve_ids: &BTreeSet<FieldId>,
) {
for id in to_retrieve_ids {
formatted_options.entry(*id).or_insert(FormatOptions {
highlight: false,
crop: None,
});
formatted_options.entry(*id).or_insert(FormatOptions { highlight: false, crop: None });
}
}
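Note: the `entry(..).and_modify(..).or_insert(..)` chains above are the standard map upsert: update the format options when the field id is already present, insert defaults otherwise. A sketch with a simplified `FormatOptions`:

```rust
use std::collections::BTreeMap;

#[derive(Debug, Clone, Copy)]
struct FormatOptions {
    highlight: bool,
    crop: Option<usize>,
}

fn main() {
    let mut formatted: BTreeMap<u16, FormatOptions> = BTreeMap::new();
    // Upsert per field id: set the crop length if options already exist,
    // otherwise insert crop-only defaults.
    for id in [1u16, 2, 1] {
        formatted
            .entry(id)
            .and_modify(|f| f.crop = Some(10))
            .or_insert(FormatOptions { highlight: false, crop: Some(10) });
    }
    println!("{formatted:?}");
}
```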
@ -426,10 +402,7 @@ fn make_document(
// recreate the original json
for (key, value) in obkv.iter() {
let value = serde_json::from_slice(value)?;
let key = field_ids_map
.name(key)
.expect("Missing field name")
.to_string();
let key = field_ids_map.name(key).expect("Missing field name").to_string();
document.insert(key, value);
}
@ -455,9 +428,8 @@ fn format_fields<'a, A: AsRef<[u8]>>(
let mut document = document.clone();
// select the attributes to retrieve
let displayable_names = displayable_ids
.iter()
.map(|&fid| field_ids_map.name(fid).expect("Missing field name"));
let displayable_names =
displayable_ids.iter().map(|&fid| field_ids_map.name(fid).expect("Missing field name"));
permissive_json_pointer::map_leaf_values(&mut document, displayable_names, |key, value| {
// To get the formatting option of each key we need to see all the rules that applies
// to the value and merge them together. eg. If a user said he wanted to highlight `doggo`
@ -473,13 +445,7 @@ fn format_fields<'a, A: AsRef<[u8]>>(
.reduce(|acc, option| acc.merge(option));
let mut infos = Vec::new();
*value = format_value(
std::mem::take(value),
builder,
format,
&mut infos,
compute_matches,
);
*value = format_value(std::mem::take(value), builder, format, &mut infos, compute_matches);
if let Some(matches) = matches_position.as_mut() {
if !infos.is_empty() {

View File

@ -1,7 +1,9 @@
use crate::common::Server;
use std::{thread, time};
use assert_json_diff::assert_json_include;
use serde_json::{json, Value};
use std::{thread, time};
use crate::common::Server;
#[actix_rt::test]
async fn add_valid_api_key() {

View File

@ -1,11 +1,13 @@
use crate::common::Server;
use std::collections::{HashMap, HashSet};
use ::time::format_description::well_known::Rfc3339;
use maplit::{hashmap, hashset};
use once_cell::sync::Lazy;
use serde_json::{json, Value};
use std::collections::{HashMap, HashSet};
use time::{Duration, OffsetDateTime};
use crate::common::Server;
pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'static str>>> =
Lazy::new(|| {
let mut authorizations = hashmap! {
@ -57,21 +59,14 @@ pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'
};
if cfg!(feature = "metrics") {
authorizations.insert(
("GET", "/metrics"),
hashset! {"metrics.get", "metrics.*", "*"},
);
authorizations.insert(("GET", "/metrics"), hashset! {"metrics.get", "metrics.*", "*"});
}
authorizations
});
pub static ALL_ACTIONS: Lazy<HashSet<&'static str>> = Lazy::new(|| {
AUTHORIZATIONS
.values()
.cloned()
.reduce(|l, r| l.union(&r).cloned().collect())
.unwrap()
AUTHORIZATIONS.values().cloned().reduce(|l, r| l.union(&r).cloned().collect()).unwrap()
});
static INVALID_RESPONSE: Lazy<Value> = Lazy::new(|| {
@ -109,13 +104,7 @@ async fn error_access_expired_key() {
for (method, route) in AUTHORIZATIONS.keys() {
let (response, code) = server.dummy_request(method, route).await;
assert_eq!(
response,
INVALID_RESPONSE.clone(),
"on route: {:?} - {:?}",
method,
route
);
assert_eq!(response, INVALID_RESPONSE.clone(), "on route: {:?} - {:?}", method, route);
assert_eq!(403, code, "{:?}", &response);
}
}
@ -146,13 +135,7 @@ async fn error_access_unauthorized_index() {
{
let (response, code) = server.dummy_request(method, route).await;
assert_eq!(
response,
INVALID_RESPONSE.clone(),
"on route: {:?} - {:?}",
method,
route
);
assert_eq!(response, INVALID_RESPONSE.clone(), "on route: {:?} - {:?}", method, route);
assert_eq!(403, code, "{:?}", &response);
}
}
@ -180,13 +163,7 @@ async fn error_access_unauthorized_action() {
server.use_api_key(&key);
let (response, code) = server.dummy_request(method, route).await;
assert_eq!(
response,
INVALID_RESPONSE.clone(),
"on route: {:?} - {:?}",
method,
route
);
assert_eq!(response, INVALID_RESPONSE.clone(), "on route: {:?} - {:?}", method, route);
assert_eq!(403, code, "{:?}", &response);
}
}
@ -201,13 +178,7 @@ async fn access_authorized_master_key() {
for ((method, route), _) in AUTHORIZATIONS.iter() {
let (response, code) = server.dummy_request(method, route).await;
assert_ne!(
response,
INVALID_RESPONSE.clone(),
"on route: {:?} - {:?}",
method,
route
);
assert_ne!(response, INVALID_RESPONSE.clone(), "on route: {:?} - {:?}", method, route);
assert_ne!(code, 403);
}
}

View File

@ -3,11 +3,11 @@ mod authorization;
mod payload;
mod tenant_token;
use crate::common::Server;
use actix_web::http::StatusCode;
use serde_json::{json, Value};
use crate::common::Server;
impl Server {
pub fn use_api_key(&mut self, api_key: impl AsRef<str>) {
self.service.api_key = Some(api_key.as_ref().to_string());

View File

@ -1,7 +1,8 @@
use crate::common::Server;
use actix_web::test;
use serde_json::{json, Value};
use crate::common::Server;
#[actix_rt::test]
async fn error_api_key_bad_content_types() {
let content = json!({
@ -36,10 +37,7 @@ async fn error_api_key_bad_content_types() {
);
assert_eq!(response["code"], "invalid_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(
response["link"],
"https://docs.meilisearch.com/errors#invalid_content_type"
);
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");
// patch
let req = test::TestRequest::patch()
@ -61,10 +59,7 @@ async fn error_api_key_bad_content_types() {
);
assert_eq!(response["code"], "invalid_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(
response["link"],
"https://docs.meilisearch.com/errors#invalid_content_type"
);
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");
}
#[actix_rt::test]
@ -101,10 +96,7 @@ async fn error_api_key_empty_content_types() {
);
assert_eq!(response["code"], "invalid_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(
response["link"],
"https://docs.meilisearch.com/errors#invalid_content_type"
);
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");
// patch
let req = test::TestRequest::patch()
@ -126,10 +118,7 @@ async fn error_api_key_empty_content_types() {
);
assert_eq!(response["code"], "invalid_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(
response["link"],
"https://docs.meilisearch.com/errors#invalid_content_type"
);
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");
}
#[actix_rt::test]
@ -165,10 +154,7 @@ async fn error_api_key_missing_content_types() {
);
assert_eq!(response["code"], "missing_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(
response["link"],
"https://docs.meilisearch.com/errors#missing_content_type"
);
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing_content_type");
// patch
let req = test::TestRequest::patch()
@ -189,10 +175,7 @@ async fn error_api_key_missing_content_types() {
);
assert_eq!(response["code"], "missing_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(
response["link"],
"https://docs.meilisearch.com/errors#missing_content_type"
);
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing_content_type");
}
#[actix_rt::test]
@ -217,10 +200,7 @@ async fn error_api_key_empty_payload() {
assert_eq!(status_code, 400);
assert_eq!(response["code"], json!("missing_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#missing_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
assert_eq!(response["message"], json!(r#"A json payload is missing."#));
// patch
@ -237,10 +217,7 @@ async fn error_api_key_empty_payload() {
assert_eq!(status_code, 400);
assert_eq!(response["code"], json!("missing_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#missing_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
assert_eq!(response["message"], json!(r#"A json payload is missing."#));
}
@ -266,10 +243,7 @@ async fn error_api_key_malformed_payload() {
assert_eq!(status_code, 400);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#malformed_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
assert_eq!(
response["message"],
json!(
@ -291,10 +265,7 @@ async fn error_api_key_malformed_payload() {
assert_eq!(status_code, 400);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#malformed_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
assert_eq!(
response["message"],
json!(

View File

@ -1,12 +1,13 @@
use crate::common::Server;
use std::collections::HashMap;
use ::time::format_description::well_known::Rfc3339;
use maplit::hashmap;
use once_cell::sync::Lazy;
use serde_json::{json, Value};
use std::collections::HashMap;
use time::{Duration, OffsetDateTime};
use super::authorization::{ALL_ACTIONS, AUTHORIZATIONS};
use crate::common::Server;
fn generate_tenant_token(
parent_uid: impl AsRef<str>,
@ -17,11 +18,7 @@ fn generate_tenant_token(
let parent_uid = parent_uid.as_ref();
body.insert("apiKeyUid", json!(parent_uid));
encode(
&Header::default(),
&body,
&EncodingKey::from_secret(parent_key.as_ref().as_bytes()),
)
encode(&Header::default(), &body, &EncodingKey::from_secret(parent_key.as_ref().as_bytes()))
.unwrap()
}
@ -513,18 +510,14 @@ async fn error_access_expired_parent_key() {
server.use_api_key(&web_token);
// test search request while parent_key is not expired
let (response, code) = server
.dummy_request("POST", "/indexes/products/search")
.await;
let (response, code) = server.dummy_request("POST", "/indexes/products/search").await;
assert_ne!(response, INVALID_RESPONSE.clone());
assert_ne!(code, 403);
// wait until the key is expired.
thread::sleep(time::Duration::new(1, 0));
let (response, code) = server
.dummy_request("POST", "/indexes/products/search")
.await;
let (response, code) = server.dummy_request("POST", "/indexes/products/search").await;
assert_eq!(response, INVALID_RESPONSE.clone());
assert_eq!(code, 403);
}
@ -556,9 +549,7 @@ async fn error_access_modified_token() {
server.use_api_key(&web_token);
// test search request while web_token is valid
let (response, code) = server
.dummy_request("POST", "/indexes/products/search")
.await;
let (response, code) = server.dummy_request("POST", "/indexes/products/search").await;
assert_ne!(response, INVALID_RESPONSE.clone());
assert_ne!(code, 403);
@ -576,9 +567,7 @@ async fn error_access_modified_token() {
.join(".");
server.use_api_key(&altered_token);
let (response, code) = server
.dummy_request("POST", "/indexes/products/search")
.await;
let (response, code) = server.dummy_request("POST", "/indexes/products/search").await;
assert_eq!(response, INVALID_RESPONSE.clone());
assert_eq!(code, 403);
}

View File

@ -1,9 +1,10 @@
use std::io::{Read, Write};
use actix_http::header::TryIntoHeaderPair;
use bytes::Bytes;
use flate2::read::{GzDecoder, ZlibDecoder};
use flate2::write::{GzEncoder, ZlibEncoder};
use flate2::Compression;
use std::io::{Read, Write};
#[derive(Clone, Copy)]
pub enum Encoder {
@ -18,24 +19,18 @@ impl Encoder {
match self {
Self::Gzip => {
let mut encoder = GzEncoder::new(Vec::new(), Compression::default());
encoder
.write_all(&body.into())
.expect("Failed to encode request body");
encoder.write_all(&body.into()).expect("Failed to encode request body");
encoder.finish().expect("Failed to encode request body")
}
Self::Deflate => {
let mut encoder = ZlibEncoder::new(Vec::new(), Compression::default());
encoder
.write_all(&body.into())
.expect("Failed to encode request body");
encoder.write_all(&body.into()).expect("Failed to encode request body");
encoder.finish().unwrap()
}
Self::Plain => Vec::from(body.into()),
Self::Brotli => {
let mut encoder = brotli::CompressorWriter::new(Vec::new(), 32 * 1024, 3, 22);
encoder
.write_all(&body.into())
.expect("Failed to encode request body");
encoder.write_all(&body.into()).expect("Failed to encode request body");
encoder.flush().expect("Failed to encode request body");
encoder.into_inner()
}
@ -57,9 +52,7 @@ impl Encoder {
.expect("Invalid zlib stream");
}
Self::Plain => {
buffer
.write_all(input.as_ref())
.expect("Unexpected memory copying issue");
buffer.write_all(input.as_ref()).expect("Unexpected memory copying issue");
}
Self::Brotli => {
brotli::Decompressor::new(input.as_ref(), 4096)
@ -80,8 +73,6 @@ impl Encoder {
}
pub fn iterator() -> impl Iterator<Item = Self> {
[Self::Plain, Self::Gzip, Self::Deflate, Self::Brotli]
.iter()
.copied()
[Self::Plain, Self::Gzip, Self::Deflate, Self::Brotli].iter().copied()
}
}

View File

@ -1,17 +1,14 @@
use std::{
fmt::Write,
panic::{catch_unwind, resume_unwind, UnwindSafe},
time::Duration,
};
use std::fmt::Write;
use std::panic::{catch_unwind, resume_unwind, UnwindSafe};
use std::time::Duration;
use actix_web::http::StatusCode;
use serde_json::{json, Value};
use tokio::time::sleep;
use urlencoding::encode as urlencode;
use super::service::Service;
use super::encoder::Encoder;
use super::service::Service;
pub struct Index<'a> {
pub uid: String,
@ -28,10 +25,8 @@ impl Index<'_> {
pub async fn load_test_set(&self) -> u64 {
let url = format!("/indexes/{}/documents", urlencode(self.uid.as_ref()));
let (response, code) = self
.service
.post_str(url, include_str!("../assets/test_set.json"))
.await;
let (response, code) =
self.service.post_str(url, include_str!("../assets/test_set.json")).await;
assert_eq!(code, 202);
let update_id = response["taskUid"].as_i64().unwrap();
self.wait_task(update_id as u64).await;
@ -43,9 +38,7 @@ impl Index<'_> {
"uid": self.uid,
"primaryKey": primary_key,
});
self.service
.post_encoded("/indexes", body, self.encoder)
.await
self.service.post_encoded("/indexes", body, self.encoder).await
}
pub async fn update(&self, primary_key: Option<&str>) -> (Value, StatusCode) {
@ -68,16 +61,12 @@ impl Index<'_> {
primary_key: Option<&str>,
) -> (Value, StatusCode) {
let url = match primary_key {
Some(key) => format!(
"/indexes/{}/documents?primaryKey={}",
urlencode(self.uid.as_ref()),
key
),
Some(key) => {
format!("/indexes/{}/documents?primaryKey={}", urlencode(self.uid.as_ref()), key)
}
None => format!("/indexes/{}/documents", urlencode(self.uid.as_ref())),
};
self.service
.post_encoded(url, documents, self.encoder)
.await
self.service.post_encoded(url, documents, self.encoder).await
}
pub async fn update_documents(
@ -86,11 +75,9 @@ impl Index<'_> {
primary_key: Option<&str>,
) -> (Value, StatusCode) {
let url = match primary_key {
Some(key) => format!(
"/indexes/{}/documents?primaryKey={}",
urlencode(self.uid.as_ref()),
key
),
Some(key) => {
format!("/indexes/{}/documents?primaryKey={}", urlencode(self.uid.as_ref()), key)
}
None => format!("/indexes/{}/documents", urlencode(self.uid.as_ref())),
};
self.service.put_encoded(url, documents, self.encoder).await
@ -174,13 +161,8 @@ impl Index<'_> {
}
pub async fn delete_batch(&self, ids: Vec<u64>) -> (Value, StatusCode) {
let url = format!(
"/indexes/{}/documents/delete-batch",
urlencode(self.uid.as_ref())
);
self.service
.post_encoded(url, serde_json::to_value(&ids).unwrap(), self.encoder)
.await
let url = format!("/indexes/{}/documents/delete-batch", urlencode(self.uid.as_ref()));
self.service.post_encoded(url, serde_json::to_value(&ids).unwrap(), self.encoder).await
}
pub async fn settings(&self) -> (Value, StatusCode) {
@ -190,9 +172,7 @@ impl Index<'_> {
pub async fn update_settings(&self, settings: Value) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings", urlencode(self.uid.as_ref()));
self.service
.patch_encoded(url, settings, self.encoder)
.await
self.service.patch_encoded(url, settings, self.encoder).await
}
pub async fn delete_settings(&self) -> (Value, StatusCode) {
@ -232,29 +212,19 @@ impl Index<'_> {
pub async fn search_get(&self, query: Value) -> (Value, StatusCode) {
let params = yaup::to_string(&query).unwrap();
let url = format!(
"/indexes/{}/search?{}",
urlencode(self.uid.as_ref()),
params
);
let url = format!("/indexes/{}/search?{}", urlencode(self.uid.as_ref()), params);
self.service.get(url).await
}
pub async fn update_distinct_attribute(&self, value: Value) -> (Value, StatusCode) {
let url = format!(
"/indexes/{}/settings/{}",
urlencode(self.uid.as_ref()),
"distinct-attribute"
);
let url =
format!("/indexes/{}/settings/{}", urlencode(self.uid.as_ref()), "distinct-attribute");
self.service.put_encoded(url, value, self.encoder).await
}
pub async fn get_distinct_attribute(&self) -> (Value, StatusCode) {
let url = format!(
"/indexes/{}/settings/{}",
urlencode(self.uid.as_ref()),
"distinct-attribute"
);
let url =
format!("/indexes/{}/settings/{}", urlencode(self.uid.as_ref()), "distinct-attribute");
self.service.get(url).await
}
}
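A standalone sketch of the URL construction that `search_get` and the settings helpers above rely on, assuming the yaup and urlencoding crates the suite already uses; the index uid is a made-up value for illustration.

use serde_json::json;
use urlencoding::encode as urlencode;

fn main() {
    let query = json!({ "q": "glass", "limit": 1 });
    // Serialize the query object into URL parameters, then splice them into
    // the route, percent-encoding the index uid as the helpers above do.
    let params = yaup::to_string(&query).unwrap();
    let url = format!("/indexes/{}/search?{}", urlencode("my index"), params);
    println!("{url}"); // the exact parameter encoding is up to yaup
}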

View File

@ -15,18 +15,10 @@ macro_rules! test_post_get_search {
let get_query: meilisearch_http::routes::search::SearchQuery = post_query.into();
let get_query = ::serde_url_params::to_string(&get_query).unwrap();
let ($response, $status_code) = $server.search_get(&get_query).await;
let _ = ::std::panic::catch_unwind(|| $block).map_err(|e| {
panic!(
"panic in get route: {:?}",
e.downcast_ref::<&str>().unwrap()
)
});
let _ = ::std::panic::catch_unwind(|| $block)
.map_err(|e| panic!("panic in get route: {:?}", e.downcast_ref::<&str>().unwrap()));
let ($response, $status_code) = $server.search_post($query).await;
let _ = ::std::panic::catch_unwind(|| $block).map_err(|e| {
panic!(
"panic in post route: {:?}",
e.downcast_ref::<&str>().unwrap()
)
});
let _ = ::std::panic::catch_unwind(|| $block)
.map_err(|e| panic!("panic in post route: {:?}", e.downcast_ref::<&str>().unwrap()));
};
}
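The macro above re-raises panics from the shared assertion block with a route-specific message; here is a minimal sketch of that pattern in isolation, with no Meilisearch types involved.

use std::panic::catch_unwind;

fn main() {
    // Run the assertions; on panic, re-panic with context about which route
    // was being exercised. Note that string-literal panics carry a `&str`
    // payload, while formatted panics (like a failing assert_eq!) carry a
    // `String`, so a real helper may need to try both downcasts.
    let result = catch_unwind(|| assert_eq!(1 + 1, 2));
    let _ = result
        .map_err(|e| panic!("panic in get route: {:?}", e.downcast_ref::<&str>().unwrap()));
}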

View File

@ -1,23 +1,22 @@
#![allow(dead_code)]
use actix_http::body::MessageBody;
use actix_web::dev::ServiceResponse;
use clap::Parser;
use std::path::Path;
use std::sync::Arc;
use actix_http::body::MessageBody;
use actix_web::dev::ServiceResponse;
use actix_web::http::StatusCode;
use byte_unit::{Byte, ByteUnit};
use clap::Parser;
use meilisearch_http::option::{IndexerOpts, MaxMemory, Opt};
use meilisearch_http::{analytics, create_app, setup_meilisearch};
use once_cell::sync::Lazy;
use serde_json::Value;
use tempfile::TempDir;
use meilisearch_http::option::{IndexerOpts, MaxMemory, Opt};
use meilisearch_http::{analytics, create_app, setup_meilisearch};
use crate::common::encoder::Encoder;
use super::index::Index;
use super::service::Service;
use crate::common::encoder::Encoder;
pub struct Server {
pub service: Service,
@ -40,17 +39,10 @@ impl Server {
let options = default_settings(dir.path());
let (index_scheduler, auth) = setup_meilisearch(&options).unwrap();
let service = Service {
index_scheduler: Arc::new(index_scheduler),
auth,
options,
api_key: None,
};
let service =
Service { index_scheduler: Arc::new(index_scheduler), auth, options, api_key: None };
Server {
service,
_dir: Some(dir),
}
Server { service, _dir: Some(dir) }
}
pub async fn new_auth_with_options(mut options: Opt, dir: TempDir) -> Self {
@ -63,17 +55,10 @@ impl Server {
options.master_key = Some("MASTER_KEY".to_string());
let (index_scheduler, auth) = setup_meilisearch(&options).unwrap();
let service = Service {
index_scheduler: Arc::new(index_scheduler),
auth,
options,
api_key: None,
};
let service =
Service { index_scheduler: Arc::new(index_scheduler), auth, options, api_key: None };
Server {
service,
_dir: Some(dir),
}
Server { service, _dir: Some(dir) }
}
pub async fn new_auth() -> Self {
@ -84,17 +69,10 @@ impl Server {
pub async fn new_with_options(options: Opt) -> Result<Self, anyhow::Error> {
let (index_scheduler, auth) = setup_meilisearch(&options)?;
let service = Service {
index_scheduler: Arc::new(index_scheduler),
auth,
options,
api_key: None,
};
let service =
Service { index_scheduler: Arc::new(index_scheduler), auth, options, api_key: None };
Ok(Server {
service,
_dir: None,
})
Ok(Server { service, _dir: None })
}
pub async fn init_web_app(
@ -120,11 +98,7 @@ impl Server {
}
pub fn index_with_encoder(&self, uid: impl AsRef<str>, encoder: Encoder) -> Index<'_> {
Index {
uid: uid.as_ref().to_string(),
service: &self.service,
encoder,
}
Index { uid: uid.as_ref().to_string(), service: &self.service, encoder }
}
pub async fn list_indexes(
@ -142,9 +116,7 @@ impl Server {
.map(|(offset, limit)| format!("{offset}&{limit}"))
.or_else(|| offset.xor(limit));
if let Some(query_parameter) = query_parameter {
self.service
.get(format!("/indexes?{query_parameter}"))
.await
self.service.get(format!("/indexes?{query_parameter}")).await
} else {
self.service.get("/indexes").await
}
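The `list_indexes` plumbing above combines two optional query parameters; a self-contained sketch of that combination logic follows, with hypothetical string values standing in for the real `offset=`/`limit=` fragments.

fn main() {
    let offset = Some("offset=5".to_string());
    let limit: Option<String> = None;

    // Both present: join with `&`. Exactly one present: keep it (Option::xor).
    // Neither: fall through to the bare `/indexes` route.
    let query_parameter = offset
        .as_ref()
        .zip(limit.as_ref())
        .map(|(offset, limit)| format!("{offset}&{limit}"))
        .or_else(|| offset.clone().xor(limit.clone()));

    let url = match query_parameter {
        Some(q) => format!("/indexes?{q}"),
        None => "/indexes".to_string(),
    };
    assert_eq!(url, "/indexes?offset=5");
}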

View File

@ -1,14 +1,15 @@
use std::sync::Arc;
use actix_web::http::header::ContentType;
use actix_web::http::StatusCode;
use actix_web::test;
use actix_web::test::TestRequest;
use actix_web::{http::StatusCode, test};
use index_scheduler::IndexScheduler;
use meilisearch_auth::AuthController;
use meilisearch_http::{analytics, create_app, Opt};
use serde_json::Value;
use crate::common::encoder::Encoder;
use meilisearch_http::{analytics, create_app, Opt};
pub struct Service {
pub index_scheduler: Arc<IndexScheduler>,

View File

@ -2,10 +2,11 @@
mod common;
use crate::common::Server;
use actix_web::test;
use serde_json::{json, Value};
use crate::common::Server;
enum HttpVerb {
Put,
Patch,
@ -75,11 +76,7 @@ async fn error_json_bad_content_type() {
"calling the route `{}` with a content-type of json isn't supposed to throw a bad media type error", route);
// No content-type.
let req = verb
.test_request()
.uri(route)
.set_payload(document)
.to_request();
let req = verb.test_request().uri(route).set_payload(document).to_request();
let res = test::call_service(&app, req).await;
let status_code = res.status();
let body = test::read_body(res).await;

View File

@ -1,9 +1,10 @@
use crate::common::{GetAllDocumentsOptions, Server};
use actix_web::test;
use serde_json::{json, Value};
use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime;
use crate::common::encoder::Encoder;
use serde_json::{json, Value};
use time::{format_description::well_known::Rfc3339, OffsetDateTime};
use crate::common::{GetAllDocumentsOptions, Server};
/// This is the basic usage of our API and every other test uses the content-type application/json
#[actix_rt::test]
@ -192,10 +193,7 @@ async fn error_add_documents_test_bad_content_types() {
);
assert_eq!(response["code"], "invalid_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(
response["link"],
"https://docs.meilisearch.com/errors#invalid_content_type"
);
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");
// put
let req = test::TestRequest::put()
@ -216,10 +214,7 @@ async fn error_add_documents_test_bad_content_types() {
);
assert_eq!(response["code"], "invalid_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(
response["link"],
"https://docs.meilisearch.com/errors#invalid_content_type"
);
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");
}
/// missing content-type must be refused
@ -253,10 +248,7 @@ async fn error_add_documents_test_no_content_type() {
);
assert_eq!(response["code"], "missing_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(
response["link"],
"https://docs.meilisearch.com/errors#missing_content_type"
);
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing_content_type");
// put
let req = test::TestRequest::put()
@ -276,10 +268,7 @@ async fn error_add_documents_test_no_content_type() {
);
assert_eq!(response["code"], "missing_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(
response["link"],
"https://docs.meilisearch.com/errors#missing_content_type"
);
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing_content_type");
}
#[actix_rt::test]
@ -308,10 +297,7 @@ async fn error_add_malformed_csv_documents() {
);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#malformed_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
// put
let req = test::TestRequest::put()
@ -332,10 +318,7 @@ async fn error_add_malformed_csv_documents() {
);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#malformed_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
}
#[actix_rt::test]
@ -364,10 +347,7 @@ async fn error_add_malformed_json_documents() {
);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#malformed_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
// put
let req = test::TestRequest::put()
@ -388,10 +368,7 @@ async fn error_add_malformed_json_documents() {
);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#malformed_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
// truncate
@ -416,10 +393,7 @@ async fn error_add_malformed_json_documents() {
);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#malformed_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
// add one more char to the long string to test that the truncation works.
let document = format!("\"{}m\"", long);
@ -438,10 +412,7 @@ async fn error_add_malformed_json_documents() {
);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#malformed_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
}
#[actix_rt::test]
@ -470,10 +441,7 @@ async fn error_add_malformed_ndjson_documents() {
);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#malformed_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
// put
let req = test::TestRequest::put()
@ -492,10 +460,7 @@ async fn error_add_malformed_ndjson_documents() {
);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#malformed_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
}
#[actix_rt::test]
@ -519,10 +484,7 @@ async fn error_add_missing_payload_csv_documents() {
assert_eq!(response["message"], json!(r#"A csv payload is missing."#));
assert_eq!(response["code"], json!("missing_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#missing_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
// put
let req = test::TestRequest::put()
@ -538,10 +500,7 @@ async fn error_add_missing_payload_csv_documents() {
assert_eq!(response["message"], json!(r#"A csv payload is missing."#));
assert_eq!(response["code"], json!("missing_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#missing_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
}
#[actix_rt::test]
@ -565,10 +524,7 @@ async fn error_add_missing_payload_json_documents() {
assert_eq!(response["message"], json!(r#"A json payload is missing."#));
assert_eq!(response["code"], json!("missing_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#missing_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
// put
let req = test::TestRequest::put()
@ -584,10 +540,7 @@ async fn error_add_missing_payload_json_documents() {
assert_eq!(response["message"], json!(r#"A json payload is missing."#));
assert_eq!(response["code"], json!("missing_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#missing_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
}
#[actix_rt::test]
@ -608,16 +561,10 @@ async fn error_add_missing_payload_ndjson_documents() {
let body = test::read_body(res).await;
let response: Value = serde_json::from_slice(&body).unwrap_or_default();
assert_eq!(status_code, 400);
assert_eq!(
response["message"],
json!(r#"A ndjson payload is missing."#)
);
assert_eq!(response["message"], json!(r#"A ndjson payload is missing."#));
assert_eq!(response["code"], json!("missing_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#missing_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
// put
let req = test::TestRequest::put()
@ -630,16 +577,10 @@ async fn error_add_missing_payload_ndjson_documents() {
let body = test::read_body(res).await;
let response: Value = serde_json::from_slice(&body).unwrap_or_default();
assert_eq!(status_code, 400);
assert_eq!(
response["message"],
json!(r#"A ndjson payload is missing."#)
);
assert_eq!(response["message"], json!(r#"A ndjson payload is missing."#));
assert_eq!(response["code"], json!("missing_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#missing_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
}
#[actix_rt::test]
@ -792,10 +733,7 @@ async fn add_larger_dataset() {
assert_eq!(response["details"]["indexedDocuments"], 77);
assert_eq!(response["details"]["receivedDocuments"], 77);
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
limit: Some(1000),
..Default::default()
})
.get_all_documents(GetAllDocumentsOptions { limit: Some(1000), ..Default::default() })
.await;
assert_eq!(code, 200, "failed with `{}`", response);
assert_eq!(response["results"].as_array().unwrap().len(), 77);
@ -900,9 +838,7 @@ async fn add_documents_invalid_geo_field() {
let server = Server::new().await;
let index = server.index("test");
index.create(Some("id")).await;
index
.update_settings(json!({"sortableAttributes": ["_geo"]}))
.await;
index.update_settings(json!({"sortableAttributes": ["_geo"]})).await;
let documents = json!([
{
@ -1045,10 +981,7 @@ async fn batch_several_documents_addition() {
// Check that there are exactly 120 documents (150 - 30) in the index.
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
limit: Some(200),
..Default::default()
})
.get_all_documents(GetAllDocumentsOptions { limit: Some(200), ..Default::default() })
.await;
assert_eq!(code, 200, "failed with `{}`", response);
assert_eq!(response["results"].as_array().unwrap().len(), 120);
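The payload-validation tests above all follow the same shape: read the body, parse it as serde_json::Value, and index into the error payload. Here is a dependency-free sketch of that assertion style, using a hand-written body in place of a real server response.

use serde_json::{json, Value};

fn main() {
    let body = br#"{"code": "invalid_content_type",
                    "type": "invalid_request",
                    "link": "https://docs.meilisearch.com/errors#invalid_content_type"}"#;
    let response: Value = serde_json::from_slice(body).unwrap_or_default();

    // Indexing a Value with a missing key yields Value::Null, so these
    // assertions fail cleanly instead of panicking on absent fields.
    assert_eq!(response["code"], "invalid_content_type");
    assert_eq!(response["type"], "invalid_request");
    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#invalid_content_type"));
}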

View File

@ -29,9 +29,7 @@ async fn delete_one_unexisting_document() {
async fn delete_one_document() {
let server = Server::new().await;
let index = server.index("test");
index
.add_documents(json!([{ "id": 0, "content": "foobar" }]), None)
.await;
index.add_documents(json!([{ "id": 0, "content": "foobar" }]), None).await;
index.wait_task(0).await;
let (_response, code) = server.index("test").delete_document(0).await;
assert_eq!(code, 202);
@ -68,9 +66,7 @@ async fn clear_all_documents() {
assert_eq!(code, 202);
let _update = index.wait_task(1).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions::default())
.await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert!(response["results"].as_array().unwrap().is_empty());
}
@ -85,9 +81,7 @@ async fn clear_all_documents_empty_index() {
assert_eq!(code, 202);
let _update = index.wait_task(0).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions::default())
.await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert!(response["results"].as_array().unwrap().is_empty());
}
@ -121,9 +115,7 @@ async fn delete_batch() {
assert_eq!(code, 202);
let _update = index.wait_task(1).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions::default())
.await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
assert_eq!(response["results"][0]["id"], json!(3));
@ -139,9 +131,7 @@ async fn delete_no_document_batch() {
assert_eq!(code, 202, "{}", _response);
let _update = index.wait_task(1).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions::default())
.await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 3);
}

View File

@ -1,11 +1,11 @@
use crate::common::{GetAllDocumentsOptions, GetDocumentOptions, Server};
use actix_web::test;
use http::header::ACCEPT_ENCODING;
use crate::common::encoder::Encoder;
use serde_json::{json, Value};
use urlencoding::encode as urlencode;
use crate::common::encoder::Encoder;
use crate::common::{GetAllDocumentsOptions, GetDocumentOptions, Server};
// TODO: partial test since we are testing error, and error is not yet fully implemented in
// transplant
#[actix_rt::test]
@ -58,14 +58,8 @@ async fn get_document() {
})
);
let (response, code) = index
.get_document(
0,
Some(GetDocumentOptions {
fields: Some(vec!["id"]),
}),
)
.await;
let (response, code) =
index.get_document(0, Some(GetDocumentOptions { fields: Some(vec!["id"]) })).await;
assert_eq!(code, 200);
assert_eq!(
response,
@ -75,12 +69,7 @@ async fn get_document() {
);
let (response, code) = index
.get_document(
0,
Some(GetDocumentOptions {
fields: Some(vec!["nested.content"]),
}),
)
.get_document(0, Some(GetDocumentOptions { fields: Some(vec!["nested.content"]) }))
.await;
assert_eq!(code, 200);
assert_eq!(
@ -94,10 +83,8 @@ async fn get_document() {
#[actix_rt::test]
async fn error_get_unexisting_index_all_documents() {
let server = Server::new().await;
let (response, code) = server
.index("test")
.get_all_documents(GetAllDocumentsOptions::default())
.await;
let (response, code) =
server.index("test").get_all_documents(GetAllDocumentsOptions::default()).await;
let expected_response = json!({
"message": "Index `test` not found.",
@ -119,9 +106,7 @@ async fn get_no_document() {
index.wait_task(0).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions::default())
.await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert!(response["results"].as_array().unwrap().is_empty());
}
@ -132,9 +117,7 @@ async fn get_all_documents_no_options() {
let index = server.index("test");
index.load_test_set().await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions::default())
.await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
let arr = response["results"].as_array().unwrap();
assert_eq!(arr.len(), 20);
@ -192,10 +175,7 @@ async fn test_get_all_documents_limit() {
index.load_test_set().await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
limit: Some(5),
..Default::default()
})
.get_all_documents(GetAllDocumentsOptions { limit: Some(5), ..Default::default() })
.await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 5);
@ -212,10 +192,7 @@ async fn test_get_all_documents_offset() {
index.load_test_set().await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
offset: Some(5),
..Default::default()
})
.get_all_documents(GetAllDocumentsOptions { offset: Some(5), ..Default::default() })
.await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 20);
@ -338,24 +315,12 @@ async fn get_document_s_nested_attributes_to_retrieve() {
assert_eq!(code, 202);
index.wait_task(1).await;
let (response, code) = index
.get_document(
0,
Some(GetDocumentOptions {
fields: Some(vec!["content"]),
}),
)
.await;
let (response, code) =
index.get_document(0, Some(GetDocumentOptions { fields: Some(vec!["content"]) })).await;
assert_eq!(code, 200);
assert_eq!(response, json!({}));
let (response, code) = index
.get_document(
1,
Some(GetDocumentOptions {
fields: Some(vec!["content"]),
}),
)
.await;
let (response, code) =
index.get_document(1, Some(GetDocumentOptions { fields: Some(vec!["content"]) })).await;
assert_eq!(code, 200);
assert_eq!(
response,
@ -368,12 +333,7 @@ async fn get_document_s_nested_attributes_to_retrieve() {
);
let (response, code) = index
.get_document(
0,
Some(GetDocumentOptions {
fields: Some(vec!["content.truc"]),
}),
)
.get_document(0, Some(GetDocumentOptions { fields: Some(vec!["content.truc"]) }))
.await;
assert_eq!(code, 200);
assert_eq!(
@ -383,12 +343,7 @@ async fn get_document_s_nested_attributes_to_retrieve() {
})
);
let (response, code) = index
.get_document(
1,
Some(GetDocumentOptions {
fields: Some(vec!["content.truc"]),
}),
)
.get_document(1, Some(GetDocumentOptions { fields: Some(vec!["content.truc"]) }))
.await;
assert_eq!(code, 200);
assert_eq!(
@ -405,20 +360,13 @@ async fn get_document_s_nested_attributes_to_retrieve() {
async fn get_documents_displayed_attributes_is_ignored() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"displayedAttributes": ["gender"]}))
.await;
index.update_settings(json!({"displayedAttributes": ["gender"]})).await;
index.load_test_set().await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions::default())
.await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 20);
assert_eq!(
response["results"][0].as_object().unwrap().keys().count(),
16
);
assert_eq!(response["results"][0].as_object().unwrap().keys().count(), 16);
assert!(response["results"][0]["gender"] != json!(null));
assert_eq!(response["offset"], json!(0));
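Many of the calls above lean on struct-update syntax (`..Default::default()`) to override a single field of GetAllDocumentsOptions. A minimal sketch of that pattern with a hypothetical stand-in type, since the real struct lives in the common test module:

#[derive(Debug, Default)]
struct GetAllDocumentsOptions {
    limit: Option<usize>,
    offset: Option<usize>,
    attributes_to_retrieve: Option<Vec<String>>,
}

fn main() {
    // Only `limit` is overridden; every other field keeps its default.
    let options = GetAllDocumentsOptions { limit: Some(5), ..Default::default() };
    assert_eq!(options.limit, Some(5));
    assert_eq!(options.offset, None);
    assert!(options.attributes_to_retrieve.is_none());
}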

View File

@ -1,7 +1,7 @@
use crate::common::{GetAllDocumentsOptions, Server};
use serde_json::json;
use crate::common::encoder::Encoder;
use serde_json::json;
use crate::common::{GetAllDocumentsOptions, Server};
#[actix_rt::test]
async fn error_document_update_create_index_bad_uid() {
@ -84,10 +84,7 @@ async fn update_document() {
let (response, code) = index.get_document(1, None).await;
assert_eq!(code, 200);
assert_eq!(
response.to_string(),
r##"{"doc_id":1,"content":"foo","other":"bar"}"##
);
assert_eq!(response.to_string(), r##"{"doc_id":1,"content":"foo","other":"bar"}"##);
}
#[actix_rt::test]
@ -125,10 +122,7 @@ async fn update_document_gzip_encoded() {
let (response, code) = index.get_document(1, None).await;
assert_eq!(code, 200);
assert_eq!(
response.to_string(),
r##"{"doc_id":1,"content":"foo","other":"bar"}"##
);
assert_eq!(response.to_string(), r##"{"doc_id":1,"content":"foo","other":"bar"}"##);
}
#[actix_rt::test]
@ -143,10 +137,7 @@ async fn update_larger_dataset() {
assert_eq!(response["type"], "documentAdditionOrUpdate");
assert_eq!(response["details"]["indexedDocuments"], 77);
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
limit: Some(1000),
..Default::default()
})
.get_all_documents(GetAllDocumentsOptions { limit: Some(1000), ..Default::default() })
.await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 77);

View File

@ -1,10 +1,10 @@
mod data;
use crate::common::{default_settings, GetAllDocumentsOptions, Server};
use meilisearch_http::Opt;
use serde_json::json;
use self::data::GetDump;
use crate::common::{default_settings, GetAllDocumentsOptions, Server};
// all the following tests are ignored on Windows. See #2364
#[actix_rt::test]
@ -17,14 +17,8 @@ async fn import_dump_v1() {
GetDump::MoviesWithSettingsV1.path(),
GetDump::RubyGemsWithSettingsV1.path(),
] {
let options = Opt {
import_dump: Some(path),
..default_settings(temp.path())
};
let error = Server::new_with_options(options)
.await
.map(drop)
.unwrap_err();
let options = Opt { import_dump: Some(path), ..default_settings(temp.path()) };
let error = Server::new_with_options(options).await.map(drop).unwrap_err();
assert_eq!(error.to_string(), "The version 1 of the dumps is not supported anymore. You can re-export your dump from a version between 0.21 and 0.24, or start fresh from a version 0.25 onwards.");
}
@ -35,10 +29,8 @@ async fn import_dump_v1() {
async fn import_dump_v2_movie_raw() {
let temp = tempfile::tempdir().unwrap();
let options = Opt {
import_dump: Some(GetDump::MoviesRawV2.path()),
..default_settings(temp.path())
};
let options =
Opt { import_dump: Some(GetDump::MoviesRawV2.path()), ..default_settings(temp.path()) };
let server = Server::new_with_options(options).await.unwrap();
let (indexes, code) = server.list_indexes(None, None).await;
@ -227,10 +219,8 @@ async fn import_dump_v2_rubygems_with_settings() {
async fn import_dump_v3_movie_raw() {
let temp = tempfile::tempdir().unwrap();
let options = Opt {
import_dump: Some(GetDump::MoviesRawV3.path()),
..default_settings(temp.path())
};
let options =
Opt { import_dump: Some(GetDump::MoviesRawV3.path()), ..default_settings(temp.path()) };
let server = Server::new_with_options(options).await.unwrap();
let (indexes, code) = server.list_indexes(None, None).await;
@ -419,10 +409,8 @@ async fn import_dump_v3_rubygems_with_settings() {
async fn import_dump_v4_movie_raw() {
let temp = tempfile::tempdir().unwrap();
let options = Opt {
import_dump: Some(GetDump::MoviesRawV4.path()),
..default_settings(temp.path())
};
let options =
Opt { import_dump: Some(GetDump::MoviesRawV4.path()), ..default_settings(temp.path()) };
let server = Server::new_with_options(options).await.unwrap();
let (indexes, code) = server.list_indexes(None, None).await;
@ -611,10 +599,8 @@ async fn import_dump_v4_rubygems_with_settings() {
async fn import_dump_v5() {
let temp = tempfile::tempdir().unwrap();
let options = Opt {
import_dump: Some(GetDump::TestV5.path()),
..default_settings(temp.path())
};
let options =
Opt { import_dump: Some(GetDump::TestV5.path()), ..default_settings(temp.path()) };
let mut server = Server::new_auth_with_options(options, temp).await;
server.use_api_key("MASTER_KEY");
@ -654,14 +640,10 @@ async fn import_dump_v5() {
assert_eq!(code, 200);
assert_eq!(stats, expected_stats);
let (docs, code) = index2
.get_all_documents(GetAllDocumentsOptions::default())
.await;
let (docs, code) = index2.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(docs["results"].as_array().unwrap().len(), 10);
let (docs, code) = index1
.get_all_documents(GetAllDocumentsOptions::default())
.await;
let (docs, code) = index1.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(docs["results"].as_array().unwrap().len(), 10);
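The v1 dump test above asserts that startup fails by writing `Server::new_with_options(options).await.map(drop).unwrap_err()`; here is a tiny sketch of that Result idiom on its own, with a hypothetical fallible constructor.

fn start() -> Result<String, String> {
    Err("The version 1 of the dumps is not supported anymore.".to_string())
}

fn main() {
    // `map(drop)` throws away the success value (in the test, a whole Server),
    // leaving a Result<(), String> whose error the assertion can match on.
    let error = start().map(drop).unwrap_err();
    assert!(error.starts_with("The version 1"));
}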

View File

@ -1,10 +1,11 @@
use crate::common::encoder::Encoder;
use crate::common::Server;
use actix_web::http::header::ContentType;
use actix_web::test;
use http::header::ACCEPT_ENCODING;
use serde_json::{json, Value};
use crate::common::encoder::Encoder;
use crate::common::Server;
#[actix_rt::test]
async fn create_index_no_primary_key() {
let server = Server::new().await;

View File

@ -1,6 +1,6 @@
use serde_json::{json, Value};
use crate::common::Server;
use serde_json::json;
use serde_json::Value;
#[actix_rt::test]
async fn create_and_get_index() {
@ -63,12 +63,8 @@ async fn list_multiple_indexes() {
assert!(response["results"].is_array());
let arr = response["results"].as_array().unwrap();
assert_eq!(arr.len(), 2);
assert!(arr
.iter()
.any(|entry| entry["uid"] == "test" && entry["primaryKey"] == Value::Null));
assert!(arr
.iter()
.any(|entry| entry["uid"] == "test1" && entry["primaryKey"] == "key"));
assert!(arr.iter().any(|entry| entry["uid"] == "test" && entry["primaryKey"] == Value::Null));
assert!(arr.iter().any(|entry| entry["uid"] == "test1" && entry["primaryKey"] == "key"));
}
#[actix_rt::test]
@ -77,10 +73,7 @@ async fn get_and_paginate_indexes() {
const NB_INDEXES: usize = 50;
for i in 0..NB_INDEXES {
server.index(&format!("test_{i:02}")).create(None).await;
server
.index(&format!("test_{i:02}"))
.wait_task(i as u64)
.await;
server.index(&format!("test_{i:02}")).wait_task(i as u64).await;
}
// basic

View File

@ -17,10 +17,7 @@ async fn stats() {
assert_eq!(code, 200);
assert_eq!(response["numberOfDocuments"], 0);
assert!(response["isIndexing"] == false);
assert!(response["fieldDistribution"]
.as_object()
.unwrap()
.is_empty());
assert!(response["fieldDistribution"].as_object().unwrap().is_empty());
let documents = json!([
{

View File

@ -1,7 +1,9 @@
use serde_json::json;
use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime;
use crate::common::encoder::Encoder;
use crate::common::Server;
use serde_json::json;
use time::{format_description::well_known::Rfc3339, OffsetDateTime};
#[actix_rt::test]
async fn update_primary_key() {

View File

@ -1,7 +1,7 @@
use crate::common::Server;
use serde_json::json;
use super::DOCUMENTS;
use crate::common::Server;
#[actix_rt::test]
async fn search_unexisting_index() {
@ -45,16 +45,14 @@ async fn search_invalid_highlight_and_crop_tags() {
for field in fields {
// object
let (response, code) = index
.search_post(json!({field.to_string(): {"marker": "<crop>"}}))
.await;
let (response, code) =
index.search_post(json!({field.to_string(): {"marker": "<crop>"}})).await;
assert_eq!(code, 400, "field {} passing object: {}", &field, response);
assert_eq!(response["code"], "bad_request");
// array
let (response, code) = index
.search_post(json!({field.to_string(): ["marker", "<crop>"]}))
.await;
let (response, code) =
index.search_post(json!({field.to_string(): ["marker", "<crop>"]})).await;
assert_eq!(code, 400, "field {} passing array: {}", &field, response);
assert_eq!(response["code"], "bad_request");
}
@ -65,9 +63,7 @@ async fn filter_invalid_syntax_object() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"filterableAttributes": ["title"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -92,9 +88,7 @@ async fn filter_invalid_syntax_array() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"filterableAttributes": ["title"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -119,9 +113,7 @@ async fn filter_invalid_syntax_string() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"filterableAttributes": ["title"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -134,13 +126,10 @@ async fn filter_invalid_syntax_string() {
"link": "https://docs.meilisearch.com/errors#invalid_filter"
});
index
.search(
json!({"filter": "title = Glass XOR title = Glass"}),
|response, code| {
.search(json!({"filter": "title = Glass XOR title = Glass"}), |response, code| {
assert_eq!(response, expected_response);
assert_eq!(code, 400);
},
)
})
.await;
}
@ -149,9 +138,7 @@ async fn filter_invalid_attribute_array() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"filterableAttributes": ["title"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -176,9 +163,7 @@ async fn filter_invalid_attribute_string() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"filterableAttributes": ["title"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -203,9 +188,7 @@ async fn filter_reserved_geo_attribute_array() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"filterableAttributes": ["title"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -230,9 +213,7 @@ async fn filter_reserved_geo_attribute_string() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"filterableAttributes": ["title"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -257,9 +238,7 @@ async fn filter_reserved_attribute_array() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"filterableAttributes": ["title"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -272,13 +251,10 @@ async fn filter_reserved_attribute_array() {
"link": "https://docs.meilisearch.com/errors#invalid_filter"
});
index
.search(
json!({"filter": ["_geoDistance = Glass"]}),
|response, code| {
.search(json!({"filter": ["_geoDistance = Glass"]}), |response, code| {
assert_eq!(response, expected_response);
assert_eq!(code, 400);
},
)
})
.await;
}
@ -287,9 +263,7 @@ async fn filter_reserved_attribute_string() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"filterableAttributes": ["title"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -302,13 +276,10 @@ async fn filter_reserved_attribute_string() {
"link": "https://docs.meilisearch.com/errors#invalid_filter"
});
index
.search(
json!({"filter": "_geoDistance = Glass"}),
|response, code| {
.search(json!({"filter": "_geoDistance = Glass"}), |response, code| {
assert_eq!(response, expected_response);
assert_eq!(code, 400);
},
)
})
.await;
}
@ -317,9 +288,7 @@ async fn sort_geo_reserved_attribute() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"sortableAttributes": ["id"]}))
.await;
index.update_settings(json!({"sortableAttributes": ["id"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -349,9 +318,7 @@ async fn sort_reserved_attribute() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"sortableAttributes": ["id"]}))
.await;
index.update_settings(json!({"sortableAttributes": ["id"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -381,9 +348,7 @@ async fn sort_unsortable_attribute() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"sortableAttributes": ["id"]}))
.await;
index.update_settings(json!({"sortableAttributes": ["id"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -413,9 +378,7 @@ async fn sort_invalid_syntax() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"sortableAttributes": ["id"]}))
.await;
index.update_settings(json!({"sortableAttributes": ["id"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;

View File

@ -1,15 +1,14 @@
use serde_json::json;
use super::*;
use crate::common::Server;
use serde_json::json;
#[actix_rt::test]
async fn formatted_contain_wildcard() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({ "displayedAttributes": ["id", "cattos"] }))
.await;
index.update_settings(json!({ "displayedAttributes": ["id", "cattos"] })).await;
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -34,9 +33,7 @@ async fn formatted_contain_wildcard() {
.await;
index
.search(
json!({ "q": "pesti", "attributesToRetrieve": ["*"] }),
|response, code| {
.search(json!({ "q": "pesti", "attributesToRetrieve": ["*"] }), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
@ -45,8 +42,7 @@ async fn formatted_contain_wildcard() {
"cattos": "pesti",
})
);
},
)
})
.await;
index
@ -91,9 +87,7 @@ async fn formatted_contain_wildcard() {
.await;
index
.search(
json!({ "q": "pesti", "attributesToCrop": ["*"] }),
|response, code| {
.search(json!({ "q": "pesti", "attributesToCrop": ["*"] }), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
@ -106,8 +100,7 @@ async fn formatted_contain_wildcard() {
}
})
);
},
)
})
.await;
}
@ -121,9 +114,7 @@ async fn format_nested() {
index.wait_task(0).await;
index
.search(
json!({ "q": "pesti", "attributesToRetrieve": ["doggos"] }),
|response, code| {
.search(json!({ "q": "pesti", "attributesToRetrieve": ["doggos"] }), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
@ -140,8 +131,7 @@ async fn format_nested() {
],
})
);
},
)
})
.await;
index
@ -297,9 +287,7 @@ async fn displayedattr_2_smol() {
let index = server.index("test");
// not enough displayed for the other settings
index
.update_settings(json!({ "displayedAttributes": ["id"] }))
.await;
index.update_settings(json!({ "displayedAttributes": ["id"] })).await;
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -319,9 +307,7 @@ async fn displayedattr_2_smol() {
.await;
index
.search(
json!({ "attributesToRetrieve": ["id"] }),
|response, code| {
.search(json!({ "attributesToRetrieve": ["id"] }), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
@ -329,14 +315,11 @@ async fn displayedattr_2_smol() {
"id": 852,
})
);
},
)
})
.await;
index
.search(
json!({ "attributesToHighlight": ["id"] }),
|response, code| {
.search(json!({ "attributesToHighlight": ["id"] }), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
@ -347,8 +330,7 @@ async fn displayedattr_2_smol() {
}
})
);
},
)
})
.await;
index
@ -385,9 +367,7 @@ async fn displayedattr_2_smol() {
.await;
index
.search(
json!({ "attributesToHighlight": ["cattos"] }),
|response, code| {
.search(json!({ "attributesToHighlight": ["cattos"] }), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
@ -395,14 +375,11 @@ async fn displayedattr_2_smol() {
"id": 852,
})
);
},
)
})
.await;
index
.search(
json!({ "attributesToCrop": ["cattos"] }),
|response, code| {
.search(json!({ "attributesToCrop": ["cattos"] }), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
@ -410,18 +387,14 @@ async fn displayedattr_2_smol() {
"id": 852,
})
);
},
)
})
.await;
index
.search(
json!({ "attributesToRetrieve": ["cattos"] }),
|response, code| {
.search(json!({ "attributesToRetrieve": ["cattos"] }), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"][0], json!({}));
},
)
})
.await;
index

View File

@ -5,10 +5,11 @@ mod errors;
mod formatted;
mod pagination;
use crate::common::Server;
use once_cell::sync::Lazy;
use serde_json::{json, Value};
use crate::common::Server;
pub(self) static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
json!([
{
@ -199,9 +200,7 @@ async fn search_with_filter_string_notation() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"filterableAttributes": ["title"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -221,9 +220,7 @@ async fn search_with_filter_string_notation() {
let index = server.index("nested");
index
.update_settings(json!({"filterableAttributes": ["cattos", "doggos.age"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["cattos", "doggos.age"]})).await;
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -262,9 +259,7 @@ async fn search_with_filter_array_notation() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"filterableAttributes": ["title"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -292,9 +287,7 @@ async fn search_with_sort_on_numbers() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"sortableAttributes": ["id"]}))
.await;
index.update_settings(json!({"sortableAttributes": ["id"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -314,9 +307,7 @@ async fn search_with_sort_on_numbers() {
let index = server.index("nested");
index
.update_settings(json!({"sortableAttributes": ["doggos.age"]}))
.await;
index.update_settings(json!({"sortableAttributes": ["doggos.age"]})).await;
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -340,9 +331,7 @@ async fn search_with_sort_on_strings() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"sortableAttributes": ["title"]}))
.await;
index.update_settings(json!({"sortableAttributes": ["title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -362,9 +351,7 @@ async fn search_with_sort_on_strings() {
let index = server.index("nested");
index
.update_settings(json!({"sortableAttributes": ["doggos.name"]}))
.await;
index.update_settings(json!({"sortableAttributes": ["doggos.name"]})).await;
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -388,9 +375,7 @@ async fn search_with_multiple_sort() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"sortableAttributes": ["id", "title"]}))
.await;
index.update_settings(json!({"sortableAttributes": ["id", "title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -410,9 +395,7 @@ async fn search_facet_distribution() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"filterableAttributes": ["title"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -434,9 +417,7 @@ async fn search_facet_distribution() {
let index = server.index("nested");
index
.update_settings(json!({"filterableAttributes": ["father", "doggos.name"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["father", "doggos.name"]})).await;
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -467,9 +448,7 @@ async fn search_facet_distribution() {
)
.await;
index
.update_settings(json!({"filterableAttributes": ["doggos"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["doggos"]})).await;
index.wait_task(4).await;
index
@ -502,10 +481,7 @@ async fn search_facet_distribution() {
dist["doggos.name"],
json!({ "bobby": 1, "buddy": 1, "gros bill": 1, "turbo": 1, "fast": 1})
);
assert_eq!(
dist["doggos.age"],
json!({ "2": 1, "4": 1, "5": 1, "6": 1, "8": 1})
);
assert_eq!(dist["doggos.age"], json!({ "2": 1, "4": 1, "5": 1, "6": 1, "8": 1}));
},
)
.await;
@ -516,17 +492,14 @@ async fn displayed_attributes() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({ "displayedAttributes": ["title"] }))
.await;
index.update_settings(json!({ "displayedAttributes": ["title"] })).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (response, code) = index
.search_post(json!({ "attributesToRetrieve": ["title", "id"] }))
.await;
let (response, code) =
index.search_post(json!({ "attributesToRetrieve": ["title", "id"] })).await;
assert_eq!(code, 200, "{}", response);
assert!(response["hits"][0].get("title").is_some());
}
@ -536,9 +509,7 @@ async fn placeholder_search_is_hard_limited() {
let server = Server::new().await;
let index = server.index("test");
let documents: Vec<_> = (0..1200)
.map(|i| json!({ "id": i, "text": "I am unique!" }))
.collect();
let documents: Vec<_> = (0..1200).map(|i| json!({ "id": i, "text": "I am unique!" })).collect();
index.add_documents(documents.into(), None).await;
index.wait_task(0).await;
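The hard-limit tests above generate their corpus inline: a map/collect over a range, wrapped into a JSON array for `add_documents`. A sketch of just that generation step, assuming only serde_json:

use serde_json::{json, Value};

fn main() {
    // 1200 documents with distinct ids; Vec<Value> converts into a JSON array
    // via `.into()`, which is what `add_documents(documents.into(), None)`
    // relies on above.
    let documents: Vec<_> = (0..1200).map(|i| json!({ "id": i, "text": "I am unique!" })).collect();
    let payload: Value = documents.into();

    assert_eq!(payload.as_array().unwrap().len(), 1200);
    assert_eq!(payload[0]["id"], 0);
}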
@ -567,9 +538,7 @@ async fn placeholder_search_is_hard_limited() {
)
.await;
index
.update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } }))
.await;
index.update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } })).await;
index.wait_task(1).await;
index
@ -603,9 +572,7 @@ async fn search_is_hard_limited() {
let server = Server::new().await;
let index = server.index("test");
let documents: Vec<_> = (0..1200)
.map(|i| json!({ "id": i, "text": "I am unique!" }))
.collect();
let documents: Vec<_> = (0..1200).map(|i| json!({ "id": i, "text": "I am unique!" })).collect();
index.add_documents(documents.into(), None).await;
index.wait_task(0).await;
@ -636,9 +603,7 @@ async fn search_is_hard_limited() {
)
.await;
index
.update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } }))
.await;
index.update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } })).await;
index.wait_task(1).await;
index
@ -674,13 +639,9 @@ async fn faceting_max_values_per_facet() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({ "filterableAttributes": ["number"] }))
.await;
index.update_settings(json!({ "filterableAttributes": ["number"] })).await;
let documents: Vec<_> = (0..10_000)
.map(|id| json!({ "id": id, "number": id * 10 }))
.collect();
let documents: Vec<_> = (0..10_000).map(|id| json!({ "id": id, "number": id * 10 })).collect();
index.add_documents(json!(documents), None).await;
index.wait_task(1).await;
@ -697,9 +658,7 @@ async fn faceting_max_values_per_facet() {
)
.await;
index
.update_settings(json!({ "faceting": { "maxValuesPerFacet": 10_000 } }))
.await;
index.update_settings(json!({ "faceting": { "maxValuesPerFacet": 10_000 } })).await;
index.wait_task(2).await;
index

View File

@ -1,23 +1,20 @@
use crate::common::Server;
use serde_json::json;
use crate::common::Server;
#[actix_rt::test]
async fn set_and_reset_distinct_attribute() {
let server = Server::new().await;
let index = server.index("test");
let (_response, _code) = index
.update_settings(json!({ "distinctAttribute": "test"}))
.await;
let (_response, _code) = index.update_settings(json!({ "distinctAttribute": "test"})).await;
index.wait_task(0).await;
let (response, _) = index.settings().await;
assert_eq!(response["distinctAttribute"], "test");
index
.update_settings(json!({ "distinctAttribute": null }))
.await;
index.update_settings(json!({ "distinctAttribute": null })).await;
index.wait_task(1).await;

View File

@ -13,14 +13,7 @@ static DEFAULT_SETTINGS_VALUES: Lazy<HashMap<&'static str, Value>> = Lazy::new(|
map.insert("distinct_attribute", json!(Value::Null));
map.insert(
"ranking_rules",
json!([
"words",
"typo",
"proximity",
"attribute",
"sort",
"exactness"
]),
json!(["words", "typo", "proximity", "attribute", "sort", "exactness"]),
);
map.insert("stop_words", json!([]));
map.insert("synonyms", json!({}));
@ -63,14 +56,7 @@ async fn get_settings() {
assert_eq!(settings["distinctAttribute"], json!(null));
assert_eq!(
settings["rankingRules"],
json!([
"words",
"typo",
"proximity",
"attribute",
"sort",
"exactness"
])
json!(["words", "typo", "proximity", "attribute", "sort", "exactness"])
);
assert_eq!(settings["stopWords"], json!([]));
assert_eq!(
@ -99,18 +85,14 @@ async fn error_update_settings_unknown_field() {
async fn test_partial_update() {
let server = Server::new().await;
let index = server.index("test");
let (_response, _code) = index
.update_settings(json!({"displayedAttributes": ["foo"]}))
.await;
let (_response, _code) = index.update_settings(json!({"displayedAttributes": ["foo"]})).await;
index.wait_task(0).await;
let (response, code) = index.settings().await;
assert_eq!(code, 200);
assert_eq!(response["displayedAttributes"], json!(["foo"]));
assert_eq!(response["searchableAttributes"], json!(["*"]));
let (_response, _) = index
.update_settings(json!({"searchableAttributes": ["bar"]}))
.await;
let (_response, _) = index.update_settings(json!({"searchableAttributes": ["bar"]})).await;
index.wait_task(1).await;
let (response, code) = index.settings().await;
@ -158,10 +140,7 @@ async fn reset_all_settings() {
assert_eq!(response["displayedAttributes"], json!(["name", "age"]));
assert_eq!(response["searchableAttributes"], json!(["name"]));
assert_eq!(response["stopWords"], json!(["the"]));
assert_eq!(
response["synonyms"],
json!({"puppy": ["dog", "doggo", "potat"] })
);
assert_eq!(response["synonyms"], json!({"puppy": ["dog", "doggo", "potat"] }));
assert_eq!(response["filterableAttributes"], json!(["age"]));
index.delete_settings().await;
@ -299,9 +278,8 @@ async fn error_set_invalid_ranking_rules() {
let index = server.index("test");
index.create(None).await;
let (_response, _code) = index
.update_settings(json!({ "rankingRules": [ "manyTheFish"]}))
.await;
let (_response, _code) =
index.update_settings(json!({ "rankingRules": [ "manyTheFish"]})).await;
index.wait_task(1).await;
let (response, code) = index.get_task(1).await;
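The settings defaults at the top of this file are held in a Lazy-initialized map; a condensed, self-contained sketch of that setup follows, assuming the once_cell crate already used by these tests.

use std::collections::HashMap;

use once_cell::sync::Lazy;
use serde_json::{json, Value};

static DEFAULT_SETTINGS_VALUES: Lazy<HashMap<&'static str, Value>> = Lazy::new(|| {
    let mut map = HashMap::new();
    map.insert("distinct_attribute", json!(Value::Null));
    map.insert(
        "ranking_rules",
        json!(["words", "typo", "proximity", "attribute", "sort", "exactness"]),
    );
    map
});

fn main() {
    // The map is built on first access and shared by every assertion after.
    assert!(DEFAULT_SETTINGS_VALUES["distinct_attribute"].is_null());
    assert_eq!(DEFAULT_SETTINGS_VALUES["ranking_rules"][0], "words");
}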

View File

@ -1,11 +1,10 @@
use std::time::Duration;
use crate::common::server::default_settings;
use crate::common::GetAllDocumentsOptions;
use crate::common::Server;
use meilisearch_http::Opt;
use tokio::time::sleep;
use meilisearch_http::Opt;
use crate::common::server::default_settings;
use crate::common::{GetAllDocumentsOptions, Server};
macro_rules! verify_snapshot {
(
@ -62,10 +61,7 @@ async fn perform_snapshot() {
let snapshot_path = snapshot_dir.path().to_owned().join("db.snapshot");
let options = Opt {
import_snapshot: Some(snapshot_path),
..default_settings(temp.path())
};
let options = Opt { import_snapshot: Some(snapshot_path), ..default_settings(temp.path()) };
let snapshot_server = Server::new_with_options(options).await.unwrap();

View File

@ -1,5 +1,6 @@
use serde_json::json;
use time::{format_description::well_known::Rfc3339, OffsetDateTime};
use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime;
use crate::common::Server;

View File

@ -1,8 +1,9 @@
use crate::common::Server;
use serde_json::json;
use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime;
use crate::common::Server;
#[actix_rt::test]
async fn error_get_unexisting_task_status() {
let server = Server::new().await;
@ -49,10 +50,7 @@ async fn list_tasks() {
index.create(None).await;
index.wait_task(0).await;
index
.add_documents(
serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
None,
)
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
let (response, code) = index.list_tasks().await;
assert_eq!(code, 200);
@ -66,10 +64,7 @@ async fn list_tasks_with_star_filters() {
index.create(None).await;
index.wait_task(0).await;
index
.add_documents(
serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
None,
)
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
let (response, code) = index.service.get("/tasks?indexUid=test").await;
assert_eq!(code, 200);
@ -87,10 +82,8 @@ async fn list_tasks_with_star_filters() {
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
let (response, code) = index
.service
.get("/tasks?type=*,documentAdditionOrUpdate&status=*")
.await;
let (response, code) =
index.service.get("/tasks?type=*,documentAdditionOrUpdate&status=*").await;
assert_eq!(code, 200, "{:?}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
@ -116,10 +109,7 @@ async fn list_tasks_status_filtered() {
index.create(None).await;
index.wait_task(0).await;
index
.add_documents(
serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
None,
)
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
let (response, code) = index.filtered_tasks(&[], &["succeeded"]).await;
@ -145,19 +135,15 @@ async fn list_tasks_type_filtered() {
index.create(None).await;
index.wait_task(0).await;
index
.add_documents(
serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
None,
)
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
let (response, code) = index.filtered_tasks(&["indexCreation"], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
let (response, code) = index
.filtered_tasks(&["indexCreation", "documentAdditionOrUpdate"], &[])
.await;
let (response, code) =
index.filtered_tasks(&["indexCreation", "documentAdditionOrUpdate"], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
}
@ -169,10 +155,7 @@ async fn list_tasks_status_and_type_filtered() {
index.create(None).await;
index.wait_task(0).await;
index
.add_documents(
serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
None,
)
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
let (response, code) = index.filtered_tasks(&["indexCreation"], &["failed"]).await;

View File

@ -2,14 +2,15 @@ use std::borrow::Borrow;
use std::fmt::{self, Debug, Display};
use std::io::{self, BufReader, Read, Seek, Write};
use crate::error::{Code, ErrorCode};
use crate::internal_error;
use either::Either;
use milli::documents::{DocumentsBatchBuilder, Error};
use milli::Object;
use serde::Deserialize;
use serde_json::error::Category;
use crate::error::{Code, ErrorCode};
use crate::internal_error;
type Result<T> = std::result::Result<T, DocumentFormatError>;
#[derive(Debug)]
@ -105,10 +106,7 @@ pub fn read_csv(input: impl Read, writer: impl Write + Seek) -> Result<usize> {
builder.append_csv(csv).map_err(|e| (PayloadType::Csv, e))?;
let count = builder.documents_count();
let _ = builder
.into_inner()
.map_err(Into::into)
.map_err(DocumentFormatError::Internal)?;
let _ = builder.into_inner().map_err(Into::into).map_err(DocumentFormatError::Internal)?;
Ok(count as usize)
}
@ -119,9 +117,7 @@ pub fn read_ndjson(input: impl Read, writer: impl Write + Seek) -> Result<usize>
let reader = BufReader::new(input);
for result in serde_json::Deserializer::from_reader(reader).into_iter() {
let object = result
.map_err(Error::Json)
.map_err(|e| (PayloadType::Ndjson, e))?;
let object = result.map_err(Error::Json).map_err(|e| (PayloadType::Ndjson, e))?;
builder
.append_json_object(&object)
.map_err(Into::into)
@ -129,10 +125,7 @@ pub fn read_ndjson(input: impl Read, writer: impl Write + Seek) -> Result<usize>
}
let count = builder.documents_count();
let _ = builder
.into_inner()
.map_err(Into::into)
.map_err(DocumentFormatError::Internal)?;
let _ = builder.into_inner().map_err(Into::into).map_err(DocumentFormatError::Internal)?;
Ok(count as usize)
}
@ -149,9 +142,8 @@ pub fn read_json(input: impl Read, writer: impl Write + Seek) -> Result<usize> {
inner: Either<Vec<Object>, Object>,
}
let content: ArrayOrSingleObject = serde_json::from_reader(reader)
.map_err(Error::Json)
.map_err(|e| (PayloadType::Json, e))?;
let content: ArrayOrSingleObject =
serde_json::from_reader(reader).map_err(Error::Json).map_err(|e| (PayloadType::Json, e))?;
for object in content.inner.map_right(|o| vec![o]).into_inner() {
builder
@ -161,10 +153,7 @@ pub fn read_json(input: impl Read, writer: impl Write + Seek) -> Result<usize> {
}
let count = builder.documents_count();
let _ = builder
.into_inner()
.map_err(Into::into)
.map_err(DocumentFormatError::Internal)?;
let _ = builder.into_inner().map_err(Into::into).map_err(DocumentFormatError::Internal)?;
Ok(count as usize)
}


@ -1,6 +1,7 @@
use std::fmt;
use actix_web::{self as aweb, http::StatusCode, HttpResponseBuilder};
use actix_web::http::StatusCode;
use actix_web::{self as aweb, HttpResponseBuilder};
use aweb::rt::task::JoinError;
use milli::heed::{Error as HeedError, MdbError};
use serde::{Deserialize, Serialize};
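Splitting the old `use actix_web::{self as aweb, http::StatusCode, HttpResponseBuilder};` into two statements is the module-granularity rewrite: every `use` ends up naming items from exactly one module path. Schematically:

// Before: one import mixing items from actix_web and actix_web::http.
use actix_web::{self as aweb, http::StatusCode, HttpResponseBuilder};

// After: one import per module path.
use actix_web::http::StatusCode;
use actix_web::{self as aweb, HttpResponseBuilder};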
@ -10,10 +11,7 @@ use serde::{Deserialize, Serialize};
#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))]
pub struct ResponseError {
#[serde(skip)]
#[cfg_attr(
feature = "test-traits",
proptest(strategy = "strategy::status_code_strategy()")
)]
#[cfg_attr(feature = "test-traits", proptest(strategy = "strategy::status_code_strategy()"))]
code: StatusCode,
message: String,
#[serde(rename = "code")]
@ -62,9 +60,7 @@ where
impl aweb::error::ResponseError for ResponseError {
fn error_response(&self) -> aweb::HttpResponse {
let json = serde_json::to_vec(self).unwrap();
HttpResponseBuilder::new(self.status_code())
.content_type("application/json")
.body(json)
HttpResponseBuilder::new(self.status_code()).content_type("application/json").body(json)
}
fn status_code(&self) -> StatusCode {
@ -227,10 +223,9 @@ impl Code {
BadParameter => ErrCode::invalid("bad_parameter", StatusCode::BAD_REQUEST),
BadRequest => ErrCode::invalid("bad_request", StatusCode::BAD_REQUEST),
DatabaseSizeLimitReached => ErrCode::internal(
"database_size_limit_reached",
StatusCode::INTERNAL_SERVER_ERROR,
),
DatabaseSizeLimitReached => {
ErrCode::internal("database_size_limit_reached", StatusCode::INTERNAL_SERVER_ERROR)
}
DocumentNotFound => ErrCode::invalid("document_not_found", StatusCode::NOT_FOUND),
Internal => ErrCode::internal("internal", StatusCode::INTERNAL_SERVER_ERROR),
InvalidGeoField => ErrCode::invalid("invalid_geo_field", StatusCode::BAD_REQUEST),
@ -336,27 +331,15 @@ struct ErrCode {
impl ErrCode {
fn authentication(error_name: &'static str, status_code: StatusCode) -> ErrCode {
ErrCode {
status_code,
error_name,
error_type: ErrorType::AuthenticationError,
}
ErrCode { status_code, error_name, error_type: ErrorType::AuthenticationError }
}
fn internal(error_name: &'static str, status_code: StatusCode) -> ErrCode {
ErrCode {
status_code,
error_name,
error_type: ErrorType::InternalError,
}
ErrCode { status_code, error_name, error_type: ErrorType::InternalError }
}
fn invalid(error_name: &'static str, status_code: StatusCode) -> ErrCode {
ErrCode {
status_code,
error_name,
error_type: ErrorType::InvalidRequestError,
}
ErrCode { status_code, error_name, error_type: ErrorType::InvalidRequestError }
}
}
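The collapsed ErrCode constructors are the struct-literal case of the same width rule: rustfmt's struct_lit_width threshold (normally 18 columns) is effectively raised to the full line width, so any literal that fits on one line is joined. Roughly:

// Previously vertical because it exceeds the default 18-column struct literal limit;
// with the limit raised to max_width it becomes a single line:
ErrCode { status_code, error_name, error_type: ErrorType::InternalError }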


@ -1,8 +1,9 @@
use serde::{Deserialize, Serialize};
use std::error::Error;
use std::fmt;
use std::str::FromStr;
use serde::{Deserialize, Serialize};
/// An index uid is composed of only ascii alphanumeric characters, - and _, between 1 and 400
/// bytes long
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
@ -38,9 +39,7 @@ impl TryFrom<String> for IndexUid {
type Error = IndexUidFormatError;
fn try_from(uid: String) -> Result<Self, Self::Error> {
if !uid
.chars()
.all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_')
if !uid.chars().all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_')
|| uid.is_empty()
|| uid.len() > 400
{


@ -1,15 +1,17 @@
use crate::error::{Code, ErrorCode};
use crate::index_uid::IndexUid;
use crate::star_or::StarOr;
use std::hash::Hash;
use enum_iterator::Sequence;
use serde::{Deserialize, Serialize};
use serde_json::{from_value, Value};
use std::hash::Hash;
use time::format_description::well_known::Rfc3339;
use time::macros::{format_description, time};
use time::{Date, OffsetDateTime, PrimitiveDateTime};
use uuid::Uuid;
use crate::error::{Code, ErrorCode};
use crate::index_uid::IndexUid;
use crate::star_or::StarOr;
type Result<T> = std::result::Result<T, Error>;
pub type KeyId = Uuid;
@ -74,16 +76,7 @@ impl Key {
let created_at = OffsetDateTime::now_utc();
let updated_at = created_at;
Ok(Self {
name,
description,
uid,
actions,
indexes,
expires_at,
created_at,
updated_at,
})
Ok(Self { name, description, uid, actions, indexes, expires_at, created_at, updated_at })
}
pub fn update_from_value(&mut self, value: Value) -> Result<()> {


@ -7,8 +7,7 @@ pub mod star_or;
pub mod tasks;
pub use milli;
pub use milli::heed;
pub use milli::Index;
pub use milli::{heed, Index};
use uuid::Uuid;
pub type Document = serde_json::Map<String, serde_json::Value>;
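
Module granularity also merges in the other direction: two re-exports drawing items from the same path are fused into one `use` list, which is what happened to the milli re-exports here. Sketch:

// Before: two separate re-exports from the same crate root.
pub use milli::heed;
pub use milli::Index;

// After: merged into a single statement.
pub use milli::{heed, Index};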


@ -376,9 +376,8 @@ pub fn settings(
index: &Index,
rtxn: &crate::heed::RoTxn,
) -> Result<Settings<Checked>, milli::Error> {
let displayed_attributes = index
.displayed_fields(rtxn)?
.map(|fields| fields.into_iter().map(String::from).collect());
let displayed_attributes =
index.displayed_fields(rtxn)?.map(|fields| fields.into_iter().map(String::from).collect());
let searchable_attributes = index
.user_defined_searchable_fields(rtxn)?
@ -388,11 +387,7 @@ pub fn settings(
let sortable_attributes = index.sortable_fields(rtxn)?.into_iter().collect();
let criteria = index
.criteria(rtxn)?
.into_iter()
.map(|c| c.to_string())
.collect();
let criteria = index.criteria(rtxn)?.into_iter().map(|c| c.to_string()).collect();
let stop_words = index
.stop_words(rtxn)?
@ -408,12 +403,7 @@ pub fn settings(
let synonyms = index
.synonyms(rtxn)?
.iter()
.map(|(key, values)| {
(
key.join(" "),
values.iter().map(|value| value.join(" ")).collect(),
)
})
.map(|(key, values)| (key.join(" "), values.iter().map(|value| value.join(" ")).collect()))
.collect();
let min_typo_word_len = MinWordSizeTyposSetting {
@ -426,11 +416,7 @@ pub fn settings(
None => BTreeSet::new(),
};
let disabled_attributes = index
.exact_attributes(rtxn)?
.into_iter()
.map(String::from)
.collect();
let disabled_attributes = index.exact_attributes(rtxn)?.into_iter().map(String::from).collect();
let typo_tolerance = TypoSettings {
enabled: Setting::Set(index.authorize_typos(rtxn)?),
@ -441,17 +427,13 @@ pub fn settings(
let faceting = FacetingSettings {
max_values_per_facet: Setting::Set(
index
.max_values_per_facet(rtxn)?
.unwrap_or(DEFAULT_VALUES_PER_FACET),
index.max_values_per_facet(rtxn)?.unwrap_or(DEFAULT_VALUES_PER_FACET),
),
};
let pagination = PaginationSettings {
max_total_hits: Setting::Set(
index
.pagination_max_total_hits(rtxn)?
.unwrap_or(DEFAULT_PAGINATION_MAX_TOTAL_HITS),
index.pagination_max_total_hits(rtxn)?.unwrap_or(DEFAULT_PAGINATION_MAX_TOTAL_HITS),
),
};
@ -487,11 +469,7 @@ pub(crate) mod test {
use super::*;
pub(super) fn setting_strategy<T: Arbitrary + Clone>() -> impl Strategy<Value = Setting<T>> {
prop_oneof![
Just(Setting::NotSet),
Just(Setting::Reset),
any::<T>().prop_map(Setting::Set)
]
prop_oneof![Just(Setting::NotSet), Just(Setting::Reset), any::<T>().prop_map(Setting::Set)]
}
#[test]
@ -514,10 +492,7 @@ pub(crate) mod test {
let checked = settings.clone().check();
assert_eq!(settings.displayed_attributes, checked.displayed_attributes);
assert_eq!(
settings.searchable_attributes,
checked.searchable_attributes
);
assert_eq!(settings.searchable_attributes, checked.searchable_attributes);
// test wildcard
// test no changes
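
One nuance visible in the FacetingSettings and PaginationSettings hunks above: the inner call is joined because it fits within the line width, while the enclosing struct literal stays vertical because joining everything would exceed the limit. The result, as in the diff:

let faceting = FacetingSettings {
    max_values_per_facet: Setting::Set(
        index.max_values_per_facet(rtxn)?.unwrap_or(DEFAULT_VALUES_PER_FACET),
    ),
};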


@ -1,10 +1,11 @@
use serde::de::Visitor;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::fmt::{Display, Formatter};
use std::marker::PhantomData;
use std::ops::Deref;
use std::str::FromStr;
use serde::de::Visitor;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
/// A type that tries to match either a star (*) or
/// any other thing that implements `FromStr`.
#[derive(Debug, Clone)]
@ -121,9 +122,10 @@ where
#[cfg(test)]
mod tests {
use super::*;
use serde_json::{json, Value};
use super::*;
#[test]
fn star_or_serde_roundtrip() {
fn roundtrip(content: Value, expected: StarOr<String>) {
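
Worth noting from the hunks above: the import grouping applies inside #[cfg(test)] modules as well, and `super` belongs to the crate-local group, so `use super::*;` now sits below the external serde_json import instead of leading the module. The expected shape of a test module under this setup (sketch):

#[cfg(test)]
mod tests {
    use serde_json::{json, Value}; // external crates

    use super::*; // crate-local group (`crate`, `self`, `super`) comes last

    // tests follow
}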


@ -1,5 +1,5 @@
use std::fmt::{Display, Write};
use std::collections::HashSet;
use std::fmt::{Display, Write};
use std::str::FromStr;
use enum_iterator::Sequence;
@ -9,12 +9,10 @@ use serde::{Deserialize, Serialize, Serializer};
use time::{Duration, OffsetDateTime};
use uuid::Uuid;
use crate::{
error::{Code, ResponseError},
keys::Key,
settings::{Settings, Unchecked},
InstanceUid,
};
use crate::error::{Code, ResponseError};
use crate::keys::Key;
use crate::settings::{Settings, Unchecked};
use crate::InstanceUid;
pub type TaskId = u32;
@ -66,9 +64,7 @@ impl Task {
/// Return the content-uuid if there is one
pub fn content_uuid(&self) -> Option<&Uuid> {
match self.kind {
KindWithContent::DocumentImport {
ref content_file, ..
} => Some(content_file),
KindWithContent::DocumentImport { ref content_file, .. } => Some(content_file),
KindWithContent::DocumentDeletion { .. }
| KindWithContent::DocumentClear { .. }
| KindWithContent::Settings { .. }
@ -183,33 +179,32 @@ impl KindWithContent {
/// `None` if it cannot be generated.
pub fn default_details(&self) -> Option<Details> {
match self {
KindWithContent::DocumentImport {
documents_count, ..
} => Some(Details::DocumentAddition {
KindWithContent::DocumentImport { documents_count, .. } => {
Some(Details::DocumentAddition {
received_documents: *documents_count,
indexed_documents: None,
}),
KindWithContent::DocumentDeletion {
index_uid: _,
documents_ids,
} => Some(Details::DocumentDeletion {
})
}
KindWithContent::DocumentDeletion { index_uid: _, documents_ids } => {
Some(Details::DocumentDeletion {
received_document_ids: documents_ids.len(),
deleted_documents: None,
}),
KindWithContent::DocumentClear { .. } => Some(Details::ClearAll {
deleted_documents: None,
}),
KindWithContent::Settings { new_settings, .. } => Some(Details::Settings {
settings: new_settings.clone(),
}),
})
}
KindWithContent::DocumentClear { .. } => {
Some(Details::ClearAll { deleted_documents: None })
}
KindWithContent::Settings { new_settings, .. } => {
Some(Details::Settings { settings: new_settings.clone() })
}
KindWithContent::IndexDeletion { .. } => None,
KindWithContent::IndexCreation { primary_key, .. }
| KindWithContent::IndexUpdate { primary_key, .. } => Some(Details::IndexInfo {
primary_key: primary_key.clone(),
}),
KindWithContent::IndexSwap { swaps } => Some(Details::IndexSwap {
swaps: swaps.clone(),
}),
| KindWithContent::IndexUpdate { primary_key, .. } => {
Some(Details::IndexInfo { primary_key: primary_key.clone() })
}
KindWithContent::IndexSwap { swaps } => {
Some(Details::IndexSwap { swaps: swaps.clone() })
}
KindWithContent::TaskCancelation { query, tasks } => Some(Details::TaskCancelation {
matched_tasks: tasks.len(),
canceled_tasks: None,
@ -227,30 +222,29 @@ impl KindWithContent {
pub fn default_finished_details(&self) -> Option<Details> {
match self {
KindWithContent::DocumentImport {
documents_count, ..
} => Some(Details::DocumentAddition {
KindWithContent::DocumentImport { documents_count, .. } => {
Some(Details::DocumentAddition {
received_documents: *documents_count,
indexed_documents: Some(0),
}),
KindWithContent::DocumentDeletion {
index_uid: _,
documents_ids,
} => Some(Details::DocumentDeletion {
})
}
KindWithContent::DocumentDeletion { index_uid: _, documents_ids } => {
Some(Details::DocumentDeletion {
received_document_ids: documents_ids.len(),
deleted_documents: Some(0),
}),
KindWithContent::DocumentClear { .. } => Some(Details::ClearAll {
deleted_documents: None,
}),
KindWithContent::Settings { new_settings, .. } => Some(Details::Settings {
settings: new_settings.clone(),
}),
})
}
KindWithContent::DocumentClear { .. } => {
Some(Details::ClearAll { deleted_documents: None })
}
KindWithContent::Settings { new_settings, .. } => {
Some(Details::Settings { settings: new_settings.clone() })
}
KindWithContent::IndexDeletion { .. } => None,
KindWithContent::IndexCreation { primary_key, .. }
| KindWithContent::IndexUpdate { primary_key, .. } => Some(Details::IndexInfo {
primary_key: primary_key.clone(),
}),
| KindWithContent::IndexUpdate { primary_key, .. } => {
Some(Details::IndexInfo { primary_key: primary_key.clone() })
}
KindWithContent::IndexSwap { .. } => {
todo!()
}
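The pattern behind all of these match-arm rewrites: the pattern itself is joined onto one line first, and if the arm's body then no longer fits on that same line, rustfmt wraps the body in a block rather than re-breaking the pattern. Schematically, with a variant from the surrounding code:

// Pattern fits on one line; the body does not, so it moves into a block:
KindWithContent::DocumentClear { .. } => {
    Some(Details::ClearAll { deleted_documents: None })
}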
@ -273,24 +267,24 @@ impl KindWithContent {
impl From<&KindWithContent> for Option<Details> {
fn from(kind: &KindWithContent) -> Self {
match kind {
KindWithContent::DocumentImport {
documents_count, ..
} => Some(Details::DocumentAddition {
KindWithContent::DocumentImport { documents_count, .. } => {
Some(Details::DocumentAddition {
received_documents: *documents_count,
indexed_documents: None,
}),
})
}
KindWithContent::DocumentDeletion { .. } => None,
KindWithContent::DocumentClear { .. } => None,
KindWithContent::Settings { new_settings, .. } => Some(Details::Settings {
settings: new_settings.clone(),
}),
KindWithContent::Settings { new_settings, .. } => {
Some(Details::Settings { settings: new_settings.clone() })
}
KindWithContent::IndexDeletion { .. } => None,
KindWithContent::IndexCreation { primary_key, .. } => Some(Details::IndexInfo {
primary_key: primary_key.clone(),
}),
KindWithContent::IndexUpdate { primary_key, .. } => Some(Details::IndexInfo {
primary_key: primary_key.clone(),
}),
KindWithContent::IndexCreation { primary_key, .. } => {
Some(Details::IndexInfo { primary_key: primary_key.clone() })
}
KindWithContent::IndexUpdate { primary_key, .. } => {
Some(Details::IndexInfo { primary_key: primary_key.clone() })
}
KindWithContent::IndexSwap { .. } => None,
KindWithContent::TaskCancelation { query, tasks } => Some(Details::TaskCancelation {
matched_tasks: tasks.len(),
@ -302,9 +296,9 @@ impl From<&KindWithContent> for Option<Details> {
deleted_tasks: None,
original_query: query.clone(),
}),
KindWithContent::DumpExport { dump_uid, .. } => Some(Details::Dump {
dump_uid: dump_uid.clone(),
}),
KindWithContent::DumpExport { dump_uid, .. } => {
Some(Details::Dump { dump_uid: dump_uid.clone() })
}
KindWithContent::Snapshot => None,
}
}
@ -514,9 +508,9 @@ pub fn serialize_duration<S: Serializer>(
#[cfg(test)]
mod tests {
use crate::heed::{types::SerdeJson, BytesDecode, BytesEncode};
use super::Details;
use crate::heed::types::SerdeJson;
use crate::heed::{BytesDecode, BytesEncode};
#[test]
fn bad_deser() {


@ -25,11 +25,7 @@ const SPLIT_SYMBOL: char = '.';
/// ```
fn contained_in(selector: &str, key: &str) -> bool {
selector.starts_with(key)
&& selector[key.len()..]
.chars()
.next()
.map(|c| c == SPLIT_SYMBOL)
.unwrap_or(true)
&& selector[key.len()..].chars().next().map(|c| c == SPLIT_SYMBOL).unwrap_or(true)
}
/// Map the selected leaf values of a json allowing you to update only the fields that were selected.
@ -244,10 +240,7 @@ mod tests {
fn test_contained_in() {
assert!(contained_in("animaux", "animaux"));
assert!(contained_in("animaux.chien", "animaux"));
assert!(contained_in(
"animaux.chien.race.bouvier bernois.fourrure.couleur",
"animaux"
));
assert!(contained_in("animaux.chien.race.bouvier bernois.fourrure.couleur", "animaux"));
assert!(contained_in(
"animaux.chien.race.bouvier bernois.fourrure.couleur",
"animaux.chien"
@ -726,14 +719,12 @@ mod tests {
}
});
map_leaf_values(
value.as_object_mut().unwrap(),
["jean.race.name"],
|key, value| match (value, key) {
map_leaf_values(value.as_object_mut().unwrap(), ["jean.race.name"], |key, value| {
match (value, key) {
(Value::String(name), "jean.race.name") => *name = S("patou"),
_ => unreachable!(),
},
);
}
});
assert_eq!(
value,
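
A last recurring shape, visible in the map_leaf_values rewrite above: when a call ends in a closure, rustfmt keeps the leading arguments on the call line and lets the closure block hang, instead of stacking every argument vertically. The reassembled result:

map_leaf_values(value.as_object_mut().unwrap(), ["jean.race.name"], |key, value| {
    match (value, key) {
        (Value::String(name), "jean.race.name") => *name = S("patou"),
        _ => unreachable!(),
    }
});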