Mirror of https://github.com/meilisearch/meilisearch.git (synced 2024-11-22 01:57:41 +08:00)
Introduce a rustfmt file
This commit is contained in:
parent 52e858a588
commit 80b2e70ee7
.rustfmt.toml (new file, 5 additions)
.rustfmt.toml

@@ -0,0 +1,5 @@
+unstable_features = true
+
+use_small_heuristics = "max"
+imports_granularity = "Module"
+group_imports = "StdExternalCrate"
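These four options drive every hunk below. imports_granularity = "Module" rewrites use statements so there is exactly one per module path, group_imports = "StdExternalCrate" orders import blocks standard library first, external crates second, local crate last, and use_small_heuristics = "max" lets any construct that fits within max_width (rustfmt's default of 100 columns) stay on a single line. The two import options are unstable, hence unstable_features = true and a nightly toolchain (cargo +nightly fmt). A minimal sketch of the layout this produces; the function and file names are illustrative, not taken from the diff:

    // group_imports = "StdExternalCrate": std imports come first, external
    // crates second, local `crate`/`super`/`self` imports last.
    // imports_granularity = "Module": one `use` statement per module path.
    use std::fs::File;
    use std::io::{BufRead, BufReader};
    use std::path::Path;

    // use_small_heuristics = "max": this expression fits under 100 columns,
    // so rustfmt keeps the whole call chain on a single line.
    fn count_lines(path: &Path) -> std::io::Result<usize> {
        Ok(BufReader::new(File::open(path)?).lines().count())
    }

    fn main() -> std::io::Result<()> {
        println!("{} lines", count_lines(Path::new("Cargo.toml"))?);
        Ok(())
    }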
dump/src/lib.rs

@@ -1,11 +1,9 @@
-use meilisearch_types::{
-    error::ResponseError,
-    keys::Key,
-    milli::update::IndexDocumentsMethod,
-    settings::Unchecked,
-    tasks::{Details, KindWithContent, Status, Task, TaskId},
-    InstanceUid,
-};
+use meilisearch_types::error::ResponseError;
+use meilisearch_types::keys::Key;
+use meilisearch_types::milli::update::IndexDocumentsMethod;
+use meilisearch_types::settings::Unchecked;
+use meilisearch_types::tasks::{Details, KindWithContent, Status, Task, TaskId};
+use meilisearch_types::InstanceUid;
 use roaring::RoaringBitmap;
 use serde::{Deserialize, Serialize};
 use time::OffsetDateTime;
@@ -168,15 +166,8 @@ impl From<KindWithContent> for KindDump {
             }
             KindWithContent::DocumentClear { .. } => KindDump::DocumentClear,
             KindWithContent::Settings {
-                new_settings,
-                is_deletion,
-                allow_index_creation,
-                ..
-            } => KindDump::Settings {
-                settings: new_settings,
-                is_deletion,
-                allow_index_creation,
-            },
+                new_settings, is_deletion, allow_index_creation, ..
+            } => KindDump::Settings { settings: new_settings, is_deletion, allow_index_creation },
             KindWithContent::IndexDeletion { .. } => KindDump::IndexDeletion,
             KindWithContent::IndexCreation { primary_key, .. } => {
                 KindDump::IndexCreation { primary_key }
@@ -191,15 +182,9 @@ impl From<KindWithContent> for KindDump {
             KindWithContent::TaskDeletion { query, tasks } => {
                 KindDump::TasksDeletion { query, tasks }
             }
-            KindWithContent::DumpExport {
-                dump_uid,
-                keys,
-                instance_uid,
-            } => KindDump::DumpExport {
-                dump_uid,
-                keys,
-                instance_uid,
-            },
+            KindWithContent::DumpExport { dump_uid, keys, instance_uid } => {
+                KindDump::DumpExport { dump_uid, keys, instance_uid }
+            }
             KindWithContent::Snapshot => KindDump::Snapshot,
         }
     }
@@ -207,29 +192,25 @@ impl From<KindWithContent> for KindDump {
 
 #[cfg(test)]
 pub(crate) mod test {
-    use std::{
-        fs::File,
-        io::{Seek, SeekFrom},
-        str::FromStr,
-    };
+    use std::fs::File;
+    use std::io::{Seek, SeekFrom};
+    use std::str::FromStr;
 
     use big_s::S;
     use maplit::btreeset;
+    use meilisearch_types::index_uid::IndexUid;
     use meilisearch_types::keys::{Action, Key};
-    use meilisearch_types::milli::{self, update::Setting};
-    use meilisearch_types::tasks::Status;
-    use meilisearch_types::{index_uid::IndexUid, star_or::StarOr};
-    use meilisearch_types::{
-        settings::{Checked, Settings},
-        tasks::Details,
-    };
+    use meilisearch_types::milli::update::Setting;
+    use meilisearch_types::milli::{self};
+    use meilisearch_types::settings::{Checked, Settings};
+    use meilisearch_types::star_or::StarOr;
+    use meilisearch_types::tasks::{Details, Status};
     use serde_json::{json, Map, Value};
     use time::macros::datetime;
     use uuid::Uuid;
 
-    use crate::{
-        reader::Document, DumpReader, DumpWriter, IndexMetadata, KindDump, TaskDump, Version,
-    };
+    use crate::reader::Document;
+    use crate::{DumpReader, DumpWriter, IndexMetadata, KindDump, TaskDump, Version};
 
     pub fn create_test_instance_uid() -> Uuid {
         Uuid::parse_str("9e15e977-f2ae-4761-943f-1eaf75fd736d").unwrap()
@@ -326,14 +307,8 @@ pub(crate) mod test {
                     finished_at: None,
                 },
                 Some(vec![
-                    json!({ "id": 4, "race": "leonberg" })
-                        .as_object()
-                        .unwrap()
-                        .clone(),
-                    json!({ "id": 5, "race": "patou" })
-                        .as_object()
-                        .unwrap()
-                        .clone(),
+                    json!({ "id": 4, "race": "leonberg" }).as_object().unwrap().clone(),
+                    json!({ "id": 5, "race": "patou" }).as_object().unwrap().clone(),
                 ]),
             ),
             (
@@ -397,9 +372,7 @@ pub(crate) mod test {
         let documents = create_test_documents();
         let settings = create_test_settings();
 
-        let mut index = dump
-            .create_index("doggos", &create_test_index_metadata())
-            .unwrap();
+        let mut index = dump.create_index("doggos", &create_test_index_metadata()).unwrap();
         for document in &documents {
             index.push_document(document).unwrap();
         }
@@ -445,10 +418,7 @@ pub(crate) mod test {
         // ==== checking the top level infos
         assert_eq!(dump.version(), Version::V6);
         assert!(dump.date().is_some());
-        assert_eq!(
-            dump.instance_uid().unwrap().unwrap(),
-            create_test_instance_uid()
-        );
+        assert_eq!(dump.instance_uid().unwrap().unwrap(), create_test_instance_uid());
 
         // ==== checking the index
         let mut indexes = dump.indexes().unwrap();
@@ -475,10 +445,7 @@ pub(crate) mod test {
             "A content file was expected for the task {}.",
             expected.0.uid
         );
-        let updates = content_file
-            .unwrap()
-            .collect::<Result<Vec<_>, _>>()
-            .unwrap();
+        let updates = content_file.unwrap().collect::<Result<Vec<_>, _>>().unwrap();
         assert_eq!(updates, expected_update);
     }
 }
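Everything below is the same two mechanical rewrites applied across the dump crate: grouped use std::{...} trees are split into one import per module, and method chains or struct literals that fit under max_width collapse onto one line. A small self-contained sketch of the chain case, assuming nothing from this diff:

    // Under the default heuristics, rustfmt breaks a long method chain one
    // call per line once it exceeds chain_width (60% of max_width by
    // default). With use_small_heuristics = "max" that threshold becomes
    // max_width itself, so chains up to 100 columns stay on one line:
    fn main() {
        let races = "leonberg\npatou\ndoggo";
        let documents = races.lines().map(str::to_owned).collect::<Vec<String>>();
        assert_eq!(documents.len(), 3);
        println!("{documents:?}");
    }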
dump/src/reader/compat/v2_to_v3.rs

@@ -4,11 +4,10 @@ use std::str::FromStr;
 use time::OffsetDateTime;
 use uuid::Uuid;
 
+use super::v3_to_v4::CompatV3ToV4;
 use crate::reader::{v2, v3, Document};
 use crate::Result;
 
-use super::v3_to_v4::CompatV3ToV4;
-
 pub struct CompatV2ToV3 {
     pub from: v2::V2Reader,
 }
@@ -22,10 +21,7 @@ impl CompatV2ToV3 {
         self.from
             .index_uuid()
             .into_iter()
-            .map(|index| v3::meta::IndexUuid {
-                uid: index.uid,
-                uuid: index.uuid,
-            })
+            .map(|index| v3::meta::IndexUuid { uid: index.uid, uuid: index.uuid })
             .collect()
     }
 
@@ -65,10 +61,7 @@ impl CompatV2ToV3 {
             .tasks()
             .map(move |task| {
                 task.map(|(task, content_file)| {
-                    let task = v3::Task {
-                        uuid: task.uuid,
-                        update: task.update.into(),
-                    };
+                    let task = v3::Task { uuid: task.uuid, update: task.update.into() };
 
                     Some((
                         task,
@@ -216,22 +209,22 @@ impl TryFrom<(v2::updates::UpdateMeta, Option<Uuid>)> for v3::updates::Update {
 
     fn try_from((update, uuid): (v2::updates::UpdateMeta, Option<Uuid>)) -> Result<Self> {
         Ok(match update {
-            v2::updates::UpdateMeta::DocumentsAddition {
-                method,
-                format: _,
-                primary_key,
-            } if uuid.is_some() => v3::updates::Update::DocumentAddition {
-                primary_key,
-                method: match method {
-                    v2::updates::IndexDocumentsMethod::ReplaceDocuments => {
-                        v3::updates::IndexDocumentsMethod::ReplaceDocuments
-                    }
-                    v2::updates::IndexDocumentsMethod::UpdateDocuments => {
-                        v3::updates::IndexDocumentsMethod::UpdateDocuments
-                    }
-                },
-                content_uuid: uuid.unwrap(),
-            },
+            v2::updates::UpdateMeta::DocumentsAddition { method, format: _, primary_key }
+                if uuid.is_some() =>
+            {
+                v3::updates::Update::DocumentAddition {
+                    primary_key,
+                    method: match method {
+                        v2::updates::IndexDocumentsMethod::ReplaceDocuments => {
+                            v3::updates::IndexDocumentsMethod::ReplaceDocuments
+                        }
+                        v2::updates::IndexDocumentsMethod::UpdateDocuments => {
+                            v3::updates::IndexDocumentsMethod::UpdateDocuments
+                        }
+                    },
+                    content_uuid: uuid.unwrap(),
+                }
+            }
             v2::updates::UpdateMeta::DocumentsAddition { .. } => {
                 return Err(crate::Error::MalformedTask)
             }
@@ -248,23 +241,21 @@ impl TryFrom<(v2::updates::UpdateMeta, Option<Uuid>)> for v3::updates::Update {
 
 pub fn update_from_unchecked_update_meta(update: v2::updates::UpdateMeta) -> v3::updates::Update {
     match update {
-        v2::updates::UpdateMeta::DocumentsAddition {
-            method,
-            format: _,
-            primary_key,
-        } => v3::updates::Update::DocumentAddition {
-            primary_key,
-            method: match method {
-                v2::updates::IndexDocumentsMethod::ReplaceDocuments => {
-                    v3::updates::IndexDocumentsMethod::ReplaceDocuments
-                }
-                v2::updates::IndexDocumentsMethod::UpdateDocuments => {
-                    v3::updates::IndexDocumentsMethod::UpdateDocuments
-                }
-            },
-            // we use this special uuid so we can recognize it if one day there is a bug related to this field.
-            content_uuid: Uuid::from_str("00112233-4455-6677-8899-aabbccddeeff").unwrap(),
-        },
+        v2::updates::UpdateMeta::DocumentsAddition { method, format: _, primary_key } => {
+            v3::updates::Update::DocumentAddition {
+                primary_key,
+                method: match method {
+                    v2::updates::IndexDocumentsMethod::ReplaceDocuments => {
+                        v3::updates::IndexDocumentsMethod::ReplaceDocuments
+                    }
+                    v2::updates::IndexDocumentsMethod::UpdateDocuments => {
+                        v3::updates::IndexDocumentsMethod::UpdateDocuments
+                    }
+                },
+                // we use this special uuid so we can recognize it if one day there is a bug related to this field.
+                content_uuid: Uuid::from_str("00112233-4455-6677-8899-aabbccddeeff").unwrap(),
+            }
+        }
        v2::updates::UpdateMeta::ClearDocuments => v3::updates::Update::ClearDocuments,
         v2::updates::UpdateMeta::DeleteDocuments { ids } => {
             v3::updates::Update::DeleteDocuments(ids)
@@ -354,10 +345,7 @@ impl<T> From<v2::Settings<T>> for v3::Settings<v3::Unchecked> {
                 .map(|f| f.into_iter().collect()),
             sortable_attributes: v3::Setting::NotSet,
             ranking_rules: option_to_setting(settings.ranking_rules).map(|criteria| {
-                criteria
-                    .into_iter()
-                    .map(|criterion| patch_ranking_rules(&criterion))
-                    .collect()
+                criteria.into_iter().map(|criterion| patch_ranking_rules(&criterion)).collect()
             }),
             stop_words: option_to_setting(settings.stop_words),
             synonyms: option_to_setting(settings.synonyms),
@@ -383,7 +371,8 @@ fn patch_ranking_rules(ranking_rule: &str) -> String {
 
 #[cfg(test)]
 pub(crate) mod test {
-    use std::{fs::File, io::BufReader};
+    use std::fs::File;
+    use std::io::BufReader;
 
     use flate2::bufread::GzDecoder;
     use tempfile::TempDir;
@@ -412,11 +401,7 @@ pub(crate) mod test {
         assert!(update_files[0].is_some()); // the enqueued document addition
         assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed
 
-        let update_file = update_files
-            .remove(0)
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let update_file = update_files.remove(0).unwrap().collect::<Result<Vec<_>>>().unwrap();
         meili_snap::snapshot_hash!(meili_snap::json_string!(update_file), @"7b8889539b669c7b9ddba448bafa385d");
 
         // indexes
@@ -441,11 +426,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"f43338ecceeddd1ce13ffd55438b2347");
-        let documents = products
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
 
@@ -460,11 +441,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"0d76c745cb334e8c20d6d6a14df733e1");
-        let documents = movies
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 110);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d153b5a81d8b3cdcbe1dec270b574022");
 
@@ -479,11 +456,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", movies2.settings()), @"09a2f7c571729f70f4cd93e24e8e3f28");
-        let documents = movies2
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = movies2.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 0);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d751713988987e9331980363e24189ce");
 
@@ -498,11 +471,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"09a2f7c571729f70f4cd93e24e8e3f28");
-        let documents = spells
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
     }
dump/src/reader/compat/v3_to_v4.rs

@@ -1,8 +1,7 @@
-use crate::reader::{v3, v4, UpdateFile};
-use crate::Result;
-
 use super::v2_to_v3::{CompatIndexV2ToV3, CompatV2ToV3};
 use super::v4_to_v5::CompatV4ToV5;
+use crate::reader::{v3, v4, UpdateFile};
+use crate::Result;
 
 pub enum CompatV3ToV4 {
     V3(v3::V3Reader),
@@ -38,18 +37,15 @@ impl CompatV3ToV4 {
 
     pub fn indexes(&self) -> Result<impl Iterator<Item = Result<CompatIndexV3ToV4>> + '_> {
         Ok(match self {
-            CompatV3ToV4::V3(v3) => Box::new(
-                v3.indexes()?
-                    .map(|index| index.map(CompatIndexV3ToV4::from)),
-            )
-                as Box<dyn Iterator<Item = Result<CompatIndexV3ToV4>> + '_>,
+            CompatV3ToV4::V3(v3) => {
+                Box::new(v3.indexes()?.map(|index| index.map(CompatIndexV3ToV4::from)))
+                    as Box<dyn Iterator<Item = Result<CompatIndexV3ToV4>> + '_>
+            }
 
-            CompatV3ToV4::Compat(compat) => Box::new(
-                compat
-                    .indexes()?
-                    .map(|index| index.map(CompatIndexV3ToV4::from)),
-            )
-                as Box<dyn Iterator<Item = Result<CompatIndexV3ToV4>> + '_>,
+            CompatV3ToV4::Compat(compat) => {
+                Box::new(compat.indexes()?.map(|index| index.map(CompatIndexV3ToV4::from)))
+                    as Box<dyn Iterator<Item = Result<CompatIndexV3ToV4>> + '_>
+            }
         })
     }
 
@@ -341,7 +337,8 @@ impl<T> From<v3::Settings<T>> for v4::Settings<v4::Unchecked> {
 
 #[cfg(test)]
 pub(crate) mod test {
-    use std::{fs::File, io::BufReader};
+    use std::fs::File;
+    use std::io::BufReader;
 
     use flate2::bufread::GzDecoder;
     use tempfile::TempDir;
@@ -370,11 +367,7 @@ pub(crate) mod test {
         assert!(update_files[0].is_some()); // the enqueued document addition
         assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed
 
-        let update_file = update_files
-            .remove(0)
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let update_file = update_files.remove(0).unwrap().collect::<Result<Vec<_>>>().unwrap();
         meili_snap::snapshot_hash!(meili_snap::json_string!(update_file), @"7b8889539b669c7b9ddba448bafa385d");
 
         // keys
@@ -403,11 +396,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"ea46dd6b58c5e1d65c1c8159a32695ea");
-        let documents = products
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
 
@@ -422,11 +411,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"4df4074ef6bfb71e8dc66d08ff8c9dfd");
-        let documents = movies
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 110);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d153b5a81d8b3cdcbe1dec270b574022");
 
@@ -441,11 +426,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", movies2.settings()), @"24eaf4046d9718dabff36f35103352d4");
-        let documents = movies2
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = movies2.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 0);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d751713988987e9331980363e24189ce");
 
@@ -460,11 +441,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"24eaf4046d9718dabff36f35103352d4");
-        let documents = spells
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
     }
dump/src/reader/compat/v4_to_v5.rs

@@ -1,8 +1,7 @@
-use crate::reader::{v4, v5, Document};
-use crate::Result;
-
 use super::v3_to_v4::{CompatIndexV3ToV4, CompatV3ToV4};
 use super::v5_to_v6::CompatV5ToV6;
+use crate::reader::{v4, v5, Document};
+use crate::Result;
 
 pub enum CompatV4ToV5 {
     V4(v4::V4Reader),
@@ -41,18 +40,15 @@ impl CompatV4ToV5 {
 
     pub fn indexes(&self) -> Result<Box<dyn Iterator<Item = Result<CompatIndexV4ToV5>> + '_>> {
         Ok(match self {
-            CompatV4ToV5::V4(v4) => Box::new(
-                v4.indexes()?
-                    .map(|index| index.map(CompatIndexV4ToV5::from)),
-            )
-                as Box<dyn Iterator<Item = Result<CompatIndexV4ToV5>> + '_>,
+            CompatV4ToV5::V4(v4) => {
+                Box::new(v4.indexes()?.map(|index| index.map(CompatIndexV4ToV5::from)))
+                    as Box<dyn Iterator<Item = Result<CompatIndexV4ToV5>> + '_>
+            }
 
-            CompatV4ToV5::Compat(compat) => Box::new(
-                compat
-                    .indexes()?
-                    .map(|index| index.map(CompatIndexV4ToV5::from)),
-            )
-                as Box<dyn Iterator<Item = Result<CompatIndexV4ToV5>> + '_>,
+            CompatV4ToV5::Compat(compat) => {
+                Box::new(compat.indexes()?.map(|index| index.map(CompatIndexV4ToV5::from)))
+                    as Box<dyn Iterator<Item = Result<CompatIndexV4ToV5>> + '_>
+            }
         })
     }
 
@@ -138,13 +134,9 @@ impl CompatV4ToV5 {
                     v4::tasks::TaskEvent::Created(date) => {
                         v5::tasks::TaskEvent::Created(date)
                     }
-                    v4::tasks::TaskEvent::Batched {
-                        timestamp,
-                        batch_id,
-                    } => v5::tasks::TaskEvent::Batched {
-                        timestamp,
-                        batch_id,
-                    },
+                    v4::tasks::TaskEvent::Batched { timestamp, batch_id } => {
+                        v5::tasks::TaskEvent::Batched { timestamp, batch_id }
+                    }
                     v4::tasks::TaskEvent::Processing(date) => {
                         v5::tasks::TaskEvent::Processing(date)
                     }
@@ -196,11 +188,7 @@ impl CompatV4ToV5 {
                 description: key.description,
                 name: None,
                 uid: v5::keys::KeyId::new_v4(),
-                actions: key
-                    .actions
-                    .into_iter()
-                    .filter_map(|action| action.into())
-                    .collect(),
+                actions: key.actions.into_iter().filter_map(|action| action.into()).collect(),
                 indexes: key
                     .indexes
                     .into_iter()
@@ -385,7 +373,8 @@ impl From<v4::Action> for Option<v5::Action> {
 
 #[cfg(test)]
 pub(crate) mod test {
-    use std::{fs::File, io::BufReader};
+    use std::fs::File;
+    use std::io::BufReader;
 
     use flate2::bufread::GzDecoder;
     use tempfile::TempDir;
@@ -440,11 +429,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"ed1a6977a832b1ab49cd5068b77ce498");
-        let documents = products
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b01c8371aea4c7171af0d4d846a2bdca");
 
@@ -459,11 +444,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"70681af1d52411218036fbd5a9b94ab5");
-        let documents = movies
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 110);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"786022a66ecb992c8a2a60fee070a5ab");
 
@@ -478,11 +459,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"7019bb8f146004dcdd91fc3c3254b742");
-        let documents = spells
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
     }
dump/src/reader/compat/v5_to_v6.rs

@@ -1,8 +1,7 @@
+use super::v4_to_v5::{CompatIndexV4ToV5, CompatV4ToV5};
 use crate::reader::{v5, v6, Document, UpdateFile};
 use crate::Result;
 
-use super::v4_to_v5::{CompatIndexV4ToV5, CompatV4ToV5};
-
 pub enum CompatV5ToV6 {
     V5(v5::V5Reader),
     Compat(CompatV4ToV5),
@@ -36,18 +35,15 @@ impl CompatV5ToV6 {
 
     pub fn indexes(&self) -> Result<Box<dyn Iterator<Item = Result<CompatIndexV5ToV6>> + '_>> {
         let indexes = match self {
-            CompatV5ToV6::V5(v5) => Box::new(
-                v5.indexes()?
-                    .map(|index| index.map(CompatIndexV5ToV6::from)),
-            )
-                as Box<dyn Iterator<Item = Result<CompatIndexV5ToV6>> + '_>,
+            CompatV5ToV6::V5(v5) => {
+                Box::new(v5.indexes()?.map(|index| index.map(CompatIndexV5ToV6::from)))
+                    as Box<dyn Iterator<Item = Result<CompatIndexV5ToV6>> + '_>
+            }
 
-            CompatV5ToV6::Compat(compat) => Box::new(
-                compat
-                    .indexes()?
-                    .map(|index| index.map(CompatIndexV5ToV6::from)),
-            )
-                as Box<dyn Iterator<Item = Result<CompatIndexV5ToV6>> + '_>,
+            CompatV5ToV6::Compat(compat) => {
+                Box::new(compat.indexes()?.map(|index| index.map(CompatIndexV5ToV6::from)))
+                    as Box<dyn Iterator<Item = Result<CompatIndexV5ToV6>> + '_>
+            }
         };
         Ok(indexes)
     }
@@ -127,16 +123,15 @@ impl CompatV5ToV6 {
             },
             canceled_by: None,
             details: task_view.details.map(|details| match details {
-                v5::Details::DocumentAddition {
-                    received_documents,
-                    indexed_documents,
-                } => v6::Details::DocumentAddition {
-                    received_documents: received_documents as u64,
-                    indexed_documents: indexed_documents.map(|i| i as u64),
-                },
-                v5::Details::Settings { settings } => v6::Details::Settings {
-                    settings: settings.into(),
-                },
+                v5::Details::DocumentAddition { received_documents, indexed_documents } => {
+                    v6::Details::DocumentAddition {
+                        received_documents: received_documents as u64,
+                        indexed_documents: indexed_documents.map(|i| i as u64),
+                    }
+                }
+                v5::Details::Settings { settings } => {
+                    v6::Details::Settings { settings: settings.into() }
+                }
                 v5::Details::IndexInfo { primary_key } => {
                     v6::Details::IndexInfo { primary_key }
                 }
@@ -174,11 +169,7 @@ impl CompatV5ToV6 {
                 description: key.description,
                 name: key.name,
                 uid: key.uid,
-                actions: key
-                    .actions
-                    .into_iter()
-                    .map(|action| action.into())
-                    .collect(),
+                actions: key.actions.into_iter().map(|action| action.into()).collect(),
                 indexes: key
                     .indexes
                     .into_iter()
@@ -396,7 +387,8 @@ impl From<v5::Action> for v6::Action {
 
 #[cfg(test)]
 pub(crate) mod test {
-    use std::{fs::File, io::BufReader};
+    use std::fs::File;
+    use std::io::BufReader;
 
     use flate2::bufread::GzDecoder;
     use tempfile::TempDir;
@@ -452,11 +444,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"9896a66a399c24a0f4f6a3c8563cd14a");
-        let documents = products
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b01c8371aea4c7171af0d4d846a2bdca");
 
@@ -471,11 +459,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"d0dc7efd1360f95fce57d7931a70b7c9");
-        let documents = movies
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 200);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"e962baafd2fbae4cdd14e876053b0c5a");
 
@@ -490,11 +474,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"59c8e30c2022897987ea7b4394167b06");
-        let documents = spells
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
     }
dump/src/reader/mod.rs

@@ -1,5 +1,9 @@
-use std::io::Read;
-use std::{fs::File, io::BufReader};
+use std::fs::File;
+use std::io::{BufReader, Read};
 
+use flate2::bufread::GzDecoder;
+use serde::Deserialize;
+use tempfile::TempDir;
+
 use self::compat::v4_to_v5::CompatV4ToV5;
 use self::compat::v5_to_v6::{CompatIndexV5ToV6, CompatV5ToV6};
@@ -7,10 +11,6 @@ use self::v5::V5Reader;
 use self::v6::{V6IndexReader, V6Reader};
 use crate::{Error, Result, Version};
 
-use flate2::bufread::GzDecoder;
-use serde::Deserialize;
-use tempfile::TempDir;
-
 mod compat;
 
 // pub(self) mod v1;
@@ -47,12 +47,7 @@ impl DumpReader {
         match dump_version {
             // Version::V1 => Ok(Box::new(v1::Reader::open(path)?)),
             Version::V1 => Err(Error::DumpV1Unsupported),
-            Version::V2 => Ok(v2::V2Reader::open(path)?
-                .to_v3()
-                .to_v4()
-                .to_v5()
-                .to_v6()
-                .into()),
+            Version::V2 => Ok(v2::V2Reader::open(path)?.to_v3().to_v4().to_v5().to_v6().into()),
             Version::V3 => Ok(v3::V3Reader::open(path)?.to_v4().to_v5().to_v6().into()),
             Version::V4 => Ok(v4::V4Reader::open(path)?.to_v5().to_v6().into()),
             Version::V5 => Ok(v5::V5Reader::open(path)?.to_v6().into()),
@@ -234,11 +229,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"9896a66a399c24a0f4f6a3c8563cd14a");
-        let documents = products
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b01c8371aea4c7171af0d4d846a2bdca");
 
@@ -253,11 +244,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"d0dc7efd1360f95fce57d7931a70b7c9");
-        let documents = movies
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 200);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"e962baafd2fbae4cdd14e876053b0c5a");
 
@@ -272,11 +259,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"59c8e30c2022897987ea7b4394167b06");
-        let documents = spells
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
     }
@@ -323,11 +306,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"ed1a6977a832b1ab49cd5068b77ce498");
-        let documents = products
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b01c8371aea4c7171af0d4d846a2bdca");
 
@@ -342,11 +321,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"70681af1d52411218036fbd5a9b94ab5");
-        let documents = movies
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 110);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"786022a66ecb992c8a2a60fee070a5ab");
 
@@ -361,11 +336,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"7019bb8f146004dcdd91fc3c3254b742");
-        let documents = spells
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
     }
@@ -413,11 +384,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"1a5ed16d00e6163662d9d7ffe400c5d0");
-        let documents = products
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
 
@@ -432,11 +399,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"9a6b511669b8f53d193d2f0bd1671baa");
-        let documents = movies
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 110);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d153b5a81d8b3cdcbe1dec270b574022");
 
@@ -451,11 +414,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", movies2.settings()), @"4fdf905496d9a511800ff523728728ac");
-        let documents = movies2
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = movies2.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 0);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d751713988987e9331980363e24189ce");
 
@@ -470,11 +429,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"4fdf905496d9a511800ff523728728ac");
-        let documents = spells
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
     }
@@ -522,11 +477,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"a7d4fed93bfc91d0f1126d3371abf48e");
-        let documents = products
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
 
@@ -541,11 +492,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"e79c3cc4eef44bd22acfb60957b459d9");
-        let documents = movies
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 110);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d153b5a81d8b3cdcbe1dec270b574022");
 
@@ -560,11 +507,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", movies2.settings()), @"7917f954b6f345336073bb155540ad6d");
-        let documents = movies2
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = movies2.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 0);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d751713988987e9331980363e24189ce");
 
@@ -579,11 +522,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"7917f954b6f345336073bb155540ad6d");
-        let documents = spells
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
     }
dump/src/reader/v2/mod.rs

@@ -22,11 +22,9 @@
 //! └── update_202573df-718b-4d80-9a65-2ee397c23dc3
 //! ```
 
-use std::{
-    fs::{self, File},
-    io::{BufRead, BufReader},
-    path::Path,
-};
+use std::fs::{self, File};
+use std::io::{BufRead, BufReader};
+use std::path::Path;
 
 use serde::{Deserialize, Serialize};
 use tempfile::TempDir;
@@ -37,11 +35,10 @@ pub mod meta;
 pub mod settings;
 pub mod updates;
 
-use crate::{IndexMetadata, Result, Version};
-
 use self::meta::{DumpMeta, IndexUuid};
-
-use super::{compat::v2_to_v3::CompatV2ToV3, Document};
+use super::compat::v2_to_v3::CompatV2ToV3;
+use super::Document;
+use crate::{IndexMetadata, Result, Version};
 
 pub type Settings<T> = settings::Settings<T>;
 pub type Checked = settings::Checked;
@@ -110,11 +107,7 @@ impl V2Reader {
         Ok(self.index_uuid.iter().map(|index| -> Result<_> {
             Ok(V2IndexReader::new(
                 index.uid.clone(),
-                &self
-                    .dump
-                    .path()
-                    .join("indexes")
-                    .join(format!("index-{}", index.uuid.to_string())),
+                &self.dump.path().join("indexes").join(format!("index-{}", index.uuid.to_string())),
             )?)
         }))
     }
@@ -193,10 +186,7 @@ pub struct UpdateFile {
 impl UpdateFile {
     fn new(path: &Path) -> Result<Self> {
         let reader = BufReader::new(File::open(path)?);
-        Ok(UpdateFile {
-            documents: serde_json::from_reader(reader)?,
-            index: 0,
-        })
+        Ok(UpdateFile { documents: serde_json::from_reader(reader)?, index: 0 })
     }
 }
 
@@ -211,7 +201,8 @@ impl Iterator for UpdateFile {
 
 #[cfg(test)]
 pub(crate) mod test {
-    use std::{fs::File, io::BufReader};
+    use std::fs::File;
+    use std::io::BufReader;
 
     use flate2::bufread::GzDecoder;
     use tempfile::TempDir;
@@ -240,11 +231,7 @@ pub(crate) mod test {
         assert!(update_files[0].is_some()); // the enqueued document addition
         assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed
 
-        let update_file = update_files
-            .remove(0)
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let update_file = update_files.remove(0).unwrap().collect::<Result<Vec<_>>>().unwrap();
         meili_snap::snapshot_hash!(meili_snap::json_string!(update_file), @"7b8889539b669c7b9ddba448bafa385d");
 
         // indexes
@@ -269,11 +256,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"b4814eab5e73e2dcfc90aad50aa583d1");
-        let documents = products
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
 
@@ -288,11 +271,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"59dd69f590635a58f3d99edc9e1fa21f");
-        let documents = movies
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 110);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d153b5a81d8b3cdcbe1dec270b574022");
 
@@ -307,11 +286,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", movies2.settings()), @"ac041085004c43373fe90dc48f5c23ab");
-        let documents = movies2
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = movies2.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 0);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d751713988987e9331980363e24189ce");
 
@@ -326,11 +301,7 @@ pub(crate) mod test {
         "###);
 
         meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"ac041085004c43373fe90dc48f5c23ab");
-        let documents = spells
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
     }
dump/src/reader/v2/settings.rs

@@ -1,8 +1,6 @@
-use std::{
-    collections::{BTreeMap, BTreeSet, HashSet},
-    marker::PhantomData,
-    str::FromStr,
-};
+use std::collections::{BTreeMap, BTreeSet, HashSet};
+use std::marker::PhantomData;
+use std::str::FromStr;
 
 use once_cell::sync::Lazy;
 use regex::Regex;
@@ -39,10 +37,7 @@ pub struct Unchecked;
 #[cfg_attr(test, derive(serde::Serialize))]
 #[serde(deny_unknown_fields)]
 #[serde(rename_all = "camelCase")]
-#[serde(bound(
-    serialize = "T: serde::Serialize",
-    deserialize = "T: Deserialize<'static>"
-))]
+#[serde(bound(serialize = "T: serde::Serialize", deserialize = "T: Deserialize<'static>"))]
 pub struct Settings<T> {
     #[serde(
         default,
|
@@ -22,11 +22,9 @@
 //! └── 66d3f12d-fcf3-4b53-88cb-407017373de7
 //! ```

-use std::{
-    fs::{self, File},
-    io::{BufRead, BufReader},
-    path::Path,
-};
+use std::fs::{self, File};
+use std::io::{BufRead, BufReader};
+use std::path::Path;

 use serde::{Deserialize, Serialize};
 use tempfile::TempDir;

@@ -37,11 +35,10 @@ pub mod meta;
 pub mod settings;
 pub mod updates;

-use crate::{Error, IndexMetadata, Result, Version};
-
 use self::meta::{DumpMeta, IndexUuid};
-
-use super::{compat::v3_to_v4::CompatV3ToV4, Document};
+use super::compat::v3_to_v4::CompatV3ToV4;
+use super::Document;
+use crate::{Error, IndexMetadata, Result, Version};

 pub type Settings<T> = settings::Settings<T>;
 pub type Checked = settings::Checked;

@@ -116,11 +113,7 @@ impl V3Reader {
         Ok(self.index_uuid.iter().map(|index| -> Result<_> {
             Ok(V3IndexReader::new(
                 index.uid.clone(),
-                &self
-                    .dump
-                    .path()
-                    .join("indexes")
-                    .join(index.uuid.to_string()),
+                &self.dump.path().join("indexes").join(index.uuid.to_string()),
             )?)
         }))
     }

@@ -204,9 +197,7 @@ pub struct UpdateFile {

 impl UpdateFile {
     fn new(path: &Path) -> Result<Self> {
-        Ok(UpdateFile {
-            reader: BufReader::new(File::open(path)?),
-        })
+        Ok(UpdateFile { reader: BufReader::new(File::open(path)?) })
     }
 }

@@ -226,7 +217,8 @@ impl Iterator for UpdateFile {

 #[cfg(test)]
 pub(crate) mod test {
-    use std::{fs::File, io::BufReader};
+    use std::fs::File;
+    use std::io::BufReader;

     use flate2::bufread::GzDecoder;
     use tempfile::TempDir;

@@ -255,11 +247,7 @@ pub(crate) mod test {
         assert!(update_files[0].is_some()); // the enqueued document addition
         assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed

-        let update_file = update_files
-            .remove(0)
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let update_file = update_files.remove(0).unwrap().collect::<Result<Vec<_>>>().unwrap();
         meili_snap::snapshot_hash!(meili_snap::json_string!(update_file), @"7b8889539b669c7b9ddba448bafa385d");

         // indexes

@@ -284,11 +272,7 @@ pub(crate) mod test {
         "###);

         meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"7460d4b242b5c8b1bda223f63bbbf349");
-        let documents = products
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");

@@ -303,11 +287,7 @@ pub(crate) mod test {
         "###);

         meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"d83ab8e79bb44595667d6ce3e6629a4f");
-        let documents = movies
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 110);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d153b5a81d8b3cdcbe1dec270b574022");

@@ -322,11 +302,7 @@ pub(crate) mod test {
         "###);

         meili_snap::snapshot_hash!(format!("{:#?}", movies2.settings()), @"44d3b5a3b3aa6cd950373ff751d05bb7");
-        let documents = movies2
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = movies2.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 0);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d751713988987e9331980363e24189ce");

@@ -341,11 +317,7 @@ pub(crate) mod test {
         "###);

         meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"44d3b5a3b3aa6cd950373ff751d05bb7");
-        let documents = spells
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
     }
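The import rewrites repeated across these files are what `imports_granularity = "Module"` produces: a nested `use` tree is split into one `use` statement per module path, which keeps future import diffs to one line per added or removed item. A sketch of the rule, reusing the `std` paths from the hunks above:

    // Before: one nested use-tree.
    use std::{
        fs::{self, File},
        io::{BufRead, BufReader},
        path::Path,
    };

    // After: one `use` per module.
    use std::fs::{self, File};
    use std::io::{BufRead, BufReader};
    use std::path::Path;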
@@ -1,8 +1,6 @@
-use std::{
-    collections::{BTreeMap, BTreeSet},
-    marker::PhantomData,
-    num::NonZeroUsize,
-};
+use std::collections::{BTreeMap, BTreeSet};
+use std::marker::PhantomData;
+use std::num::NonZeroUsize;

 use serde::{Deserialize, Deserializer};

@@ -40,10 +38,7 @@ pub struct Unchecked;
 #[cfg_attr(test, derive(serde::Serialize))]
 #[serde(deny_unknown_fields)]
 #[serde(rename_all = "camelCase")]
-#[serde(bound(
-    serialize = "T: serde::Serialize",
-    deserialize = "T: Deserialize<'static>"
-))]
+#[serde(bound(serialize = "T: serde::Serialize", deserialize = "T: Deserialize<'static>"))]
 pub struct Settings<T> {
     #[serde(
         default,
@@ -8,10 +8,7 @@ use serde::{Deserialize, Serialize};
 #[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))]
 pub struct ResponseError {
     #[serde(skip)]
-    #[cfg_attr(
-        feature = "test-traits",
-        proptest(strategy = "strategy::status_code_strategy()")
-    )]
+    #[cfg_attr(feature = "test-traits", proptest(strategy = "strategy::status_code_strategy()"))]
     pub code: StatusCode,
     pub message: String,
     #[serde(rename = "code")]

@@ -206,10 +203,9 @@ impl Code {

             BadParameter => ErrCode::invalid("bad_parameter", StatusCode::BAD_REQUEST),
             BadRequest => ErrCode::invalid("bad_request", StatusCode::BAD_REQUEST),
-            DatabaseSizeLimitReached => ErrCode::internal(
-                "database_size_limit_reached",
-                StatusCode::INTERNAL_SERVER_ERROR,
-            ),
+            DatabaseSizeLimitReached => {
+                ErrCode::internal("database_size_limit_reached", StatusCode::INTERNAL_SERVER_ERROR)
+            }
             DocumentNotFound => ErrCode::invalid("document_not_found", StatusCode::NOT_FOUND),
             Internal => ErrCode::internal("internal", StatusCode::INTERNAL_SERVER_ERROR),
             InvalidGeoField => ErrCode::invalid("invalid_geo_field", StatusCode::BAD_REQUEST),

@@ -302,26 +298,14 @@ struct ErrCode {

 impl ErrCode {
     fn authentication(error_name: &'static str, status_code: StatusCode) -> ErrCode {
-        ErrCode {
-            status_code,
-            error_name,
-            error_type: ErrorType::AuthenticationError,
-        }
+        ErrCode { status_code, error_name, error_type: ErrorType::AuthenticationError }
     }

     fn internal(error_name: &'static str, status_code: StatusCode) -> ErrCode {
-        ErrCode {
-            status_code,
-            error_name,
-            error_type: ErrorType::InternalError,
-        }
+        ErrCode { status_code, error_name, error_type: ErrorType::InternalError }
     }

     fn invalid(error_name: &'static str, status_code: StatusCode) -> ErrCode {
-        ErrCode {
-            status_code,
-            error_name,
-            error_type: ErrorType::InvalidRequestError,
-        }
+        ErrCode { status_code, error_name, error_type: ErrorType::InvalidRequestError }
     }
 }
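Several hunks also move whole `use` blocks around, which matches `group_imports = "StdExternalCrate"`: imports are sorted into three groups separated by blank lines, first `std`/`core`/`alloc`, then external crates, then crate-local paths (`crate::`, `self::`, `super::`). An illustrative layout with hypothetical items, not taken from this diff:

    use std::collections::BTreeMap;    // group 1: standard library

    use serde::Deserialize;            // group 2: external crates
    use uuid::Uuid;

    use super::settings::Settings;     // group 3: crate-local imports
    use crate::Result;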
@@ -1,10 +1,9 @@
-use std::{
-    fmt::{self, Display, Formatter},
-    marker::PhantomData,
-    str::FromStr,
-};
+use std::fmt::{self, Display, Formatter};
+use std::marker::PhantomData;
+use std::str::FromStr;

-use serde::{de::Visitor, Deserialize, Deserializer};
+use serde::de::Visitor;
+use serde::{Deserialize, Deserializer};
 use uuid::Uuid;

 use super::settings::{Settings, Unchecked};

@@ -39,9 +38,7 @@ impl TryFrom<String> for IndexUid {
     type Error = IndexUidFormatError;

     fn try_from(uid: String) -> Result<Self, Self::Error> {
-        if !uid
-            .chars()
-            .all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_')
+        if !uid.chars().all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_')
             || uid.is_empty()
             || uid.len() > 400
         {
@@ -1,8 +1,6 @@
-use std::{
-    fs::{self, File},
-    io::{BufRead, BufReader},
-    path::Path,
-};
+use std::fs::{self, File};
+use std::io::{BufRead, BufReader};
+use std::path::Path;

 use serde::{Deserialize, Serialize};
 use tempfile::TempDir;

@@ -15,11 +13,9 @@ pub mod meta;
 pub mod settings;
 pub mod tasks;

-use crate::{Error, IndexMetadata, Result, Version};
-
 use self::meta::{DumpMeta, IndexUuid};
-
 use super::compat::v4_to_v5::CompatV4ToV5;
+use crate::{Error, IndexMetadata, Result, Version};

 pub type Document = serde_json::Map<String, serde_json::Value>;
 pub type Settings<T> = settings::Settings<T>;

@@ -100,11 +96,7 @@ impl V4Reader {
         Ok(self.index_uuid.iter().map(|index| -> Result<_> {
             Ok(V4IndexReader::new(
                 index.uid.clone(),
-                &self
-                    .dump
-                    .path()
-                    .join("indexes")
-                    .join(index.index_meta.uuid.to_string()),
+                &self.dump.path().join("indexes").join(index.index_meta.uuid.to_string()),
             )?)
         }))
     }

@@ -139,9 +131,7 @@ impl V4Reader {

     pub fn keys(&mut self) -> Box<dyn Iterator<Item = Result<Key>> + '_> {
         Box::new(
-            (&mut self.keys)
-                .lines()
-                .map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }),
+            (&mut self.keys).lines().map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }),
         )
     }
 }

@@ -196,9 +186,7 @@ pub struct UpdateFile {

 impl UpdateFile {
     fn new(path: &Path) -> Result<Self> {
-        Ok(UpdateFile {
-            reader: BufReader::new(File::open(path)?),
-        })
+        Ok(UpdateFile { reader: BufReader::new(File::open(path)?) })
     }
 }

@@ -218,7 +206,8 @@ impl Iterator for UpdateFile {

 #[cfg(test)]
 pub(crate) mod test {
-    use std::{fs::File, io::BufReader};
+    use std::fs::File;
+    use std::io::BufReader;

     use flate2::bufread::GzDecoder;
     use tempfile::TempDir;

@@ -248,11 +237,7 @@ pub(crate) mod test {
         assert!(update_files[0].is_some()); // the enqueued document addition
         assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed

-        let update_file = update_files
-            .remove(0)
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let update_file = update_files.remove(0).unwrap().collect::<Result<Vec<_>>>().unwrap();
         meili_snap::snapshot_hash!(meili_snap::json_string!(update_file), @"7b8889539b669c7b9ddba448bafa385d");

         // keys

@@ -280,11 +265,7 @@ pub(crate) mod test {
         "###);

         meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"ace6546a6eb856ecb770b2409975c01d");
-        let documents = products
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b01c8371aea4c7171af0d4d846a2bdca");

@@ -299,11 +280,7 @@ pub(crate) mod test {
         "###);

         meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"4dfa34fa34f2c03259482e1e4555faa8");
-        let documents = movies
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 110);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"786022a66ecb992c8a2a60fee070a5ab");

@@ -318,11 +295,7 @@ pub(crate) mod test {
         "###);

         meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"1aa241a5e3afd8c85a4e7b9db42362d7");
-        let documents = spells
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
     }
@@ -1,8 +1,6 @@
-use std::{
-    collections::{BTreeMap, BTreeSet},
-    marker::PhantomData,
-    num::NonZeroUsize,
-};
+use std::collections::{BTreeMap, BTreeSet};
+use std::marker::PhantomData;
+use std::num::NonZeroUsize;

 use serde::{Deserialize, Deserializer};

@@ -65,10 +63,7 @@ pub struct TypoSettings {
 #[cfg_attr(test, derive(serde::Serialize))]
 #[serde(deny_unknown_fields)]
 #[serde(rename_all = "camelCase")]
-#[serde(bound(
-    serialize = "T: serde::Serialize",
-    deserialize = "T: Deserialize<'static>"
-))]
+#[serde(bound(serialize = "T: serde::Serialize", deserialize = "T: Deserialize<'static>"))]
 pub struct Settings<T> {
     #[serde(
         default,
@@ -2,11 +2,9 @@ use serde::Deserialize;
 use time::OffsetDateTime;
 use uuid::Uuid;

-use super::{
-    errors::ResponseError,
-    meta::IndexUid,
-    settings::{Settings, Unchecked},
-};
+use super::errors::ResponseError;
+use super::meta::IndexUid;
+use super::settings::{Settings, Unchecked};

 pub type TaskId = u32;
 pub type BatchId = u32;

@@ -109,10 +107,9 @@ impl Task {
     /// Return the content_uuid of the `Task` if there is one.
     pub fn get_content_uuid(&self) -> Option<Uuid> {
         match self {
-            Task {
-                content: TaskContent::DocumentAddition { content_uuid, .. },
-                ..
-            } => Some(*content_uuid),
+            Task { content: TaskContent::DocumentAddition { content_uuid, .. }, .. } => {
+                Some(*content_uuid)
+            }
             _ => None,
         }
     }
@@ -142,10 +142,9 @@ impl Code {

             BadParameter => ErrCode::invalid("bad_parameter", StatusCode::BAD_REQUEST),
             BadRequest => ErrCode::invalid("bad_request", StatusCode::BAD_REQUEST),
-            DatabaseSizeLimitReached => ErrCode::internal(
-                "database_size_limit_reached",
-                StatusCode::INTERNAL_SERVER_ERROR,
-            ),
+            DatabaseSizeLimitReached => {
+                ErrCode::internal("database_size_limit_reached", StatusCode::INTERNAL_SERVER_ERROR)
+            }
             DocumentNotFound => ErrCode::invalid("document_not_found", StatusCode::NOT_FOUND),
             Internal => ErrCode::internal("internal", StatusCode::INTERNAL_SERVER_ERROR),
             InvalidGeoField => ErrCode::invalid("invalid_geo_field", StatusCode::BAD_REQUEST),

@@ -241,27 +240,15 @@ struct ErrCode {

 impl ErrCode {
     fn authentication(error_name: &'static str, status_code: StatusCode) -> ErrCode {
-        ErrCode {
-            status_code,
-            error_name,
-            error_type: ErrorType::AuthenticationError,
-        }
+        ErrCode { status_code, error_name, error_type: ErrorType::AuthenticationError }
     }

     fn internal(error_name: &'static str, status_code: StatusCode) -> ErrCode {
-        ErrCode {
-            status_code,
-            error_name,
-            error_type: ErrorType::InternalError,
-        }
+        ErrCode { status_code, error_name, error_type: ErrorType::InternalError }
     }

     fn invalid(error_name: &'static str, status_code: StatusCode) -> ErrCode {
-        ErrCode {
-            status_code,
-            error_name,
-            error_type: ErrorType::InvalidRequestError,
-        }
+        ErrCode { status_code, error_name, error_type: ErrorType::InvalidRequestError }
     }
 }

@@ -1,10 +1,9 @@
-use std::{
-    fmt::{self, Display, Formatter},
-    marker::PhantomData,
-    str::FromStr,
-};
+use std::fmt::{self, Display, Formatter};
+use std::marker::PhantomData;
+use std::str::FromStr;

-use serde::{de::Visitor, Deserialize, Deserializer};
+use serde::de::Visitor;
+use serde::{Deserialize, Deserializer};
 use uuid::Uuid;

 use super::settings::{Settings, Unchecked};

@@ -39,9 +38,7 @@ impl TryFrom<String> for IndexUid {
     type Error = IndexUidFormatError;

     fn try_from(uid: String) -> Result<Self, Self::Error> {
-        if !uid
-            .chars()
-            .all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_')
+        if !uid.chars().all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_')
             || uid.is_empty()
             || uid.len() > 400
         {
@@ -32,21 +32,19 @@
 //! ```
 //!

-use std::{
-    fs::{self, File},
-    io::{BufRead, BufReader, Seek, SeekFrom},
-    path::Path,
-};
+use std::fs::{self, File};
+use std::io::{BufRead, BufReader, Seek, SeekFrom};
+use std::path::Path;

 use serde::{Deserialize, Serialize};
 use tempfile::TempDir;
 use time::OffsetDateTime;
 use uuid::Uuid;

+use super::compat::v5_to_v6::CompatV5ToV6;
+use super::Document;
 use crate::{Error, IndexMetadata, Result, Version};

-use super::{compat::v5_to_v6::CompatV5ToV6, Document};
-
 pub mod errors;
 pub mod keys;
 pub mod meta;

@@ -139,11 +137,7 @@ impl V5Reader {
         Ok(self.index_uuid.iter().map(|index| -> Result<_> {
             Ok(V5IndexReader::new(
                 index.uid.clone(),
-                &self
-                    .dump
-                    .path()
-                    .join("indexes")
-                    .join(index.index_meta.uuid.to_string()),
+                &self.dump.path().join("indexes").join(index.index_meta.uuid.to_string()),
             )?)
         }))
     }

@@ -178,9 +172,9 @@ impl V5Reader {

     pub fn keys(&mut self) -> Result<Box<dyn Iterator<Item = Result<Key>> + '_>> {
         self.keys.seek(SeekFrom::Start(0))?;
-        Ok(Box::new((&mut self.keys).lines().map(
-            |line| -> Result<_> { Ok(serde_json::from_str(&line?)?) },
-        )))
+        Ok(Box::new(
+            (&mut self.keys).lines().map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }),
+        ))
     }
 }

@@ -234,9 +228,7 @@ pub struct UpdateFile {

 impl UpdateFile {
     fn new(path: &Path) -> Result<Self> {
-        Ok(UpdateFile {
-            reader: BufReader::new(File::open(path)?),
-        })
+        Ok(UpdateFile { reader: BufReader::new(File::open(path)?) })
     }
 }

@@ -256,7 +248,8 @@ impl Iterator for UpdateFile {

 #[cfg(test)]
 pub(crate) mod test {
-    use std::{fs::File, io::BufReader};
+    use std::fs::File;
+    use std::io::BufReader;

     use flate2::bufread::GzDecoder;
     use tempfile::TempDir;

@@ -287,11 +280,7 @@ pub(crate) mod test {
         assert!(update_files[1].is_some()); // the enqueued document addition
         assert!(update_files[2..].iter().all(|u| u.is_none())); // everything already processed

-        let update_file = update_files
-            .remove(1)
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let update_file = update_files.remove(1).unwrap().collect::<Result<Vec<_>>>().unwrap();
         meili_snap::snapshot_hash!(meili_snap::json_string!(update_file), @"7b8889539b669c7b9ddba448bafa385d");

         // keys

@@ -319,11 +308,7 @@ pub(crate) mod test {
         "###);

         meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"9896a66a399c24a0f4f6a3c8563cd14a");
-        let documents = products
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b01c8371aea4c7171af0d4d846a2bdca");

@@ -338,11 +323,7 @@ pub(crate) mod test {
         "###);

         meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"d0dc7efd1360f95fce57d7931a70b7c9");
-        let documents = movies
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 200);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"e962baafd2fbae4cdd14e876053b0c5a");

@@ -357,11 +338,7 @@ pub(crate) mod test {
         "###);

         meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"59c8e30c2022897987ea7b4394167b06");
-        let documents = spells
-            .documents()
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
+        let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
     }
@@ -1,7 +1,5 @@
-use std::{
-    collections::{BTreeMap, BTreeSet},
-    marker::PhantomData,
-};
+use std::collections::{BTreeMap, BTreeSet};
+use std::marker::PhantomData;

 use serde::{Deserialize, Deserializer, Serialize};

@@ -2,11 +2,9 @@ use serde::Deserialize;
 use time::{Duration, OffsetDateTime};
 use uuid::Uuid;

-use super::{
-    errors::ResponseError,
-    meta::IndexUid,
-    settings::{Settings, Unchecked},
-};
+use super::errors::ResponseError;
+use super::meta::IndexUid;
+use super::settings::{Settings, Unchecked};

 pub type TaskId = u32;
 pub type BatchId = u32;

@@ -117,20 +115,16 @@ impl Task {
     /// A task is finished when its last state is either `Succeeded` or `Failed`.
     pub fn is_finished(&self) -> bool {
         self.events.last().map_or(false, |event| {
-            matches!(
-                event,
-                TaskEvent::Succeeded { .. } | TaskEvent::Failed { .. }
-            )
+            matches!(event, TaskEvent::Succeeded { .. } | TaskEvent::Failed { .. })
         })
     }

     /// Return the content_uuid of the `Task` if there is one.
     pub fn get_content_uuid(&self) -> Option<Uuid> {
         match self {
-            Task {
-                content: TaskContent::DocumentAddition { content_uuid, .. },
-                ..
-            } => Some(*content_uuid),
+            Task { content: TaskContent::DocumentAddition { content_uuid, .. }, .. } => {
+                Some(*content_uuid)
+            }
             _ => None,
         }
     }

@@ -184,31 +178,19 @@ pub struct TaskView {
     pub duration: Option<Duration>,
     #[cfg_attr(test, serde(serialize_with = "time::serde::rfc3339::serialize"))]
     pub enqueued_at: OffsetDateTime,
-    #[cfg_attr(
-        test,
-        serde(serialize_with = "time::serde::rfc3339::option::serialize")
-    )]
+    #[cfg_attr(test, serde(serialize_with = "time::serde::rfc3339::option::serialize"))]
     pub started_at: Option<OffsetDateTime>,
-    #[cfg_attr(
-        test,
-        serde(serialize_with = "time::serde::rfc3339::option::serialize")
-    )]
+    #[cfg_attr(test, serde(serialize_with = "time::serde::rfc3339::option::serialize"))]
     pub finished_at: Option<OffsetDateTime>,
 }

 impl From<Task> for TaskView {
     fn from(task: Task) -> Self {
         let index_uid = task.index_uid().map(String::from);
-        let Task {
-            id,
-            content,
-            events,
-        } = task;
+        let Task { id, content, events } = task;

         let (task_type, mut details) = match content {
-            TaskContent::DocumentAddition {
-                documents_count, ..
-            } => {
+            TaskContent::DocumentAddition { documents_count, .. } => {
                 let details = TaskDetails::DocumentAddition {
                     received_documents: documents_count,
                     indexed_documents: None,

@@ -216,47 +198,32 @@ impl From<Task> for TaskView {

                 (TaskType::DocumentAdditionOrUpdate, Some(details))
             }
-            TaskContent::DocumentDeletion {
-                deletion: DocumentDeletion::Ids(ids),
-                ..
-            } => (
+            TaskContent::DocumentDeletion { deletion: DocumentDeletion::Ids(ids), .. } => (
                 TaskType::DocumentDeletion,
                 Some(TaskDetails::DocumentDeletion {
                     received_document_ids: ids.len(),
                     deleted_documents: None,
                 }),
             ),
-            TaskContent::DocumentDeletion {
-                deletion: DocumentDeletion::Clear,
-                ..
-            } => (
+            TaskContent::DocumentDeletion { deletion: DocumentDeletion::Clear, .. } => (
                 TaskType::DocumentDeletion,
-                Some(TaskDetails::ClearAll {
-                    deleted_documents: None,
-                }),
-            ),
-            TaskContent::IndexDeletion { .. } => (
-                TaskType::IndexDeletion,
-                Some(TaskDetails::ClearAll {
-                    deleted_documents: None,
-                }),
-            ),
-            TaskContent::SettingsUpdate { settings, .. } => (
-                TaskType::SettingsUpdate,
-                Some(TaskDetails::Settings { settings }),
-            ),
-            TaskContent::IndexCreation { primary_key, .. } => (
-                TaskType::IndexCreation,
-                Some(TaskDetails::IndexInfo { primary_key }),
-            ),
-            TaskContent::IndexUpdate { primary_key, .. } => (
-                TaskType::IndexUpdate,
-                Some(TaskDetails::IndexInfo { primary_key }),
-            ),
-            TaskContent::Dump { uid } => (
-                TaskType::DumpCreation,
-                Some(TaskDetails::Dump { dump_uid: uid }),
+                Some(TaskDetails::ClearAll { deleted_documents: None }),
             ),
+            TaskContent::IndexDeletion { .. } => {
+                (TaskType::IndexDeletion, Some(TaskDetails::ClearAll { deleted_documents: None }))
+            }
+            TaskContent::SettingsUpdate { settings, .. } => {
+                (TaskType::SettingsUpdate, Some(TaskDetails::Settings { settings }))
+            }
+            TaskContent::IndexCreation { primary_key, .. } => {
+                (TaskType::IndexCreation, Some(TaskDetails::IndexInfo { primary_key }))
+            }
+            TaskContent::IndexUpdate { primary_key, .. } => {
+                (TaskType::IndexUpdate, Some(TaskDetails::IndexInfo { primary_key }))
+            }
+            TaskContent::Dump { uid } => {
+                (TaskType::DumpCreation, Some(TaskDetails::Dump { dump_uid: uid }))
+            }
         };

         // An event always has at least one event: "Created"

@@ -267,36 +234,20 @@ impl From<Task> for TaskView {
                 TaskEvent::Succeeded { timestamp, result } => {
                     match (result, &mut details) {
                         (
-                            TaskResult::DocumentAddition {
-                                indexed_documents: num,
-                                ..
-                            },
-                            Some(TaskDetails::DocumentAddition {
-                                ref mut indexed_documents,
-                                ..
-                            }),
+                            TaskResult::DocumentAddition { indexed_documents: num, .. },
+                            Some(TaskDetails::DocumentAddition { ref mut indexed_documents, .. }),
                         ) => {
                             indexed_documents.replace(*num);
                         }
                         (
-                            TaskResult::DocumentDeletion {
-                                deleted_documents: docs,
-                                ..
-                            },
-                            Some(TaskDetails::DocumentDeletion {
-                                ref mut deleted_documents,
-                                ..
-                            }),
+                            TaskResult::DocumentDeletion { deleted_documents: docs, .. },
+                            Some(TaskDetails::DocumentDeletion { ref mut deleted_documents, .. }),
                         ) => {
                             deleted_documents.replace(*docs);
                         }
                         (
-                            TaskResult::ClearAll {
-                                deleted_documents: docs,
-                            },
-                            Some(TaskDetails::ClearAll {
-                                ref mut deleted_documents,
-                            }),
+                            TaskResult::ClearAll { deleted_documents: docs },
+                            Some(TaskDetails::ClearAll { ref mut deleted_documents }),
                         ) => {
                             deleted_documents.replace(*docs);
                         }

@@ -306,22 +257,13 @@ impl From<Task> for TaskView {
                 }
                 TaskEvent::Failed { timestamp, error } => {
                     match details {
-                        Some(TaskDetails::DocumentDeletion {
-                            ref mut deleted_documents,
-                            ..
-                        }) => {
+                        Some(TaskDetails::DocumentDeletion { ref mut deleted_documents, .. }) => {
                             deleted_documents.replace(0);
                         }
-                        Some(TaskDetails::ClearAll {
-                            ref mut deleted_documents,
-                            ..
-                        }) => {
+                        Some(TaskDetails::ClearAll { ref mut deleted_documents, .. }) => {
                             deleted_documents.replace(0);
                         }
-                        Some(TaskDetails::DocumentAddition {
-                            ref mut indexed_documents,
-                            ..
-                        }) => {
+                        Some(TaskDetails::DocumentAddition { ref mut indexed_documents, .. }) => {
                             indexed_documents.replace(0);
                         }
                         _ => (),

@@ -400,10 +342,7 @@ pub enum TaskStatus {
 #[allow(clippy::large_enum_variant)]
 pub enum TaskDetails {
     #[cfg_attr(test, serde(rename_all = "camelCase"))]
-    DocumentAddition {
-        received_documents: usize,
-        indexed_documents: Option<u64>,
-    },
+    DocumentAddition { received_documents: usize, indexed_documents: Option<u64> },
     #[cfg_attr(test, serde(rename_all = "camelCase"))]
     Settings {
         #[cfg_attr(test, serde(flatten))]

@@ -412,10 +351,7 @@ pub enum TaskDetails {
     #[cfg_attr(test, serde(rename_all = "camelCase"))]
     IndexInfo { primary_key: Option<String> },
     #[cfg_attr(test, serde(rename_all = "camelCase"))]
-    DocumentDeletion {
-        received_document_ids: usize,
-        deleted_documents: Option<u64>,
-    },
+    DocumentDeletion { received_document_ids: usize, deleted_documents: Option<u64> },
     #[cfg_attr(test, serde(rename_all = "camelCase"))]
     ClearAll { deleted_documents: Option<u64> },
     #[cfg_attr(test, serde(rename_all = "camelCase"))]
@@ -1,19 +1,15 @@
-use std::{
-    fs::{self, File},
-    io::{BufRead, BufReader},
-    path::Path,
-    str::FromStr,
-};
+use std::fs::{self, File};
+use std::io::{BufRead, BufReader};
+use std::path::Path;
+use std::str::FromStr;

+pub use meilisearch_types::milli;
 use tempfile::TempDir;
 use time::OffsetDateTime;
 use uuid::Uuid;

-use crate::{Error, IndexMetadata, Result, Version};
-
-pub use meilisearch_types::milli;
-
 use super::Document;
+use crate::{Error, IndexMetadata, Result, Version};

 pub type Metadata = crate::Metadata;

@@ -89,11 +85,7 @@ impl V6Reader {
             let entry = entry?;
             if entry.file_type()?.is_dir() {
                 let index = V6IndexReader::new(
-                    entry
-                        .file_name()
-                        .to_str()
-                        .ok_or(Error::BadIndexName)?
-                        .to_string(),
+                    entry.file_name().to_str().ok_or(Error::BadIndexName)?.to_string(),
                     &entry.path(),
                 )?;
                 Ok(Some(index))

@@ -132,9 +124,7 @@ impl V6Reader {

     pub fn keys(&mut self) -> Box<dyn Iterator<Item = Result<Key>> + '_> {
         Box::new(
-            (&mut self.keys)
-                .lines()
-                .map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }),
+            (&mut self.keys).lines().map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }),
         )
     }
 }

@@ -145,9 +135,7 @@ pub struct UpdateFile {

 impl UpdateFile {
     fn new(path: &Path) -> Result<Self> {
-        Ok(UpdateFile {
-            reader: BufReader::new(File::open(path)?),
-        })
+        Ok(UpdateFile { reader: BufReader::new(File::open(path)?) })
     }
 }

@@ -1,20 +1,18 @@
-use std::{
-    fs::{self, File},
-    io::{BufWriter, Write},
-    path::PathBuf,
-};
+use std::fs::{self, File};
+use std::io::{BufWriter, Write};
+use std::path::PathBuf;

-use flate2::{write::GzEncoder, Compression};
-use meilisearch_types::{
-    keys::Key,
-    settings::{Checked, Settings},
-};
+use flate2::write::GzEncoder;
+use flate2::Compression;
+use meilisearch_types::keys::Key;
+use meilisearch_types::settings::{Checked, Settings};
 use serde_json::{Map, Value};
 use tempfile::TempDir;
 use time::OffsetDateTime;
 use uuid::Uuid;

-use crate::{reader::Document, IndexMetadata, Metadata, Result, TaskDump, CURRENT_DUMP_VERSION};
+use crate::reader::Document;
+use crate::{IndexMetadata, Metadata, Result, TaskDump, CURRENT_DUMP_VERSION};

 pub struct DumpWriter {
     dir: TempDir,

@@ -36,10 +34,7 @@ impl DumpWriter {
             db_version: env!("CARGO_PKG_VERSION").to_string(),
             dump_date: OffsetDateTime::now_utc(),
         };
-        fs::write(
-            dir.path().join("metadata.json"),
-            serde_json::to_string(&metadata)?,
-        )?;
+        fs::write(dir.path().join("metadata.json"), serde_json::to_string(&metadata)?)?;

         std::fs::create_dir(&dir.path().join("indexes"))?;

@@ -77,9 +72,7 @@ pub struct KeyWriter {
 impl KeyWriter {
     pub(crate) fn new(path: PathBuf) -> Result<Self> {
         let keys = File::create(path.join("keys.jsonl"))?;
-        Ok(KeyWriter {
-            keys: BufWriter::new(keys),
-        })
+        Ok(KeyWriter { keys: BufWriter::new(keys) })
     }

     pub fn push_key(&mut self, key: &Key) -> Result<()> {

@@ -107,10 +100,7 @@ impl TaskWriter {
         let update_files = path.join("update_files");
         std::fs::create_dir(&update_files)?;

-        Ok(TaskWriter {
-            queue: BufWriter::new(queue),
-            update_files,
-        })
+        Ok(TaskWriter { queue: BufWriter::new(queue), update_files })
     }

     /// Pushes tasks in the dump.

@@ -119,9 +109,7 @@ impl TaskWriter {
         self.queue.write_all(&serde_json::to_vec(task)?)?;
         self.queue.write_all(b"\n")?;

-        Ok(UpdateFile::new(
-            self.update_files.join(format!("{}.jsonl", task.uid)),
-        ))
+        Ok(UpdateFile::new(self.update_files.join(format!("{}.jsonl", task.uid))))
     }

     pub fn flush(mut self) -> Result<()> {

@@ -175,10 +163,7 @@ impl IndexWriter {
         let documents = File::create(path.join("documents.jsonl"))?;
         let settings = File::create(path.join("settings.json"))?;

-        Ok(IndexWriter {
-            documents: BufWriter::new(documents),
-            settings,
-        })
+        Ok(IndexWriter { documents: BufWriter::new(documents), settings })
     }

     pub fn push_document(&mut self, document: &Map<String, Value>) -> Result<()> {

@@ -200,20 +185,20 @@ impl IndexWriter {

 #[cfg(test)]
 pub(crate) mod test {
-    use std::{fmt::Write, io::BufReader, path::Path, str::FromStr};
+    use std::fmt::Write;
+    use std::io::BufReader;
+    use std::path::Path;
+    use std::str::FromStr;

     use flate2::bufread::GzDecoder;
     use meilisearch_types::settings::Unchecked;

-    use crate::{
-        reader::Document,
-        test::{
-            create_test_api_keys, create_test_documents, create_test_dump,
-            create_test_instance_uid, create_test_settings, create_test_tasks,
-        },
-    };
-
     use super::*;
+    use crate::reader::Document;
+    use crate::test::{
+        create_test_api_keys, create_test_documents, create_test_dump, create_test_instance_uid,
+        create_test_settings, create_test_tasks,
+    };

     fn create_directory_hierarchy(dir: &Path) -> String {
         let mut ret = String::new();

@@ -226,10 +211,8 @@ pub(crate) mod test {
         let mut ret = String::new();

         // the entries are not guarenteed to be returned in the same order thus we need to sort them.
-        let mut entries = fs::read_dir(dir)
-            .unwrap()
-            .collect::<std::result::Result<Vec<_>, _>>()
-            .unwrap();
+        let mut entries =
+            fs::read_dir(dir).unwrap().collect::<std::result::Result<Vec<_>, _>>().unwrap();

         // I want the directories first and then sort by name.
         entries.sort_by(|a, b| {

@@ -317,18 +300,12 @@ pub(crate) mod test {
         "###);

         let instance_uid = fs::read_to_string(dump_path.join("instance_uid.uuid")).unwrap();
-        assert_eq!(
-            Uuid::from_str(&instance_uid).unwrap(),
-            create_test_instance_uid()
-        );
+        assert_eq!(Uuid::from_str(&instance_uid).unwrap(), create_test_instance_uid());

         // ==== checking the index
         let docs = fs::read_to_string(dump_path.join("indexes/doggos/documents.jsonl")).unwrap();
         for (document, expected) in docs.lines().zip(create_test_documents()) {
-            assert_eq!(
-                serde_json::from_str::<Map<String, Value>>(document).unwrap(),
-                expected
-            );
+            assert_eq!(serde_json::from_str::<Map<String, Value>>(document).unwrap(), expected);
         }
         let test_settings =
             fs::read_to_string(dump_path.join("indexes/doggos/settings.json")).unwrap();

@@ -356,10 +333,8 @@ pub(crate) mod test {
             let path = dump_path.join(format!("tasks/update_files/{}.jsonl", expected.0.uid));
             println!("trying to open {}", path.display());
             let update = fs::read_to_string(path).unwrap();
-            let documents: Vec<Document> = update
-                .lines()
-                .map(|line| serde_json::from_str(line).unwrap())
-                .collect();
+            let documents: Vec<Document> =
+                update.lines().map(|line| serde_json::from_str(line).unwrap()).collect();
             assert_eq!(documents, expected_update);
         }
     }
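The writer hunks above apply the same single-line preference to struct literals: constructors such as `Ok(KeyWriter { keys: BufWriter::new(keys) })` stay on one line whenever they fit. A generic sketch of the rule, with a hypothetical type:

    // Before: one field per line.
    Ok(Writer {
        output: BufWriter::new(file),
    })

    // After: collapsed, since the whole literal fits within max_width.
    Ok(Writer { output: BufWriter::new(file) })

Because the rustfmt options involved are still unstable, formatting the repository this way needs a nightly toolchain, e.g. `cargo +nightly fmt`.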
@ -5,11 +5,12 @@ tasks affecting a single index into a [batch](crate::batch::Batch).
|
|||||||
The main function of the autobatcher is [`next_autobatch`].
|
The main function of the autobatcher is [`next_autobatch`].
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
use std::ops::ControlFlow::{self, Break, Continue};
|
||||||
|
|
||||||
use meilisearch_types::milli::update::IndexDocumentsMethod::{
|
use meilisearch_types::milli::update::IndexDocumentsMethod::{
|
||||||
self, ReplaceDocuments, UpdateDocuments,
|
self, ReplaceDocuments, UpdateDocuments,
|
||||||
};
|
};
|
||||||
use meilisearch_types::tasks::TaskId;
|
use meilisearch_types::tasks::TaskId;
|
||||||
use std::ops::ControlFlow::{self, Break, Continue};
|
|
||||||
|
|
||||||
use crate::KindWithContent;
|
use crate::KindWithContent;
|
||||||
|
|
||||||
@ -18,15 +19,10 @@ use crate::KindWithContent;
|
|||||||
///
|
///
|
||||||
/// Only the non-prioritised tasks that can be grouped in a batch have a corresponding [`AutobatchKind`]
|
/// Only the non-prioritised tasks that can be grouped in a batch have a corresponding [`AutobatchKind`]
|
||||||
enum AutobatchKind {
|
enum AutobatchKind {
|
||||||
DocumentImport {
|
DocumentImport { method: IndexDocumentsMethod, allow_index_creation: bool },
|
||||||
method: IndexDocumentsMethod,
|
|
||||||
allow_index_creation: bool,
|
|
||||||
},
|
|
||||||
DocumentDeletion,
|
DocumentDeletion,
|
||||||
DocumentClear,
|
DocumentClear,
|
||||||
Settings {
|
Settings { allow_index_creation: bool },
|
||||||
allow_index_creation: bool,
|
|
||||||
},
|
|
||||||
IndexCreation,
|
IndexCreation,
|
||||||
IndexDeletion,
|
IndexDeletion,
|
||||||
IndexUpdate,
|
IndexUpdate,
|
||||||
@@ -47,23 +43,16 @@ impl AutobatchKind {
 impl From<KindWithContent> for AutobatchKind {
     fn from(kind: KindWithContent) -> Self {
         match kind {
-            KindWithContent::DocumentImport {
-                method,
-                allow_index_creation,
-                ..
-            } => AutobatchKind::DocumentImport {
-                method,
-                allow_index_creation,
-            },
+            KindWithContent::DocumentImport { method, allow_index_creation, .. } => {
+                AutobatchKind::DocumentImport { method, allow_index_creation }
+            }
             KindWithContent::DocumentDeletion { .. } => AutobatchKind::DocumentDeletion,
             KindWithContent::DocumentClear { .. } => AutobatchKind::DocumentClear,
-            KindWithContent::Settings {
-                allow_index_creation,
-                is_deletion,
-                ..
-            } => AutobatchKind::Settings {
-                allow_index_creation: allow_index_creation && !is_deletion,
-            },
+            KindWithContent::Settings { allow_index_creation, is_deletion, .. } => {
+                AutobatchKind::Settings {
+                    allow_index_creation: allow_index_creation && !is_deletion,
+                }
+            }
             KindWithContent::IndexDeletion { .. } => AutobatchKind::IndexDeletion,
             KindWithContent::IndexCreation { .. } => AutobatchKind::IndexCreation,
             KindWithContent::IndexUpdate { .. } => AutobatchKind::IndexUpdate,
@@ -147,20 +136,11 @@ impl BatchKind {
 
         match AutobatchKind::from(kind) {
             K::IndexCreation => (Break(BatchKind::IndexCreation { id: task_id }), true),
-            K::IndexDeletion => (
-                Break(BatchKind::IndexDeletion { ids: vec![task_id] }),
-                false,
-            ),
+            K::IndexDeletion => (Break(BatchKind::IndexDeletion { ids: vec![task_id] }), false),
             K::IndexUpdate => (Break(BatchKind::IndexUpdate { id: task_id }), false),
             K::IndexSwap => (Break(BatchKind::IndexSwap { id: task_id }), false),
-            K::DocumentClear => (
-                Continue(BatchKind::DocumentClear { ids: vec![task_id] }),
-                false,
-            ),
-            K::DocumentImport {
-                method,
-                allow_index_creation,
-            } => (
+            K::DocumentClear => (Continue(BatchKind::DocumentClear { ids: vec![task_id] }), false),
+            K::DocumentImport { method, allow_index_creation } => (
                 Continue(BatchKind::DocumentImport {
                     method,
                     allow_index_creation,
@@ -168,19 +148,11 @@ impl BatchKind {
                 }),
                 allow_index_creation,
             ),
-            K::DocumentDeletion => (
-                Continue(BatchKind::DocumentDeletion {
-                    deletion_ids: vec![task_id],
-                }),
-                false,
-            ),
-            K::Settings {
-                allow_index_creation,
-            } => (
-                Continue(BatchKind::Settings {
-                    allow_index_creation,
-                    settings_ids: vec![task_id],
-                }),
+            K::DocumentDeletion => {
+                (Continue(BatchKind::DocumentDeletion { deletion_ids: vec![task_id] }), false)
+            }
+            K::Settings { allow_index_creation } => (
+                Continue(BatchKind::Settings { allow_index_creation, settings_ids: vec![task_id] }),
                 allow_index_creation,
             ),
         }
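All of the reflowed arms above share one shape: a `(ControlFlow<BatchKind, BatchKind>, bool)` pair, where `Break` closes the batch and the `bool` records whether the batch may create the index. A condensed, hypothetical sketch of that shape (simplified stand-in types, not the scheduler's):

use std::ops::ControlFlow::{self, Break, Continue};

#[derive(Debug)]
enum MiniBatch {
    IndexDeletion { ids: Vec<u32> },
    DocumentClear { ids: Vec<u32> },
}

// First-task step: the ControlFlow says whether the batch is already closed,
// the bool whether this batch is allowed to auto-create the index.
fn start_batch(task_id: u32, is_deletion: bool) -> (ControlFlow<MiniBatch, MiniBatch>, bool) {
    if is_deletion {
        // An index deletion can never absorb later tasks: Break closes the batch.
        (Break(MiniBatch::IndexDeletion { ids: vec![task_id] }), false)
    } else {
        // A document clear may still grow: Continue keeps the batch open.
        (Continue(MiniBatch::DocumentClear { ids: vec![task_id] }), false)
    }
}

fn main() {
    let (flow, allow_index_creation) = start_batch(0, true);
    println!("{flow:?} {allow_index_creation}");
}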
@@ -461,21 +433,17 @@ pub fn autobatch(
 
 #[cfg(test)]
 mod tests {
-    use crate::debug_snapshot;
+    use uuid::Uuid;
 
     use super::*;
-    use uuid::Uuid;
+    use crate::debug_snapshot;
 
     fn autobatch_from(
         index_already_exists: bool,
         input: impl IntoIterator<Item = KindWithContent>,
    ) -> Option<(BatchKind, bool)> {
        autobatch(
-            input
-                .into_iter()
-                .enumerate()
-                .map(|(id, kind)| (id as TaskId, kind.into()))
-                .collect(),
+            input.into_iter().enumerate().map(|(id, kind)| (id as TaskId, kind.into())).collect(),
            index_already_exists,
        )
    }
@@ -499,9 +467,7 @@ mod tests {
     }
 
     fn doc_clr() -> KindWithContent {
-        KindWithContent::DocumentClear {
-            index_uid: String::from("doggo"),
-        }
+        KindWithContent::DocumentClear { index_uid: String::from("doggo") }
     }
 
     fn settings(allow_index_creation: bool) -> KindWithContent {
@@ -514,29 +480,19 @@
     }
 
     fn idx_create() -> KindWithContent {
-        KindWithContent::IndexCreation {
-            index_uid: String::from("doggo"),
-            primary_key: None,
-        }
+        KindWithContent::IndexCreation { index_uid: String::from("doggo"), primary_key: None }
     }
 
     fn idx_update() -> KindWithContent {
-        KindWithContent::IndexUpdate {
-            index_uid: String::from("doggo"),
-            primary_key: None,
-        }
+        KindWithContent::IndexUpdate { index_uid: String::from("doggo"), primary_key: None }
     }
 
     fn idx_del() -> KindWithContent {
-        KindWithContent::IndexDeletion {
-            index_uid: String::from("doggo"),
-        }
+        KindWithContent::IndexDeletion { index_uid: String::from("doggo") }
     }
 
     fn idx_swap() -> KindWithContent {
-        KindWithContent::IndexSwap {
-            swaps: vec![(String::from("doggo"), String::from("catto"))],
-        }
+        KindWithContent::IndexSwap { swaps: vec![(String::from("doggo"), String::from("catto"))] }
     }
 
     #[test]
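`autobatch_from` above pairs each input kind with a sequential `TaskId` via `enumerate`. Stripped of the scheduler's types, the same collect looks like this (toy data, illustrative only):

fn main() {
    let kinds = ["index_creation", "document_clear", "index_deletion"];
    // Mirror of the collect inside autobatch_from: attach a u32 TaskId to each
    // input in enqueue order.
    let tasks: Vec<(u32, &str)> =
        kinds.into_iter().enumerate().map(|(id, kind)| (id as u32, kind)).collect();
    assert_eq!(tasks[2], (2, "index_deletion"));
}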
@@ -21,31 +21,26 @@ use std::collections::HashSet;
 use std::fs::File;
 use std::io::BufWriter;
 
-use crate::utils::{self, swap_index_uid_in_task};
-use crate::Query;
-use crate::{autobatcher::BatchKind, Error, IndexScheduler, Result, TaskId};
-
 use dump::IndexMetadata;
 use log::{debug, error, info};
-use meilisearch_types::milli::documents::obkv_to_object;
-use meilisearch_types::milli::update::IndexDocumentsConfig;
+use meilisearch_types::heed::{RoTxn, RwTxn};
+use meilisearch_types::milli::documents::{obkv_to_object, DocumentsBatchReader};
 use meilisearch_types::milli::update::{
-    DocumentAdditionResult, DocumentDeletionResult, IndexDocumentsMethod,
-};
-use meilisearch_types::milli::{
-    self, documents::DocumentsBatchReader, update::Settings as MilliSettings, BEU32,
+    DocumentAdditionResult, DocumentDeletionResult, IndexDocumentsConfig, IndexDocumentsMethod,
+    Settings as MilliSettings,
 };
+use meilisearch_types::milli::{self, BEU32};
 use meilisearch_types::settings::{apply_settings_to_builder, Settings, Unchecked};
 use meilisearch_types::tasks::{Details, Kind, KindWithContent, Status, Task};
-use meilisearch_types::{
-    heed::{RoTxn, RwTxn},
-    Index,
-};
+use meilisearch_types::Index;
 use roaring::RoaringBitmap;
 use time::OffsetDateTime;
 use uuid::Uuid;
 
+use crate::autobatcher::BatchKind;
+use crate::utils::{self, swap_index_uid_in_task};
+use crate::{Error, IndexScheduler, Query, Result, TaskId};
+
 /// Represents a combination of tasks that can all be processed at the same time.
 ///
 /// A batch contains the set of tasks that it represents (accessible through
@@ -57,28 +52,11 @@ pub(crate) enum Batch {
     TaskDeletion(Task),
     Snapshot(Vec<Task>),
     Dump(Task),
-    IndexOperation {
-        op: IndexOperation,
-        must_create_index: bool,
-    },
-    IndexCreation {
-        index_uid: String,
-        primary_key: Option<String>,
-        task: Task,
-    },
-    IndexUpdate {
-        index_uid: String,
-        primary_key: Option<String>,
-        task: Task,
-    },
-    IndexDeletion {
-        index_uid: String,
-        tasks: Vec<Task>,
-        index_has_been_created: bool,
-    },
-    IndexSwap {
-        task: Task,
-    },
+    IndexOperation { op: IndexOperation, must_create_index: bool },
+    IndexCreation { index_uid: String, primary_key: Option<String>, task: Task },
+    IndexUpdate { index_uid: String, primary_key: Option<String>, task: Task },
+    IndexDeletion { index_uid: String, tasks: Vec<Task>, index_has_been_created: bool },
+    IndexSwap { task: Task },
 }
 
 /// A [batch](Batch) that combines multiple tasks operating on an index.
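With every `Batch` variant now on a single line, exhaustive dispatch stays compact. A self-contained sketch of matching such struct-like variants (stand-in types, not the real `Batch`):

#[derive(Debug)]
enum MiniBatch {
    IndexCreation { index_uid: String, primary_key: Option<String> },
    IndexSwap { lhs: String, rhs: String },
}

fn describe(batch: &MiniBatch) -> String {
    match batch {
        MiniBatch::IndexCreation { index_uid, primary_key } => {
            format!("create {index_uid} (primary key: {primary_key:?})")
        }
        MiniBatch::IndexSwap { lhs, rhs } => format!("swap {lhs} <-> {rhs}"),
    }
}

fn main() {
    let create = MiniBatch::IndexCreation { index_uid: "doggo".into(), primary_key: None };
    let swap = MiniBatch::IndexSwap { lhs: "doggo".into(), rhs: "catto".into() };
    println!("{}", describe(&create));
    println!("{}", describe(&swap));
}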
@@ -212,9 +190,7 @@ impl IndexScheduler {
             for task in &tasks {
                 match task.kind {
                     KindWithContent::DocumentImport {
-                        content_file,
-                        documents_count,
-                        ..
+                        content_file, documents_count, ..
                     } => {
                         documents_counts.push(documents_count);
                         content_files.push(content_file);
@@ -241,19 +217,15 @@
             let mut documents = Vec::new();
             for task in &tasks {
                 match task.kind {
-                    KindWithContent::DocumentDeletion {
-                        ref documents_ids, ..
-                    } => documents.extend_from_slice(documents_ids),
+                    KindWithContent::DocumentDeletion { ref documents_ids, .. } => {
+                        documents.extend_from_slice(documents_ids)
+                    }
                     _ => unreachable!(),
                 }
             }
 
             Ok(Some(Batch::IndexOperation {
-                op: IndexOperation::DocumentDeletion {
-                    index_uid,
-                    documents,
-                    tasks,
-                },
+                op: IndexOperation::DocumentDeletion { index_uid, documents, tasks },
                 must_create_index,
             }))
         }
@@ -263,49 +235,30 @@
             let mut settings = Vec::new();
             for task in &tasks {
                 match task.kind {
-                    KindWithContent::Settings {
-                        ref new_settings,
-                        is_deletion,
-                        ..
-                    } => settings.push((is_deletion, new_settings.clone())),
+                    KindWithContent::Settings { ref new_settings, is_deletion, .. } => {
+                        settings.push((is_deletion, new_settings.clone()))
+                    }
                     _ => unreachable!(),
                 }
             }
 
             Ok(Some(Batch::IndexOperation {
-                op: IndexOperation::Settings {
-                    index_uid,
-                    settings,
-                    tasks,
-                },
+                op: IndexOperation::Settings { index_uid, settings, tasks },
                 must_create_index,
             }))
         }
-        BatchKind::ClearAndSettings {
-            other,
-            settings_ids,
-            allow_index_creation,
-        } => {
+        BatchKind::ClearAndSettings { other, settings_ids, allow_index_creation } => {
             let (index_uid, settings, settings_tasks) = match self
                 .create_next_batch_index(
                     rtxn,
                     index_uid,
-                    BatchKind::Settings {
-                        settings_ids,
-                        allow_index_creation,
-                    },
+                    BatchKind::Settings { settings_ids, allow_index_creation },
                     must_create_index,
                 )?
                 .unwrap()
             {
                 Batch::IndexOperation {
-                    op:
-                        IndexOperation::Settings {
-                            index_uid,
-                            settings,
-                            tasks,
-                            ..
-                        },
+                    op: IndexOperation::Settings { index_uid, settings, tasks, .. },
                     ..
                 } => (index_uid, settings, tasks),
                 _ => unreachable!(),
@@ -345,21 +298,14 @@
             let settings = self.create_next_batch_index(
                 rtxn,
                 index_uid.clone(),
-                BatchKind::Settings {
-                    settings_ids,
-                    allow_index_creation,
-                },
+                BatchKind::Settings { settings_ids, allow_index_creation },
                 must_create_index,
             )?;
 
             let document_import = self.create_next_batch_index(
                 rtxn,
                 index_uid.clone(),
-                BatchKind::DocumentImport {
-                    method,
-                    allow_index_creation,
-                    import_ids,
-                },
+                BatchKind::DocumentImport { method, allow_index_creation, import_ids },
                 must_create_index,
             )?;
 
@@ -377,12 +323,7 @@
                     ..
                 }),
                 Some(Batch::IndexOperation {
-                    op:
-                        IndexOperation::Settings {
-                            settings,
-                            tasks: settings_tasks,
-                            ..
-                        },
+                    op: IndexOperation::Settings { settings, tasks: settings_tasks, .. },
                     ..
                 }),
             ) => Ok(Some(Batch::IndexOperation {
@@ -404,17 +345,12 @@
         BatchKind::IndexCreation { id } => {
             let task = self.get_task(rtxn, id)?.ok_or(Error::CorruptedTaskQueue)?;
             let (index_uid, primary_key) = match &task.kind {
-                KindWithContent::IndexCreation {
-                    index_uid,
-                    primary_key,
-                } => (index_uid.clone(), primary_key.clone()),
+                KindWithContent::IndexCreation { index_uid, primary_key } => {
+                    (index_uid.clone(), primary_key.clone())
+                }
                 _ => unreachable!(),
             };
-            Ok(Some(Batch::IndexCreation {
-                index_uid,
-                primary_key,
-                task,
-            }))
+            Ok(Some(Batch::IndexCreation { index_uid, primary_key, task }))
         }
         BatchKind::IndexUpdate { id } => {
             let task = self.get_task(rtxn, id)?.ok_or(Error::CorruptedTaskQueue)?;
@@ -422,11 +358,7 @@
                 KindWithContent::IndexUpdate { primary_key, .. } => primary_key.clone(),
                 _ => unreachable!(),
             };
-            Ok(Some(Batch::IndexUpdate {
-                index_uid,
-                primary_key,
-                task,
-            }))
+            Ok(Some(Batch::IndexUpdate { index_uid, primary_key, task }))
         }
         BatchKind::IndexDeletion { ids } => Ok(Some(Batch::IndexDeletion {
             index_uid,
@@ -453,17 +385,14 @@
         // 1. we get the last task to cancel.
         if let Some(task_id) = to_cancel.max() {
             return Ok(Some(Batch::TaskCancelation(
-                self.get_task(rtxn, task_id)?
-                    .ok_or(Error::CorruptedTaskQueue)?,
+                self.get_task(rtxn, task_id)?.ok_or(Error::CorruptedTaskQueue)?,
             )));
         }
 
         // 2. we get the next task to delete
         let to_delete = self.get_kind(rtxn, Kind::TaskDeletion)? & enqueued;
         if let Some(task_id) = to_delete.min() {
-            let task = self
-                .get_task(rtxn, task_id)?
-                .ok_or(Error::CorruptedTaskQueue)?;
+            let task = self.get_task(rtxn, task_id)?.ok_or(Error::CorruptedTaskQueue)?;
 
             return Ok(Some(Batch::TaskDeletion(task)));
         }
@@ -471,25 +400,20 @@
         // 3. we batch the snapshot.
         let to_snapshot = self.get_kind(rtxn, Kind::Snapshot)? & enqueued;
         if !to_snapshot.is_empty() {
-            return Ok(Some(Batch::Snapshot(
-                self.get_existing_tasks(rtxn, to_snapshot)?,
-            )));
+            return Ok(Some(Batch::Snapshot(self.get_existing_tasks(rtxn, to_snapshot)?)));
         }
 
         // 4. we batch the dumps.
         let to_dump = self.get_kind(rtxn, Kind::DumpExport)? & enqueued;
         if let Some(to_dump) = to_dump.min() {
             return Ok(Some(Batch::Dump(
-                self.get_task(rtxn, to_dump)?
-                    .ok_or(Error::CorruptedTaskQueue)?,
+                self.get_task(rtxn, to_dump)?.ok_or(Error::CorruptedTaskQueue)?,
             )));
         }
 
         // 5. We take the next task and try to batch all the tasks associated with this index.
         if let Some(task_id) = enqueued.min() {
-            let task = self
-                .get_task(rtxn, task_id)?
-                .ok_or(Error::CorruptedTaskQueue)?;
+            let task = self.get_task(rtxn, task_id)?.ok_or(Error::CorruptedTaskQueue)?;
 
             // This is safe because all the remaining task are associated with
             // AT LEAST one index. We can use the right or left one it doesn't
@@ -500,11 +424,7 @@
             let index_tasks = self.index_tasks(rtxn, index_name)? & enqueued;
 
             // If autobatching is disabled we only take one task at a time.
-            let tasks_limit = if self.autobatching_enabled {
-                usize::MAX
-            } else {
-                1
-            };
+            let tasks_limit = if self.autobatching_enabled { usize::MAX } else { 1 };
 
             let enqueued = index_tasks
                 .into_iter()
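Task selection in the hunks above is bitmap algebra on `roaring` sets: `&` intersects a per-kind set with the enqueued set, and `min()`/`max()` pick a task id from the result. A minimal sketch with the `roaring` crate (the ids are illustrative):

use roaring::RoaringBitmap;

fn main() {
    let enqueued = RoaringBitmap::from_iter([1u32, 2, 3, 7]);
    let deletion_kind = RoaringBitmap::from_iter([3u32, 7, 9]);

    // Same shape as `self.get_kind(rtxn, Kind::TaskDeletion)? & enqueued`:
    // keep only the deletion tasks that are actually enqueued.
    let to_delete = &deletion_kind & &enqueued;
    assert_eq!(to_delete.min(), Some(3)); // the scheduler takes the oldest first
    assert_eq!(to_delete.max(), Some(7));
    assert!(!to_delete.is_empty());
}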
@@ -716,10 +636,7 @@
                 task.status = Status::Succeeded;
                 Ok(vec![task])
             }
-            Batch::IndexOperation {
-                op,
-                must_create_index,
-            } => {
+            Batch::IndexOperation { op, must_create_index } => {
                 let index_uid = op.index_uid();
                 let index = if must_create_index {
                     // create the index if it doesn't already exist
@@ -738,26 +655,14 @@
 
                 Ok(tasks)
             }
-            Batch::IndexCreation {
-                index_uid,
-                primary_key,
-                task,
-            } => {
+            Batch::IndexCreation { index_uid, primary_key, task } => {
                 let mut wtxn = self.env.write_txn()?;
                 self.index_mapper.create_index(&mut wtxn, &index_uid)?;
                 wtxn.commit()?;
 
-                self.process_batch(Batch::IndexUpdate {
-                    index_uid,
-                    primary_key,
-                    task,
-                })
+                self.process_batch(Batch::IndexUpdate { index_uid, primary_key, task })
             }
-            Batch::IndexUpdate {
-                index_uid,
-                primary_key,
-                mut task,
-            } => {
+            Batch::IndexUpdate { index_uid, primary_key, mut task } => {
                 let rtxn = self.env.read_txn()?;
                 let index = self.index_mapper.index(&rtxn, &index_uid)?;
 
@@ -781,11 +686,7 @@
 
                 Ok(vec![task])
             }
-            Batch::IndexDeletion {
-                index_uid,
-                index_has_been_created,
-                mut tasks,
-            } => {
+            Batch::IndexDeletion { index_uid, index_has_been_created, mut tasks } => {
                 let wtxn = self.env.write_txn()?;
 
                 // it's possible that the index doesn't exist
@@ -807,9 +708,9 @@
                 for task in &mut tasks {
                     task.status = Status::Succeeded;
                     task.details = match &task.kind {
-                        KindWithContent::IndexDeletion { .. } => Some(Details::ClearAll {
-                            deleted_documents: Some(number_of_documents),
-                        }),
+                        KindWithContent::IndexDeletion { .. } => {
+                            Some(Details::ClearAll { deleted_documents: Some(number_of_documents) })
+                        }
                         otherwise => otherwise.default_finished_details(),
                     };
                 }
@@ -855,9 +756,7 @@
 
         // 3. before_name -> new_name in the task's KindWithContent
         for task_id in &index_lhs_task_ids | &index_rhs_task_ids {
-            let mut task = self
-                .get_task(&wtxn, task_id)?
-                .ok_or(Error::CorruptedTaskQueue)?;
+            let mut task = self.get_task(&wtxn, task_id)?.ok_or(Error::CorruptedTaskQueue)?;
             swap_index_uid_in_task(&mut task, (lhs, rhs));
             self.all_tasks.put(wtxn, &BEU32::new(task_id), &task)?;
         }
@@ -902,9 +801,7 @@
                     KindWithContent::DocumentClear { .. } => {
                         let count = if first_clear_found { 0 } else { count };
                         first_clear_found = true;
-                        Some(Details::ClearAll {
-                            deleted_documents: Some(count),
-                        })
+                        Some(Details::ClearAll { deleted_documents: Some(count) })
                     }
                     otherwise => otherwise.default_details(),
                 };
@@ -935,10 +832,7 @@
                     }
                 }
 
-                let config = IndexDocumentsConfig {
-                    update_method: method,
-                    ..Default::default()
-                };
+                let config = IndexDocumentsConfig { update_method: method, ..Default::default() };
 
                 let mut builder = milli::update::IndexDocuments::new(
                     index_wtxn,
@@ -973,15 +867,11 @@
                     info!("document addition done: {:?}", addition);
                 }
 
-                for (task, (ret, count)) in tasks
-                    .iter_mut()
-                    .zip(results.into_iter().zip(documents_counts))
+                for (task, (ret, count)) in
+                    tasks.iter_mut().zip(results.into_iter().zip(documents_counts))
                 {
                     match ret {
-                        Ok(DocumentAdditionResult {
-                            indexed_documents,
-                            number_of_documents,
-                        }) => {
+                        Ok(DocumentAdditionResult { indexed_documents, number_of_documents }) => {
                             task.status = Status::Succeeded;
                             task.details = Some(Details::DocumentAddition {
                                 received_documents: number_of_documents,
@@ -1001,19 +891,13 @@
 
                 Ok(tasks)
             }
-            IndexOperation::DocumentDeletion {
-                index_uid: _,
-                documents,
-                mut tasks,
-            } => {
+            IndexOperation::DocumentDeletion { index_uid: _, documents, mut tasks } => {
                 let mut builder = milli::update::DeleteDocuments::new(index_wtxn, index)?;
                 documents.iter().for_each(|id| {
                     builder.delete_external_id(id);
                 });
 
-                let DocumentDeletionResult {
-                    deleted_documents, ..
-                } = builder.execute()?;
+                let DocumentDeletionResult { deleted_documents, .. } = builder.execute()?;
 
                 for (task, documents) in tasks.iter_mut().zip(documents) {
                     task.status = Status::Succeeded;
@@ -1025,11 +909,7 @@
 
                 Ok(tasks)
             }
-            IndexOperation::Settings {
-                index_uid: _,
-                settings,
-                mut tasks,
-            } => {
+            IndexOperation::Settings { index_uid: _, settings, mut tasks } => {
                 let indexer_config = self.index_mapper.indexer_config();
                 // TODO merge the settings to only do *one* reindexation.
                 for (task, (_, settings)) in tasks.iter_mut().zip(settings) {
@@ -1105,11 +985,7 @@
                 let settings_tasks = self.apply_index_operation(
                     index_wtxn,
                     index,
-                    IndexOperation::Settings {
-                        index_uid,
-                        settings,
-                        tasks: settings_tasks,
-                    },
+                    IndexOperation::Settings { index_uid, settings, tasks: settings_tasks },
                 )?;
 
                 let mut tasks = settings_tasks;
@@ -1139,9 +1015,7 @@
         let mut affected_kinds = HashSet::new();
 
         for task_id in to_delete_tasks.iter() {
-            let task = self
-                .get_task(wtxn, task_id)?
-                .ok_or(Error::CorruptedTaskQueue)?;
+            let task = self.get_task(wtxn, task_id)?.ok_or(Error::CorruptedTaskQueue)?;
             if let Some(task_indexes) = task.indexes() {
                 affected_indexes.extend(task_indexes.into_iter().map(|x| x.to_owned()));
            }
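The `IndexDocumentsConfig` change in the section above is ordinary struct-update syntax: override one field and take the rest from `Default`. A standalone sketch with a stand-in config struct (hypothetical fields, not milli's real ones):

#[derive(Debug, Default)]
struct MiniIndexDocumentsConfig {
    update_method: u8,
    autogenerate_ids: bool,
}

fn main() {
    // Override one field, inherit the rest, the same shape as
    // `IndexDocumentsConfig { update_method: method, ..Default::default() }`.
    let config = MiniIndexDocumentsConfig { update_method: 1, ..Default::default() };
    assert!(!config.autogenerate_ids);
    println!("{config:?}");
}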
@@ -1,6 +1,5 @@
 use meilisearch_types::error::{Code, ErrorCode};
-use meilisearch_types::heed;
-use meilisearch_types::milli;
+use meilisearch_types::{heed, milli};
 use thiserror::Error;
 
 use crate::TaskId;
@@ -122,10 +122,7 @@ impl IndexMapper {
             }
 
             // Finally we remove the entry from the index map.
-            assert!(matches!(
-                index_map.write().unwrap().remove(&uuid),
-                Some(BeingDeleted)
-            ));
+            assert!(matches!(index_map.write().unwrap().remove(&uuid), Some(BeingDeleted)));
         });
 
         Ok(())
@@ -183,8 +180,7 @@ impl IndexMapper {
             .iter(rtxn)?
             .map(|ret| {
                 ret.map_err(Error::from).and_then(|(name, _)| {
-                    self.index(rtxn, name)
-                        .map(|index| (name.to_string(), index))
+                    self.index(rtxn, name).map(|index| (name.to_string(), index))
                 })
             })
             .collect()
@@ -29,29 +29,28 @@ mod utils;
 pub type Result<T> = std::result::Result<T, Error>;
 pub type TaskId = u32;
 
-use dump::{KindDump, TaskDump, UpdateFile};
-pub use error::Error;
-use meilisearch_types::milli::documents::DocumentsBatchBuilder;
-use meilisearch_types::tasks::{Kind, KindWithContent, Status, Task};
-use utils::keep_tasks_within_datetimes;
-
 use std::path::PathBuf;
-use std::sync::atomic::{AtomicBool, Ordering::Relaxed};
+use std::sync::atomic::AtomicBool;
+use std::sync::atomic::Ordering::Relaxed;
 use std::sync::{Arc, RwLock};
 
+use dump::{KindDump, TaskDump, UpdateFile};
+pub use error::Error;
 use file_store::FileStore;
 use meilisearch_types::error::ResponseError;
+use meilisearch_types::heed::types::{OwnedType, SerdeBincode, SerdeJson, Str};
+use meilisearch_types::heed::{self, Database, Env};
 use meilisearch_types::milli;
+use meilisearch_types::milli::documents::DocumentsBatchBuilder;
+use meilisearch_types::milli::update::IndexerConfig;
+use meilisearch_types::milli::{CboRoaringBitmapCodec, Index, RoaringBitmapCodec, BEU32};
+use meilisearch_types::tasks::{Kind, KindWithContent, Status, Task};
 use roaring::RoaringBitmap;
 use synchronoise::SignalEvent;
 use time::OffsetDateTime;
+use utils::keep_tasks_within_datetimes;
 use uuid::Uuid;
 
-use meilisearch_types::heed::types::{OwnedType, SerdeBincode, SerdeJson, Str};
-use meilisearch_types::heed::{self, Database, Env};
-use meilisearch_types::milli::update::IndexerConfig;
-use meilisearch_types::milli::{CboRoaringBitmapCodec, Index, RoaringBitmapCodec, BEU32};
-
 use crate::index_mapper::IndexMapper;
 
 type BEI128 = meilisearch_types::heed::zerocopy::I128<meilisearch_types::heed::byteorder::BE>;
@@ -124,10 +123,7 @@ impl Query {
     pub fn with_index(self, index_uid: String) -> Self {
         let mut index_vec = self.index_uid.unwrap_or_default();
         index_vec.push(index_uid);
-        Self {
-            index_uid: Some(index_vec),
-            ..self
-        }
+        Self { index_uid: Some(index_vec), ..self }
     }
 }
 
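`with_index` above is a consuming builder: it takes `self` by value and returns an updated copy via `..self`. The same shape on a hypothetical mini struct:

#[derive(Debug, Default)]
struct MiniQuery {
    limit: Option<u32>,
    index_uid: Option<Vec<String>>,
}

impl MiniQuery {
    // Same consuming-builder shape as Query::with_index above: take self by
    // value, update one field, rebuild with `..self`.
    fn with_index(self, index_uid: String) -> Self {
        let mut index_vec = self.index_uid.unwrap_or_default();
        index_vec.push(index_uid);
        Self { index_uid: Some(index_vec), ..self }
    }
}

fn main() {
    let query = MiniQuery::default().with_index("doggo".into()).with_index("catto".into());
    println!("{query:?}");
}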
@@ -142,10 +138,7 @@ struct ProcessingTasks {
 impl ProcessingTasks {
     /// Creates an empty `ProcessingAt` struct.
     fn new() -> ProcessingTasks {
-        ProcessingTasks {
-            started_at: OffsetDateTime::now_utc(),
-            processing: RoaringBitmap::new(),
-        }
+        ProcessingTasks { started_at: OffsetDateTime::now_utc(), processing: RoaringBitmap::new() }
     }
 
     /// Stores the currently processing tasks, and the date time at which it started.
@@ -447,21 +440,11 @@ impl IndexScheduler {
 
         let tasks = self.get_existing_tasks(
             &rtxn,
-            tasks
-                .into_iter()
-                .rev()
-                .take(query.limit.unwrap_or(u32::MAX) as usize),
+            tasks.into_iter().rev().take(query.limit.unwrap_or(u32::MAX) as usize),
         )?;
 
-        let ProcessingTasks {
-            started_at,
-            processing,
-            ..
-        } = self
-            .processing_tasks
-            .read()
-            .map_err(|_| Error::CorruptedTaskQueue)?
-            .clone();
+        let ProcessingTasks { started_at, processing, .. } =
+            self.processing_tasks.read().map_err(|_| Error::CorruptedTaskQueue)?.clone();
 
         let ret = tasks.into_iter();
         if processing.is_empty() {
@@ -469,11 +452,9 @@ impl IndexScheduler {
         } else {
             Ok(ret
                 .map(|task| match processing.contains(task.uid) {
-                    true => Task {
-                        status: Status::Processing,
-                        started_at: Some(started_at),
-                        ..task
-                    },
+                    true => {
+                        Task { status: Status::Processing, started_at: Some(started_at), ..task }
+                    }
                     false => task,
                 })
                 .collect())
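The reflowed `let ProcessingTasks { started_at, processing, .. } = ...` is plain struct destructuring with a rest pattern. In isolation (stand-in struct, illustrative fields):

#[derive(Clone, Debug)]
struct MiniProcessingTasks {
    started_at: u64,
    processing: Vec<u32>,
    must_stop: bool,
}

fn main() {
    let state = MiniProcessingTasks { started_at: 7, processing: vec![1, 2], must_stop: false };
    // Rest-pattern destructuring, as in
    // `let ProcessingTasks { started_at, processing, .. } = ...clone();`
    let MiniProcessingTasks { started_at, processing, .. } = state.clone();
    println!("{started_at} {processing:?}");
}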
@@ -497,8 +478,7 @@ impl IndexScheduler {
             status: Status::Enqueued,
             kind: kind.clone(),
         };
-        self.all_tasks
-            .append(&mut wtxn, &BEU32::new(task.uid), &task)?;
+        self.all_tasks.append(&mut wtxn, &BEU32::new(task.uid), &task)?;
 
         if let Some(indexes) = task.indexes() {
             for index in indexes {
@@ -527,11 +507,7 @@ impl IndexScheduler {
         // we inform the processing tasks to stop (if necessary).
         if let KindWithContent::TaskCancelation { tasks, .. } = kind {
             let tasks_to_cancel = RoaringBitmap::from_iter(tasks);
-            if self
-                .processing_tasks
-                .read()
-                .unwrap()
-                .must_cancel_processing_tasks(&tasks_to_cancel)
+            if self.processing_tasks.read().unwrap().must_cancel_processing_tasks(&tasks_to_cancel)
             {
                 self.must_stop_processing.must_stop();
             }
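The collapsed call chains above all funnel through an `RwLock`: a short-lived `read()` or `write()` guard around the shared processing set. A minimal `std::sync::RwLock` sketch of the same access pattern:

use std::sync::RwLock;

fn main() {
    // Same read()/write() pattern as the processing_tasks lock above; each
    // guard is dropped at the end of its statement.
    let processing = RwLock::new(Vec::<u32>::new());

    processing.write().unwrap().push(42);
    let contains = processing.read().unwrap().contains(&42);
    assert!(contains);
}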
@@ -601,16 +577,14 @@ impl IndexScheduler {
                 KindDump::DocumentClear => KindWithContent::DocumentClear {
                     index_uid: task.index_uid.ok_or(Error::CorruptedDump)?,
                 },
-                KindDump::Settings {
-                    settings,
-                    is_deletion,
-                    allow_index_creation,
-                } => KindWithContent::Settings {
-                    index_uid: task.index_uid.ok_or(Error::CorruptedDump)?,
-                    new_settings: settings,
-                    is_deletion,
-                    allow_index_creation,
-                },
+                KindDump::Settings { settings, is_deletion, allow_index_creation } => {
+                    KindWithContent::Settings {
+                        index_uid: task.index_uid.ok_or(Error::CorruptedDump)?,
+                        new_settings: settings,
+                        is_deletion,
+                        allow_index_creation,
+                    }
+                }
                 KindDump::IndexDeletion => KindWithContent::IndexDeletion {
                     index_uid: task.index_uid.ok_or(Error::CorruptedDump)?,
                 },
@@ -629,21 +603,14 @@ impl IndexScheduler {
                 KindDump::TasksDeletion { query, tasks } => {
                     KindWithContent::TaskDeletion { query, tasks }
                 }
-                KindDump::DumpExport {
-                    dump_uid,
-                    keys,
-                    instance_uid,
-                } => KindWithContent::DumpExport {
-                    dump_uid,
-                    keys,
-                    instance_uid,
-                },
+                KindDump::DumpExport { dump_uid, keys, instance_uid } => {
+                    KindWithContent::DumpExport { dump_uid, keys, instance_uid }
+                }
                 KindDump::Snapshot => KindWithContent::Snapshot,
             },
         };
 
-        self.all_tasks
-            .put(&mut wtxn, &BEU32::new(task.uid), &task)?;
+        self.all_tasks.put(&mut wtxn, &BEU32::new(task.uid), &task)?;
 
         if let Some(indexes) = task.indexes() {
             for index in indexes {
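The repeated `task.index_uid.ok_or(Error::CorruptedDump)?` above is the usual `Option` to `Result` bridge. In isolation (toy error type, illustrative only):

#[derive(Debug)]
enum MiniError {
    CorruptedDump,
}

// Same Option -> Result bridge as `task.index_uid.ok_or(Error::CorruptedDump)?`.
fn index_uid(maybe_uid: Option<String>) -> Result<String, MiniError> {
    maybe_uid.ok_or(MiniError::CorruptedDump)
}

fn main() {
    assert!(index_uid(None).is_err());
    assert_eq!(index_uid(Some("doggo".into())).unwrap(), "doggo");
}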
@@ -729,19 +696,12 @@ impl IndexScheduler {
 
         // We reset the must_stop flag to be sure that we don't stop processing tasks
         self.must_stop_processing.reset();
-        self.processing_tasks
-            .write()
-            .unwrap()
-            .start_processing_at(started_at, processing_tasks);
+        self.processing_tasks.write().unwrap().start_processing_at(started_at, processing_tasks);
 
         #[cfg(test)]
         {
-            self.test_breakpoint_sdr
-                .send(Breakpoint::BatchCreated)
-                .unwrap();
-            self.test_breakpoint_sdr
-                .send(Breakpoint::BeforeProcessing)
-                .unwrap();
+            self.test_breakpoint_sdr.send(Breakpoint::BatchCreated).unwrap();
+            self.test_breakpoint_sdr.send(Breakpoint::BeforeProcessing).unwrap();
         }
 
         // 2. Process the tasks
@@ -781,16 +741,11 @@ impl IndexScheduler {
                 }
             }
         }
-        self.processing_tasks
-            .write()
-            .unwrap()
-            .stop_processing_at(finished_at);
+        self.processing_tasks.write().unwrap().stop_processing_at(finished_at);
         wtxn.commit()?;
 
         #[cfg(test)]
-        self.test_breakpoint_sdr
-            .send(Breakpoint::AfterProcessing)
-            .unwrap();
+        self.test_breakpoint_sdr.send(Breakpoint::AfterProcessing).unwrap();
 
         Ok(processed_tasks)
     }
@@ -812,16 +767,12 @@ mod tests {
     use tempfile::TempDir;
     use uuid::Uuid;
 
-    use crate::snapshot::snapshot_index_scheduler;
-
     use super::*;
+    use crate::snapshot::snapshot_index_scheduler;
 
     /// Return a `KindWithContent::IndexCreation` task
     fn index_creation_task(index: &'static str, primary_key: &'static str) -> KindWithContent {
-        KindWithContent::IndexCreation {
-            index_uid: S(index),
-            primary_key: Some(S(primary_key)),
-        }
+        KindWithContent::IndexCreation { index_uid: S(index), primary_key: Some(S(primary_key)) }
     }
     /// Create a `KindWithContent::DocumentImport` task that imports documents.
     ///
@@ -864,9 +815,7 @@ mod tests {
         }}"#
         );
 
-        let (_uuid, mut file) = index_scheduler
-            .create_update_file_with_uuid(file_uuid)
-            .unwrap();
+        let (_uuid, mut file) = index_scheduler.create_update_file_with_uuid(file_uuid).unwrap();
         let documents_count =
             meilisearch_types::document_formats::read_json(content.as_bytes(), file.as_file_mut())
                 .unwrap() as u64;
@@ -890,10 +839,8 @@ mod tests {
         )
         .unwrap();
 
-        let index_scheduler_handle = IndexSchedulerHandle {
-            _tempdir: tempdir,
-            test_breakpoint_rcv: receiver,
-        };
+        let index_scheduler_handle =
+            IndexSchedulerHandle { _tempdir: tempdir, test_breakpoint_rcv: receiver };
 
         (index_scheduler, index_scheduler_handle)
     }
@@ -952,18 +899,12 @@ mod tests {
     fn insert_task_while_another_task_is_processing() {
         let (index_scheduler, handle) = IndexScheduler::test(true);
 
-        index_scheduler
-            .register(index_creation_task("index_a", "id"))
-            .unwrap();
+        index_scheduler.register(index_creation_task("index_a", "id")).unwrap();
         handle.wait_till(Breakpoint::BatchCreated);
         // while the task is processing can we register another task?
+        index_scheduler.register(index_creation_task("index_b", "id")).unwrap();
         index_scheduler
-            .register(index_creation_task("index_b", "id"))
-            .unwrap();
-        index_scheduler
-            .register(KindWithContent::IndexDeletion {
-                index_uid: S("index_a"),
-            })
+            .register(KindWithContent::IndexDeletion { index_uid: S("index_a") })
             .unwrap();
 
         snapshot!(snapshot_index_scheduler(&index_scheduler));
@@ -976,21 +917,13 @@ mod tests {
         let (index_scheduler, handle) = IndexScheduler::test(true);
 
         index_scheduler
-            .register(KindWithContent::IndexCreation {
-                index_uid: S("doggos"),
-                primary_key: None,
-            })
+            .register(KindWithContent::IndexCreation { index_uid: S("doggos"), primary_key: None })
             .unwrap();
         index_scheduler
-            .register(KindWithContent::IndexCreation {
-                index_uid: S("cattos"),
-                primary_key: None,
-            })
+            .register(KindWithContent::IndexCreation { index_uid: S("cattos"), primary_key: None })
            .unwrap();
         index_scheduler
-            .register(KindWithContent::IndexDeletion {
-                index_uid: S("doggos"),
-            })
+            .register(KindWithContent::IndexDeletion { index_uid: S("doggos") })
             .unwrap();
 
         handle.wait_till(Breakpoint::Start);
@@ -1011,25 +944,16 @@ mod tests {
         let (index_scheduler, handle) = IndexScheduler::test(false);
 
         index_scheduler
-            .register(KindWithContent::IndexCreation {
-                index_uid: S("doggos"),
-                primary_key: None,
-            })
+            .register(KindWithContent::IndexCreation { index_uid: S("doggos"), primary_key: None })
             .unwrap();
         index_scheduler
-            .register(KindWithContent::DocumentClear {
-                index_uid: S("doggos"),
-            })
+            .register(KindWithContent::DocumentClear { index_uid: S("doggos") })
             .unwrap();
         index_scheduler
-            .register(KindWithContent::DocumentClear {
-                index_uid: S("doggos"),
-            })
+            .register(KindWithContent::DocumentClear { index_uid: S("doggos") })
             .unwrap();
         index_scheduler
-            .register(KindWithContent::DocumentClear {
-                index_uid: S("doggos"),
-            })
+            .register(KindWithContent::DocumentClear { index_uid: S("doggos") })
             .unwrap();
 
         handle.wait_till(Breakpoint::AfterProcessing);
@@ -1211,10 +1135,7 @@ mod tests {
         }"#;
 
         index_scheduler
-            .register(KindWithContent::IndexCreation {
-                index_uid: S("doggos"),
-                primary_key: None,
-            })
+            .register(KindWithContent::IndexCreation { index_uid: S("doggos"), primary_key: None })
             .unwrap();
 
         let (uuid, mut file) = index_scheduler.create_update_file_with_uuid(0).unwrap();
@@ -1233,9 +1154,7 @@ mod tests {
             })
             .unwrap();
         index_scheduler
-            .register(KindWithContent::IndexDeletion {
-                index_uid: S("doggos"),
-            })
+            .register(KindWithContent::IndexDeletion { index_uid: S("doggos") })
             .unwrap();
 
         snapshot!(snapshot_index_scheduler(&index_scheduler));
@@ -1263,9 +1182,7 @@ mod tests {
 
         for name in index_names {
             index_scheduler
-                .register(KindWithContent::DocumentClear {
-                    index_uid: name.to_string(),
-                })
+                .register(KindWithContent::DocumentClear { index_uid: name.to_string() })
                 .unwrap();
         }
 
@@ -1308,10 +1225,7 @@ mod tests {
 
         index_scheduler
             .register(KindWithContent::IndexSwap {
-                swaps: vec![
-                    ("a".to_owned(), "b".to_owned()),
-                    ("c".to_owned(), "d".to_owned()),
-                ],
+                swaps: vec![("a".to_owned(), "b".to_owned()), ("c".to_owned(), "d".to_owned())],
             })
             .unwrap();
 
@@ -1319,9 +1233,7 @@ mod tests {
         snapshot!(snapshot_index_scheduler(&index_scheduler), name: "first_swap_processed");
 
         index_scheduler
-            .register(KindWithContent::IndexSwap {
-                swaps: vec![("a".to_owned(), "c".to_owned())],
-            })
+            .register(KindWithContent::IndexSwap { swaps: vec![("a".to_owned(), "c".to_owned())] })
             .unwrap();
         handle.wait_till(Breakpoint::AfterProcessing);
         snapshot!(snapshot_index_scheduler(&index_scheduler), name: "second_swap_processed");
@@ -1353,9 +1265,7 @@ mod tests {
             })
             .unwrap();
         index_scheduler
-            .register(KindWithContent::IndexDeletion {
-                index_uid: S("doggos"),
-            })
+            .register(KindWithContent::IndexDeletion { index_uid: S("doggos") })
             .unwrap();
 
         snapshot!(snapshot_index_scheduler(&index_scheduler));
@@ -1,16 +1,11 @@
+use meilisearch_types::heed::types::{OwnedType, SerdeBincode, SerdeJson, Str};
+use meilisearch_types::heed::{Database, RoTxn};
 use meilisearch_types::milli::{CboRoaringBitmapCodec, RoaringBitmapCodec, BEU32};
-use meilisearch_types::tasks::Details;
-use meilisearch_types::{
-    heed::{
-        types::{OwnedType, SerdeBincode, SerdeJson, Str},
-        Database, RoTxn,
-    },
-    tasks::Task,
-};
+use meilisearch_types::tasks::{Details, Task};
 use roaring::RoaringBitmap;
 
-use crate::BEI128;
-use crate::{index_mapper::IndexMapper, IndexScheduler, Kind, Status};
+use crate::index_mapper::IndexMapper;
+use crate::{IndexScheduler, Kind, Status, BEI128};
 
 pub fn snapshot_index_scheduler(scheduler: &IndexScheduler) -> String {
     let IndexScheduler {
@@ -37,9 +32,7 @@ pub fn snapshot_index_scheduler(scheduler: &IndexScheduler) -> String {
     let mut snap = String::new();
 
     let processing_tasks = processing_tasks.read().unwrap().processing.clone();
-    snap.push_str(&format!(
-        "### Autobatching Enabled = {autobatching_enabled}\n"
-    ));
+    snap.push_str(&format!("### Autobatching Enabled = {autobatching_enabled}\n"));
     snap.push_str("### Processing Tasks:\n");
     snap.push_str(&snapshot_bitmap(&processing_tasks));
     snap.push_str("\n----------------------------------------------------------------------\n");
@@ -151,6 +144,7 @@ fn snapshot_task(task: &Task) -> String {
     snap.push('}');
     snap
 }
+
 fn snaphsot_details(d: &Details) -> String {
     match d {
         Details::DocumentAddition {
@@ -191,8 +185,7 @@ fn snaphsot_details(d: &Details) -> String {
         },
         Details::IndexSwap { swaps } => {
             format!("{{ indexes: {swaps:?} }}")
-        },
-
+        }
     }
 }
@@ -205,6 +198,7 @@ fn snapshot_status(rtxn: &RoTxn, db: Database<SerdeBincode<Status>, RoaringBitma
     }
     snap
 }
+
 fn snapshot_kind(rtxn: &RoTxn, db: Database<SerdeBincode<Kind>, RoaringBitmapCodec>) -> String {
     let mut snap = String::new();
     let mut iter = db.iter(rtxn).unwrap();
@@ -227,11 +221,6 @@ fn snapshot_index_tasks(rtxn: &RoTxn, db: Database<Str, RoaringBitmapCodec>) ->
 }
 
 fn snapshot_index_mapper(rtxn: &RoTxn, mapper: &IndexMapper) -> String {
-    let names = mapper
-        .indexes(rtxn)
-        .unwrap()
-        .into_iter()
-        .map(|(n, _)| n)
-        .collect::<Vec<_>>();
+    let names = mapper.indexes(rtxn).unwrap().into_iter().map(|(n, _)| n).collect::<Vec<_>>();
     format!("{names:?}")
 }
|
@ -2,29 +2,22 @@
|
|||||||
|
|
||||||
use std::ops::Bound;
|
use std::ops::Bound;
|
||||||
|
|
||||||
use meilisearch_types::heed::types::OwnedType;
|
use meilisearch_types::heed::types::{DecodeIgnore, OwnedType};
|
||||||
use meilisearch_types::heed::Database;
|
use meilisearch_types::heed::{Database, RoTxn, RwTxn};
|
||||||
use meilisearch_types::heed::{types::DecodeIgnore, RoTxn, RwTxn};
|
|
||||||
use meilisearch_types::milli::{CboRoaringBitmapCodec, BEU32};
|
use meilisearch_types::milli::{CboRoaringBitmapCodec, BEU32};
|
||||||
|
use meilisearch_types::tasks::{Kind, KindWithContent, Status};
|
||||||
use roaring::{MultiOps, RoaringBitmap};
|
use roaring::{MultiOps, RoaringBitmap};
|
||||||
use time::OffsetDateTime;
|
use time::OffsetDateTime;
|
||||||
|
|
||||||
use crate::{Error, IndexScheduler, Result, Task, TaskId, BEI128};
|
use crate::{Error, IndexScheduler, Result, Task, TaskId, BEI128};
|
||||||
use meilisearch_types::tasks::{Kind, KindWithContent, Status};
|
|
||||||
|
|
||||||
impl IndexScheduler {
|
impl IndexScheduler {
|
||||||
pub(crate) fn all_task_ids(&self, rtxn: &RoTxn) -> Result<RoaringBitmap> {
|
pub(crate) fn all_task_ids(&self, rtxn: &RoTxn) -> Result<RoaringBitmap> {
|
||||||
enum_iterator::all()
|
enum_iterator::all().map(|s| self.get_status(&rtxn, s)).union()
|
||||||
.map(|s| self.get_status(&rtxn, s))
|
|
||||||
.union()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn last_task_id(&self, rtxn: &RoTxn) -> Result<Option<TaskId>> {
|
pub(crate) fn last_task_id(&self, rtxn: &RoTxn) -> Result<Option<TaskId>> {
|
||||||
Ok(self
|
Ok(self.all_tasks.remap_data_type::<DecodeIgnore>().last(rtxn)?.map(|(k, _)| k.get() + 1))
|
||||||
.all_tasks
|
|
||||||
.remap_data_type::<DecodeIgnore>()
|
|
||||||
.last(rtxn)?
|
|
||||||
.map(|(k, _)| k.get() + 1))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn next_task_id(&self, rtxn: &RoTxn) -> Result<TaskId> {
|
pub(crate) fn next_task_id(&self, rtxn: &RoTxn) -> Result<TaskId> {
|
||||||
@ -45,16 +38,13 @@ impl IndexScheduler {
|
|||||||
tasks
|
tasks
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|task_id| {
|
.map(|task_id| {
|
||||||
self.get_task(rtxn, task_id)
|
self.get_task(rtxn, task_id).and_then(|task| task.ok_or(Error::CorruptedTaskQueue))
|
||||||
.and_then(|task| task.ok_or(Error::CorruptedTaskQueue))
|
|
||||||
})
|
})
|
||||||
.collect::<Result<_>>()
|
.collect::<Result<_>>()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn update_task(&self, wtxn: &mut RwTxn, task: &Task) -> Result<()> {
|
pub(crate) fn update_task(&self, wtxn: &mut RwTxn, task: &Task) -> Result<()> {
|
||||||
let old_task = self
|
let old_task = self.get_task(wtxn, task.uid)?.ok_or(Error::CorruptedTaskQueue)?;
|
||||||
.get_task(wtxn, task.uid)?
|
|
||||||
.ok_or(Error::CorruptedTaskQueue)?;
|
|
||||||
|
|
||||||
debug_assert_eq!(old_task.uid, task.uid);
|
debug_assert_eq!(old_task.uid, task.uid);
|
||||||
|
|
||||||
@ -85,19 +75,13 @@ impl IndexScheduler {
|
|||||||
"Cannot update a task's enqueued_at time"
|
"Cannot update a task's enqueued_at time"
|
||||||
);
|
);
|
||||||
if old_task.started_at != task.started_at {
|
if old_task.started_at != task.started_at {
|
||||||
assert!(
|
assert!(old_task.started_at.is_none(), "Cannot update a task's started_at time");
|
||||||
old_task.started_at.is_none(),
|
|
||||||
"Cannot update a task's started_at time"
|
|
||||||
);
|
|
||||||
if let Some(started_at) = task.started_at {
|
if let Some(started_at) = task.started_at {
|
||||||
insert_task_datetime(wtxn, self.started_at, started_at, task.uid)?;
|
insert_task_datetime(wtxn, self.started_at, started_at, task.uid)?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if old_task.finished_at != task.finished_at {
|
if old_task.finished_at != task.finished_at {
|
||||||
assert!(
|
assert!(old_task.finished_at.is_none(), "Cannot update a task's finished_at time");
|
||||||
old_task.finished_at.is_none(),
|
|
||||||
"Cannot update a task's finished_at time"
|
|
||||||
);
|
|
||||||
if let Some(finished_at) = task.finished_at {
|
if let Some(finished_at) = task.finished_at {
|
||||||
insert_task_datetime(wtxn, self.finished_at, finished_at, task.uid)?;
|
insert_task_datetime(wtxn, self.finished_at, finished_at, task.uid)?;
|
||||||
}
|
}
|
||||||
@@ -269,7 +253,9 @@ pub fn swap_index_uid_in_task(task: &mut Task, swap: (&str, &str)) {
                 }
             }
         }
-        K::TaskCancelation { .. } | K::TaskDeletion { .. } | K::DumpExport { .. } | K::Snapshot => (),
+        K::TaskCancelation { .. } | K::TaskDeletion { .. } | K::DumpExport { .. } | K::Snapshot => {
+            ()
+        }
     };
     for index_uid in index_uids {
         if index_uid == &swap.0 {
@@ -1,10 +1,10 @@
-use once_cell::sync::Lazy;
 use std::borrow::Cow;
-use std::path::PathBuf;
+use std::collections::HashMap;
+use std::path::{Path, PathBuf};
 use std::sync::Mutex;
-use std::{collections::HashMap, path::Path};

 pub use insta;
+use once_cell::sync::Lazy;

 static SNAPSHOT_NAMES: Lazy<Mutex<HashMap<PathBuf, usize>>> = Lazy::new(|| Mutex::default());

@@ -23,18 +23,9 @@ pub fn default_snapshot_settings_for_test(name: Option<&str>) -> (insta::Settings
     let filename = path.file_name().unwrap().to_str().unwrap();
     settings.set_omit_expression(true);

-    let test_name = std::thread::current()
-        .name()
-        .unwrap()
-        .rsplit("::")
-        .next()
-        .unwrap()
-        .to_owned();
+    let test_name = std::thread::current().name().unwrap().rsplit("::").next().unwrap().to_owned();

-    let path = Path::new("snapshots")
-        .join(filename)
-        .join(&test_name)
-        .to_owned();
+    let path = Path::new("snapshots").join(filename).join(&test_name).to_owned();
     settings.set_snapshot_path(path.clone());
     let snap_name = if let Some(name) = name {
         Cow::Borrowed(name)
@@ -1,10 +1,9 @@
-use serde_json::Deserializer;
-
 use std::fs::File;
-use std::io::BufReader;
-use std::io::Write;
+use std::io::{BufReader, Write};
 use std::path::Path;

+use serde_json::Deserializer;
+
 use crate::{AuthController, HeedAuthStore, Result};

 const KEYS_PATH: &str = "keys";
@@ -7,18 +7,16 @@ use std::ops::Deref;
 use std::path::Path;
 use std::sync::Arc;

+use error::{AuthControllerError, Result};
 use meilisearch_types::keys::{Action, Key};
+use meilisearch_types::star_or::StarOr;
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
+pub use store::open_auth_store_env;
+use store::{generate_key_as_hexa, HeedAuthStore};
 use time::OffsetDateTime;
 use uuid::Uuid;
-
-use error::{AuthControllerError, Result};
-use meilisearch_types::star_or::StarOr;
-use store::generate_key_as_hexa;
-pub use store::open_auth_store_env;
-use store::HeedAuthStore;

 #[derive(Clone)]
 pub struct AuthController {
     store: Arc<HeedAuthStore>,
@@ -33,18 +31,13 @@ impl AuthController {
             generate_default_keys(&store)?;
         }

-        Ok(Self {
-            store: Arc::new(store),
-            master_key: master_key.clone(),
-        })
+        Ok(Self { store: Arc::new(store), master_key: master_key.clone() })
     }

     pub fn create_key(&self, value: Value) -> Result<Key> {
         let key = Key::create_from_value(value)?;
         match self.store.get_api_key(key.uid)? {
-            Some(_) => Err(AuthControllerError::ApiKeyAlreadyExists(
-                key.uid.to_string(),
-            )),
+            Some(_) => Err(AuthControllerError::ApiKeyAlreadyExists(key.uid.to_string())),
             None => self.store.put_api_key(key),
         }
     }
@@ -63,9 +56,9 @@ impl AuthController {

     pub fn get_optional_uid_from_encoded_key(&self, encoded_key: &[u8]) -> Result<Option<Uuid>> {
         match &self.master_key {
-            Some(master_key) => self
-                .store
-                .get_uid_from_encoded_key(encoded_key, master_key.as_bytes()),
+            Some(master_key) => {
+                self.store.get_uid_from_encoded_key(encoded_key, master_key.as_bytes())
+            }
             None => Ok(None),
         }
     }
@@ -131,9 +124,7 @@ impl AuthController {
     /// Generate a valid key from a key id using the current master key.
     /// Returns None if no master key has been set.
     pub fn generate_key(&self, uid: Uuid) -> Option<String> {
-        self.master_key
-            .as_ref()
-            .map(|master_key| generate_key_as_hexa(uid, master_key.as_bytes()))
+        self.master_key.as_ref().map(|master_key| generate_key_as_hexa(uid, master_key.as_bytes()))
     }

     /// Check if the provided key is authorized to make a specific action
@@ -151,8 +142,7 @@ impl AuthController {
             .or(match index {
                 // else check if the key has access to the requested index.
                 Some(index) => {
-                    self.store
-                        .get_expiration_date(uid, action, Some(index.as_bytes()))?
+                    self.store.get_expiration_date(uid, action, Some(index.as_bytes()))?
                 }
                 // or to any index if no index has been requested.
                 None => self.store.prefix_first_expiration_date(uid, action)?,
@@ -185,10 +175,7 @@ pub struct AuthFilter {

 impl Default for AuthFilter {
     fn default() -> Self {
-        Self {
-            search_rules: SearchRules::default(),
-            allow_index_creation: true,
-        }
+        Self { search_rules: SearchRules::default(), allow_index_creation: true }
     }
 }

@@ -223,10 +210,9 @@ impl SearchRules {
                     None
                 }
             }
-            Self::Map(map) => map
-                .get(index)
-                .or_else(|| map.get("*"))
-                .map(|isr| isr.clone().unwrap_or_default()),
+            Self::Map(map) => {
+                map.get(index).or_else(|| map.get("*")).map(|isr| isr.clone().unwrap_or_default())
+            }
         }
     }
 }
@@ -1,8 +1,7 @@
 use std::borrow::Cow;
 use std::cmp::Reverse;
 use std::collections::HashSet;
-use std::convert::TryFrom;
-use std::convert::TryInto;
+use std::convert::{TryFrom, TryInto};
 use std::fs::create_dir_all;
 use std::ops::Deref;
 use std::path::Path;
@@ -59,12 +58,7 @@ impl HeedAuthStore {
         let keys = env.create_database(Some(KEY_DB_NAME))?;
         let action_keyid_index_expiration =
             env.create_database(Some(KEY_ID_ACTION_INDEX_EXPIRATION_DB_NAME))?;
-        Ok(Self {
-            env,
-            keys,
-            action_keyid_index_expiration,
-            should_close_on_drop: true,
-        })
+        Ok(Self { env, keys, action_keyid_index_expiration, should_close_on_drop: true })
     }

     pub fn set_drop_on_close(&mut self, v: bool) {
@@ -94,12 +88,8 @@ impl HeedAuthStore {
             Action::All => actions.extend(enum_iterator::all::<Action>()),
             Action::DocumentsAll => {
                 actions.extend(
-                    [
-                        Action::DocumentsGet,
-                        Action::DocumentsDelete,
-                        Action::DocumentsAdd,
-                    ]
-                    .iter(),
+                    [Action::DocumentsGet, Action::DocumentsDelete, Action::DocumentsAdd]
+                        .iter(),
                 );
             }
             Action::IndexesAll => {
@@ -72,11 +72,8 @@ mod mini_dashboard {
         resource_dir(&dashboard_dir).build()?;

         // Write the sha1 for the dashboard back to file.
-        let mut file = OpenOptions::new()
-            .write(true)
-            .create(true)
-            .truncate(true)
-            .open(sha1_path)?;
+        let mut file =
+            OpenOptions::new().write(true).create(true).truncate(true).open(sha1_path)?;

         file.write_all(sha1.as_bytes())?;
         file.flush()?;
@@ -1,12 +1,13 @@
-use std::{any::Any, sync::Arc};
+use std::any::Any;
+use std::sync::Arc;

 use actix_web::HttpRequest;
 use meilisearch_types::InstanceUid;
 use serde_json::Value;

-use crate::{routes::indexes::documents::UpdateDocumentsQuery, Opt};
-
 use super::{find_user_id, Analytics};
+use crate::routes::indexes::documents::UpdateDocumentsQuery;
+use crate::Opt;

 pub struct MockAnalytics {
     instance_uid: Option<InstanceUid>,
@@ -9,14 +9,13 @@ use std::str::FromStr;

 use actix_web::HttpRequest;
 use meilisearch_types::InstanceUid;
+pub use mock_analytics::MockAnalytics;
 use once_cell::sync::Lazy;
 use platform_dirs::AppDirs;
 use serde_json::Value;

 use crate::routes::indexes::documents::UpdateDocumentsQuery;

-pub use mock_analytics::MockAnalytics;
-
 // if we are in debug mode OR the analytics feature is disabled
 // the `SegmentAnalytics` point to the mock instead of the real analytics
 #[cfg(any(debug_assertions, not(feature = "analytics")))]
@@ -42,12 +41,7 @@ fn config_user_id_path(db_path: &Path) -> Option<PathBuf> {
     db_path
         .canonicalize()
         .ok()
-        .map(|path| {
-            path.join("instance-uid")
-                .display()
-                .to_string()
-                .replace('/', "-")
-        })
+        .map(|path| path.join("instance-uid").display().to_string().replace('/', "-"))
         .zip(MEILISEARCH_CONFIG_PATH.as_ref())
         .map(|(filename, config_path)| config_path.join(filename.trim_start_matches('-')))
 }
@@ -21,6 +21,7 @@ use tokio::select;
 use tokio::sync::mpsc::{self, Receiver, Sender};
 use uuid::Uuid;

+use super::{config_user_id_path, MEILISEARCH_CONFIG_PATH};
 use crate::analytics::Analytics;
 use crate::option::default_http_addr;
 use crate::routes::indexes::documents::UpdateDocumentsQuery;
@@ -31,16 +32,13 @@ use crate::search::{
 };
 use crate::Opt;

-use super::{config_user_id_path, MEILISEARCH_CONFIG_PATH};
-
 const ANALYTICS_HEADER: &str = "X-Meilisearch-Client";

 /// Write the instance-uid in the `data.ms` and in `~/.config/MeiliSearch/path-to-db-instance-uid`. Ignore the errors.
 fn write_user_id(db_path: &Path, user_id: &InstanceUid) {
     let _ = fs::write(db_path.join("instance-uid"), user_id.as_bytes());
-    if let Some((meilisearch_config_path, user_id_path)) = MEILISEARCH_CONFIG_PATH
-        .as_ref()
-        .zip(config_user_id_path(db_path))
+    if let Some((meilisearch_config_path, user_id_path)) =
+        MEILISEARCH_CONFIG_PATH.as_ref().zip(config_user_id_path(db_path))
     {
         let _ = fs::create_dir_all(&meilisearch_config_path);
         let _ = fs::write(user_id_path, user_id.to_string());
@@ -84,22 +82,16 @@ impl SegmentAnalytics {
         let instance_uid = instance_uid.unwrap_or_else(|| Uuid::new_v4());
         write_user_id(&opt.db_path, &instance_uid);

-        let client = reqwest::Client::builder()
-            .connect_timeout(Duration::from_secs(10))
-            .build();
+        let client = reqwest::Client::builder().connect_timeout(Duration::from_secs(10)).build();

         // if reqwest throws an error we won't be able to send analytics
         if client.is_err() {
             return super::MockAnalytics::new(opt);
         }

-        let client = HttpClient::new(
-            client.unwrap(),
-            "https://telemetry.meilisearch.com".to_string(),
-        );
-        let user = User::UserId {
-            user_id: instance_uid.to_string(),
-        };
+        let client =
+            HttpClient::new(client.unwrap(), "https://telemetry.meilisearch.com".to_string());
+        let user = User::UserId { user_id: instance_uid.to_string() };
         let mut batcher = AutoBatcher::new(client, Batcher::new(None), SEGMENT_API_KEY.to_string());

         // If Meilisearch is Launched for the first time:
@@ -108,9 +100,7 @@ impl SegmentAnalytics {
         if first_time_run {
             let _ = batcher
                 .push(Track {
-                    user: User::UserId {
-                        user_id: "total_launch".to_string(),
-                    },
+                    user: User::UserId { user_id: "total_launch".to_string() },
                     event: "Launched".to_string(),
                     ..Default::default()
                 })
@@ -139,11 +129,7 @@ impl SegmentAnalytics {
         });
         tokio::spawn(segment.run(index_scheduler.clone()));

-        let this = Self {
-            instance_uid,
-            sender,
-            user: user.clone(),
-        };
+        let this = Self { instance_uid, sender, user: user.clone() };

         Arc::new(this)
     }
@@ -164,21 +150,15 @@ impl super::Analytics for SegmentAnalytics {
             properties: send,
             ..Default::default()
         };
-        let _ = self
-            .sender
-            .try_send(AnalyticsMsg::BatchMessage(event.into()));
+        let _ = self.sender.try_send(AnalyticsMsg::BatchMessage(event.into()));
     }

     fn get_search(&self, aggregate: SearchAggregator) {
-        let _ = self
-            .sender
-            .try_send(AnalyticsMsg::AggregateGetSearch(aggregate));
+        let _ = self.sender.try_send(AnalyticsMsg::AggregateGetSearch(aggregate));
     }

     fn post_search(&self, aggregate: SearchAggregator) {
-        let _ = self
-            .sender
-            .try_send(AnalyticsMsg::AggregatePostSearch(aggregate));
+        let _ = self.sender.try_send(AnalyticsMsg::AggregatePostSearch(aggregate));
     }

     fn add_documents(
@@ -188,9 +168,7 @@ impl super::Analytics for SegmentAnalytics {
         request: &HttpRequest,
     ) {
         let aggregate = DocumentsAggregator::from_query(documents_query, index_creation, request);
-        let _ = self
-            .sender
-            .try_send(AnalyticsMsg::AggregateAddDocuments(aggregate));
+        let _ = self.sender.try_send(AnalyticsMsg::AggregateAddDocuments(aggregate));
     }

     fn update_documents(
@@ -200,9 +178,7 @@ impl super::Analytics for SegmentAnalytics {
         request: &HttpRequest,
     ) {
         let aggregate = DocumentsAggregator::from_query(documents_query, index_creation, request);
-        let _ = self
-            .sender
-            .try_send(AnalyticsMsg::AggregateUpdateDocuments(aggregate));
+        let _ = self.sender.try_send(AnalyticsMsg::AggregateUpdateDocuments(aggregate));
     }
 }

@@ -261,11 +237,8 @@ impl Segment {
             infos
         };

-        let number_of_documents = stats
-            .indexes
-            .values()
-            .map(|index| index.number_of_documents)
-            .collect::<Vec<u64>>();
+        let number_of_documents =
+            stats.indexes.values().map(|index| index.number_of_documents).collect::<Vec<u64>>();

         json!({
             "start_since_days": FIRST_START_TIMESTAMP.elapsed().as_secs() / (60 * 60 * 24), // one day
@@ -413,11 +386,7 @@ impl SearchAggregator {
                 let syntax = match filter {
                     Value::String(_) => "string".to_string(),
                     Value::Array(values) => {
-                        if values
-                            .iter()
-                            .map(|v| v.to_string())
-                            .any(|s| RE.is_match(&s))
-                        {
+                        if values.iter().map(|v| v.to_string()).any(|s| RE.is_match(&s)) {
                             "mixed".to_string()
                         } else {
                             "array".to_string()
@@ -448,8 +417,7 @@ impl SearchAggregator {
             ret.finite_pagination = 0;
         }

-        ret.matching_strategy
-            .insert(format!("{:?}", query.matching_strategy), 1);
+        ret.matching_strategy.insert(format!("{:?}", query.matching_strategy), 1);

         ret.highlight_pre_tag = query.highlight_pre_tag != DEFAULT_HIGHLIGHT_PRE_TAG();
         ret.highlight_post_tag = query.highlight_post_tag != DEFAULT_HIGHLIGHT_POST_TAG();
@@ -481,17 +449,14 @@ impl SearchAggregator {
         self.time_spent.append(&mut other.time_spent);
         // sort
         self.sort_with_geo_point |= other.sort_with_geo_point;
-        self.sort_sum_of_criteria_terms = self
-            .sort_sum_of_criteria_terms
-            .saturating_add(other.sort_sum_of_criteria_terms);
-        self.sort_total_number_of_criteria = self
-            .sort_total_number_of_criteria
-            .saturating_add(other.sort_total_number_of_criteria);
+        self.sort_sum_of_criteria_terms =
+            self.sort_sum_of_criteria_terms.saturating_add(other.sort_sum_of_criteria_terms);
+        self.sort_total_number_of_criteria =
+            self.sort_total_number_of_criteria.saturating_add(other.sort_total_number_of_criteria);
         // filter
         self.filter_with_geo_radius |= other.filter_with_geo_radius;
-        self.filter_sum_of_criteria_terms = self
-            .filter_sum_of_criteria_terms
-            .saturating_add(other.filter_sum_of_criteria_terms);
+        self.filter_sum_of_criteria_terms =
+            self.filter_sum_of_criteria_terms.saturating_add(other.filter_sum_of_criteria_terms);
         self.filter_total_number_of_criteria = self
             .filter_total_number_of_criteria
             .saturating_add(other.filter_total_number_of_criteria);
@@ -33,11 +33,7 @@ impl<P, D> GuardedData<P, D> {
     {
         match Self::authenticate(auth, token, index).await? {
             Some(filters) => match data {
-                Some(data) => Ok(Self {
-                    data,
-                    filters,
-                    _marker: PhantomData,
-                }),
+                Some(data) => Ok(Self { data, filters, _marker: PhantomData }),
                 None => Err(AuthenticationError::IrretrievableState.into()),
             },
             None => Err(AuthenticationError::InvalidToken.into()),
@@ -52,12 +48,7 @@ impl<P, D> GuardedData<P, D> {

         match Self::authenticate(auth, String::new(), None).await? {
             Some(filters) => match data {
-                Some(data) => Ok(Self {
-                    data,
-                    filters,
-                    _marker: PhantomData,
-                }),
-
+                Some(data) => Ok(Self { data, filters, _marker: PhantomData }),
                 None => Err(AuthenticationError::IrretrievableState.into()),
             },
             None if missing_master_key => Err(AuthenticationError::MissingMasterKey.into()),
@@ -133,14 +124,14 @@ pub trait Policy {

 pub mod policies {
     use jsonwebtoken::{decode, Algorithm, DecodingKey, Validation};
+    use meilisearch_auth::{AuthController, AuthFilter, SearchRules};
+    // reexport actions in policies in order to be used in routes configuration.
+    pub use meilisearch_types::keys::{actions, Action};
     use serde::{Deserialize, Serialize};
     use time::OffsetDateTime;
     use uuid::Uuid;

     use crate::extractors::authentication::Policy;
-    use meilisearch_auth::{AuthController, AuthFilter, SearchRules};
-    // reexport actions in policies in order to be used in routes configuration.
-    pub use meilisearch_types::keys::{actions, Action};

     fn tenant_token_validation() -> Validation {
         let mut validation = Validation::default();
@@ -178,10 +169,7 @@ pub mod policies {
             // authenticate if token is the master key.
             // master key can only have access to keys routes.
            // if master key is None only keys routes are inaccessible.
-            if auth
-                .get_master_key()
-                .map_or_else(|| !is_keys_action(A), |mk| mk == token)
-            {
+            if auth.get_master_key().map_or_else(|| !is_keys_action(A), |mk| mk == token) {
                 return Some(AuthFilter::default());
             }

@@ -239,9 +227,7 @@ pub mod policies {
                 }
             }

-            return auth
-                .get_key_filters(uid, Some(data.claims.search_rules))
-                .ok();
+            return auth.get_key_filters(uid, Some(data.claims.search_rules)).ok();
         }

         None
@@ -1,7 +1,10 @@
 #![allow(non_snake_case)]
-use std::{future::Future, pin::Pin, task::Poll};
+use std::future::Future;
+use std::pin::Pin;
+use std::task::Poll;

-use actix_web::{dev::Payload, FromRequest, Handler, HttpRequest};
+use actix_web::dev::Payload;
+use actix_web::{FromRequest, Handler, HttpRequest};
 use pin_project_lite::pin_project;

 /// `SeqHandler` is an actix `Handler` that enforces that extractors errors are returned in the
@@ -13,37 +13,32 @@ pub mod metrics;
 #[cfg(feature = "metrics")]
 pub mod route_metrics;

-use std::{
-    fs::File,
-    io::{BufReader, BufWriter},
-    path::Path,
-    sync::{atomic::AtomicBool, Arc},
-};
+use std::fs::File;
+use std::io::{BufReader, BufWriter};
+use std::path::Path;
+use std::sync::atomic::AtomicBool;
+use std::sync::Arc;

-use crate::error::MeilisearchHttpError;
 use actix_cors::Cors;
 use actix_http::body::MessageBody;
-use actix_web::{dev::ServiceFactory, error::JsonPayloadError, middleware};
-use actix_web::{dev::ServiceResponse, web::Data};
+use actix_web::dev::{ServiceFactory, ServiceResponse};
+use actix_web::error::JsonPayloadError;
+use actix_web::web::Data;
+use actix_web::{middleware, web, HttpRequest};
 use analytics::Analytics;
 use anyhow::bail;
 use error::PayloadError;
-use http::header::CONTENT_TYPE;
-use meilisearch_types::{
-    milli::{
-        self,
-        documents::{DocumentsBatchBuilder, DocumentsBatchReader},
-        update::{IndexDocumentsConfig, IndexDocumentsMethod},
-    },
-    settings::apply_settings_to_builder,
-};
-pub use option::Opt;
-
-use actix_web::{web, HttpRequest};
-
 use extractors::payload::PayloadConfig;
+use http::header::CONTENT_TYPE;
 use index_scheduler::IndexScheduler;
 use meilisearch_auth::AuthController;
+use meilisearch_types::milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader};
+use meilisearch_types::milli::update::{IndexDocumentsConfig, IndexDocumentsMethod};
+use meilisearch_types::milli::{self};
+use meilisearch_types::settings::apply_settings_to_builder;
+pub use option::Opt;
+
+use crate::error::MeilisearchHttpError;

 pub static AUTOBATCHING_ENABLED: AtomicBool = AtomicBool::new(false);

@@ -103,14 +98,9 @@ pub fn create_app(
         )
         .wrap(middleware::Logger::default())
         .wrap(middleware::Compress::default())
-        .wrap(middleware::NormalizePath::new(
-            middleware::TrailingSlash::Trim,
-        ));
+        .wrap(middleware::NormalizePath::new(middleware::TrailingSlash::Trim));
     #[cfg(feature = "metrics")]
-    let app = app.wrap(Condition::new(
-        opt.enable_metrics_route,
-        route_metrics::RouteMetrics,
-    ));
+    let app = app.wrap(Condition::new(opt.enable_metrics_route, route_metrics::RouteMetrics));
     app
 }

@@ -154,30 +144,18 @@ pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<(IndexScheduler, AuthContr

         if empty_db && src_path_exists {
             let (mut index_scheduler, mut auth_controller) = meilisearch_builder()?;
-            import_dump(
-                &opt.db_path,
-                path,
-                &mut index_scheduler,
-                &mut auth_controller,
-            )?;
+            import_dump(&opt.db_path, path, &mut index_scheduler, &mut auth_controller)?;
             (index_scheduler, auth_controller)
         } else if !empty_db && !opt.ignore_dump_if_db_exists {
             bail!(
                 "database already exists at {:?}, try to delete it or rename it",
-                opt.db_path
-                    .canonicalize()
-                    .unwrap_or_else(|_| opt.db_path.to_owned())
+                opt.db_path.canonicalize().unwrap_or_else(|_| opt.db_path.to_owned())
             )
         } else if !src_path_exists && !opt.ignore_missing_dump {
             bail!("dump doesn't exist at {:?}", path)
         } else {
             let (mut index_scheduler, mut auth_controller) = meilisearch_builder()?;
-            import_dump(
-                &opt.db_path,
-                path,
-                &mut index_scheduler,
-                &mut auth_controller,
-            )?;
+            import_dump(&opt.db_path, path, &mut index_scheduler, &mut auth_controller)?;
             (index_scheduler, auth_controller)
         }
     } else {
@@ -232,10 +210,7 @@ fn import_dump(
     // 1. Import the instance-uid.
     if let Some(ref instance_uid) = instance_uid {
         // we don't want to panic if there is an error with the instance-uid.
-        let _ = std::fs::write(
-            db_path.join("instance-uid"),
-            instance_uid.to_string().as_bytes(),
-        );
+        let _ = std::fs::write(db_path.join("instance-uid"), instance_uid.to_string().as_bytes());
     };

     // 2. Import the `Key`s.
@@ -271,10 +246,7 @@ fn import_dump(
         log::info!("Importing the settings.");
         let settings = index_reader.settings()?;
         apply_settings_to_builder(&settings, &mut builder);
-        builder.execute(
-            |indexing_step| log::debug!("update: {:?}", indexing_step),
-            || false,
-        )?;
+        builder.execute(|indexing_step| log::debug!("update: {:?}", indexing_step), || false)?;

         // 3.3 Import the documents.
         // 3.3.1 We need to recreate the grenad+obkv format accepted by the index.
@@ -368,9 +340,7 @@ pub fn dashboard(config: &mut web::ServiceConfig, enable_frontend: bool) {
         let generated = generated::generate();
         // Generate routes for mini-dashboard assets
         for (path, resource) in generated.into_iter() {
-            let Resource {
-                mime_type, data, ..
-            } = resource;
+            let Resource { mime_type, data, .. } = resource;
             // Redirect index.html to /
             if path == "index.html" {
                 config.service(web::resource("/").route(web::get().to(move || async move {
@@ -8,8 +8,7 @@ use actix_web::HttpServer;
 use index_scheduler::IndexScheduler;
 use meilisearch_auth::AuthController;
 use meilisearch_http::analytics::Analytics;
-use meilisearch_http::{analytics, create_app};
-use meilisearch_http::{setup_meilisearch, Opt};
+use meilisearch_http::{analytics, create_app, setup_meilisearch, Opt};

 #[global_allocator]
 static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
@@ -89,24 +88,22 @@ async fn run_http(
         .keep_alive(KeepAlive::Os);

     if let Some(config) = opt_clone.get_ssl_config()? {
-        http_server
-            .bind_rustls(opt_clone.http_addr, config)?
-            .run()
-            .await?;
+        http_server.bind_rustls(opt_clone.http_addr, config)?.run().await?;
     } else {
         http_server.bind(&opt_clone.http_addr)?.run().await?;
     }
     Ok(())
 }

-pub fn print_launch_resume(opt: &Opt, analytics: Arc<dyn Analytics>, config_read_from: Option<PathBuf>) {
+pub fn print_launch_resume(
+    opt: &Opt,
+    analytics: Arc<dyn Analytics>,
+    config_read_from: Option<PathBuf>,
+) {
     let commit_sha = option_env!("VERGEN_GIT_SHA").unwrap_or("unknown");
     let commit_date = option_env!("VERGEN_GIT_COMMIT_TIMESTAMP").unwrap_or("unknown");
-    let protocol = if opt.ssl_cert_path.is_some() && opt.ssl_key_path.is_some() {
-        "https"
-    } else {
-        "http"
-    };
+    let protocol =
+        if opt.ssl_cert_path.is_some() && opt.ssl_key_path.is_some() { "https" } else { "http" };
     let ascii_name = r#"
 888b d888 d8b 888 d8b 888
 8888b d8888 Y8P 888 Y8P 888
@@ -131,10 +128,7 @@ pub fn print_launch_resume(opt: &Opt, analytics: Arc<dyn Analytics>, config_read
     eprintln!("Environment:\t\t{:?}", opt.env);
     eprintln!("Commit SHA:\t\t{:?}", commit_sha.to_string());
     eprintln!("Commit date:\t\t{:?}", commit_date.to_string());
-    eprintln!(
-        "Package version:\t{:?}",
-        env!("CARGO_PKG_VERSION").to_string()
-    );
+    eprintln!("Package version:\t{:?}", env!("CARGO_PKG_VERSION").to_string());

     #[cfg(all(not(debug_assertions), feature = "analytics"))]
     {
@@ -1,9 +1,8 @@
 use lazy_static::lazy_static;
 use prometheus::{
     opts, register_histogram_vec, register_int_counter_vec, register_int_gauge,
-    register_int_gauge_vec,
+    register_int_gauge_vec, HistogramVec, IntCounterVec, IntGauge, IntGaugeVec,
 };
-use prometheus::{HistogramVec, IntCounterVec, IntGauge, IntGaugeVec};

 const HTTP_RESPONSE_TIME_CUSTOM_BUCKETS: &[f64; 14] = &[
     0.0005, 0.0008, 0.00085, 0.0009, 0.00095, 0.001, 0.00105, 0.0011, 0.00115, 0.0012, 0.0015,
@@ -16,19 +15,14 @@ lazy_static! {
         &["method", "path"]
     )
     .expect("Can't create a metric");
-    pub static ref MEILISEARCH_DB_SIZE_BYTES: IntGauge = register_int_gauge!(opts!(
-        "meilisearch_db_size_bytes",
-        "Meilisearch Db Size In Bytes"
-    ))
-    .expect("Can't create a metric");
+    pub static ref MEILISEARCH_DB_SIZE_BYTES: IntGauge =
+        register_int_gauge!(opts!("meilisearch_db_size_bytes", "Meilisearch Db Size In Bytes"))
+            .expect("Can't create a metric");
     pub static ref MEILISEARCH_INDEX_COUNT: IntGauge =
         register_int_gauge!(opts!("meilisearch_index_count", "Meilisearch Index Count"))
             .expect("Can't create a metric");
     pub static ref MEILISEARCH_INDEX_DOCS_COUNT: IntGaugeVec = register_int_gauge_vec!(
-        opts!(
-            "meilisearch_index_docs_count",
-            "Meilisearch Index Docs Count"
-        ),
+        opts!("meilisearch_index_docs_count", "Meilisearch Index Docs Count"),
         &["index"]
     )
     .expect("Can't create a metric");
@@ -1,24 +1,21 @@
 use std::convert::TryFrom;
+use std::env::VarError;
+use std::ffi::OsStr;
 use std::io::{BufReader, Read};
 use std::num::ParseIntError;
 use std::ops::Deref;
 use std::path::PathBuf;
 use std::str::FromStr;
-use std::ffi::OsStr;
-use std::env::VarError;
 use std::sync::Arc;
 use std::{env, fmt, fs};

 use byte_unit::{Byte, ByteError};
 use clap::Parser;
 use meilisearch_types::milli::update::IndexerConfig;
-use rustls::{
-    server::{
-        AllowAnyAnonymousOrAuthenticatedClient, AllowAnyAuthenticatedClient,
-        ServerSessionMemoryCache,
-    },
-    RootCertStore,
+use rustls::server::{
+    AllowAnyAnonymousOrAuthenticatedClient, AllowAnyAuthenticatedClient, ServerSessionMemoryCache,
 };
+use rustls::RootCertStore;
 use rustls_pemfile::{certs, pkcs8_private_keys, rsa_private_keys};
 use serde::{Deserialize, Serialize};
 use sysinfo::{RefreshKind, System, SystemExt};
@@ -502,9 +499,7 @@ pub struct SchedulerConfig {

 impl SchedulerConfig {
     pub fn export_to_env(self) {
-        let SchedulerConfig {
-            disable_auto_batching,
-        } = self;
+        let SchedulerConfig { disable_auto_batching } = self;
         export_to_env_if_not_present(DISABLE_AUTO_BATCHING, disable_auto_batching.to_string());
     }
 }
@@ -513,9 +508,8 @@ impl TryFrom<&IndexerOpts> for IndexerConfig {
     type Error = anyhow::Error;

     fn try_from(other: &IndexerOpts) -> Result<Self, Self::Error> {
-        let thread_pool = rayon::ThreadPoolBuilder::new()
-            .num_threads(*other.max_indexing_threads)
-            .build()?;
+        let thread_pool =
+            rayon::ThreadPoolBuilder::new().num_threads(*other.max_indexing_threads).build()?;

         Ok(Self {
             log_every_n: Some(other.log_every_n),
@@ -553,11 +547,7 @@ impl FromStr for MaxMemory {

 impl Default for MaxMemory {
     fn default() -> MaxMemory {
-        MaxMemory(
-            total_memory_bytes()
-                .map(|bytes| bytes * 2 / 3)
-                .map(Byte::from_bytes),
-        )
+        MaxMemory(total_memory_bytes().map(|bytes| bytes * 2 / 3).map(Byte::from_bytes))
     }
 }

@@ -757,21 +747,18 @@ mod test {

     #[test]
     fn test_meilli_config_file_path_invalid() {
-        temp_env::with_vars(
-            vec![("MEILI_CONFIG_FILE_PATH", Some("../configgg.toml"))],
-            || {
-                let possible_error_messages = [
+        temp_env::with_vars(vec![("MEILI_CONFIG_FILE_PATH", Some("../configgg.toml"))], || {
+            let possible_error_messages = [
                 "unable to open or read the \"../configgg.toml\" configuration file: No such file or directory (os error 2).",
                 "unable to open or read the \"../configgg.toml\" configuration file: The system cannot find the file specified. (os error 2).", // Windows
             ];
             let error_message = Opt::try_build().unwrap_err().to_string();
             assert!(
                 possible_error_messages.contains(&error_message.as_str()),
                 "Expected onf of {:?}, got {:?}.",
                 possible_error_messages,
                 error_message
             );
-            },
-        );
+        });
     }
 }
@@ -1,17 +1,13 @@
 use std::future::{ready, Ready};

+use actix_web::dev::{self, Service, ServiceRequest, ServiceResponse, Transform};
 use actix_web::http::header;
-use actix_web::HttpResponse;
-use actix_web::{
-    dev::{self, Service, ServiceRequest, ServiceResponse, Transform},
-    Error,
-};
+use actix_web::{Error, HttpResponse};
 use futures_util::future::LocalBoxFuture;
 use meilisearch_auth::actions;
 use meilisearch_lib::MeiliSearch;
 use meilisearch_types::error::ResponseError;
-use prometheus::HistogramTimer;
-use prometheus::{Encoder, TextEncoder};
+use prometheus::{Encoder, HistogramTimer, TextEncoder};

 use crate::extractors::authentication::policies::ActionPolicy;
 use crate::extractors::authentication::GuardedData;
@@ -33,15 +29,11 @@ pub async fn get_metrics(

     let encoder = TextEncoder::new();
     let mut buffer = vec![];
-    encoder
-        .encode(&prometheus::gather(), &mut buffer)
-        .expect("Failed to encode metrics");
+    encoder.encode(&prometheus::gather(), &mut buffer).expect("Failed to encode metrics");

     let response = String::from_utf8(buffer).expect("Failed to convert bytes to string");

-    Ok(HttpResponse::Ok()
-        .insert_header(header::ContentType(mime::TEXT_PLAIN))
-        .body(response))
+    Ok(HttpResponse::Ok().insert_header(header::ContentType(mime::TEXT_PLAIN)).body(response))
 }

 pub struct RouteMetrics;
@@ -1,19 +1,18 @@
 use std::str;

 use actix_web::{web, HttpRequest, HttpResponse};
+use meilisearch_auth::error::AuthControllerError;
+use meilisearch_auth::AuthController;
+use meilisearch_types::error::{Code, ResponseError};
+use meilisearch_types::keys::{Action, Key};
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
 use time::OffsetDateTime;
 use uuid::Uuid;

-use meilisearch_auth::{error::AuthControllerError, AuthController};
-use meilisearch_types::error::{Code, ResponseError};
-use meilisearch_types::keys::{Action, Key};

-use crate::extractors::{
-    authentication::{policies::*, GuardedData},
-    sequential_extractor::SeqHandler,
-};
+use crate::extractors::authentication::policies::*;
+use crate::extractors::authentication::GuardedData;
+use crate::extractors::sequential_extractor::SeqHandler;
 use crate::routes::Pagination;

 pub fn configure(cfg: &mut web::ServiceConfig) {
@@ -52,10 +51,8 @@ pub async fn list_api_keys(
 ) -> Result<HttpResponse, ResponseError> {
     let page_view = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
         let keys = auth_controller.list_keys()?;
-        let page_view = paginate.auto_paginate_sized(
-            keys.into_iter()
-                .map(|k| KeyView::from_key(k, &auth_controller)),
-        );
+        let page_view = paginate
+            .auto_paginate_sized(keys.into_iter().map(|k| KeyView::from_key(k, &auth_controller)));

         Ok(page_view)
     })
@@ -10,7 +10,8 @@ use time::macros::format_description;
 use time::OffsetDateTime;

 use crate::analytics::Analytics;
-use crate::extractors::authentication::{policies::*, GuardedData};
+use crate::extractors::authentication::policies::*;
+use crate::extractors::authentication::GuardedData;
 use crate::extractors::sequential_extractor::SeqHandler;
 use crate::routes::SummarizedTaskView;

@@ -38,9 +39,7 @@ pub async fn create_dump(
         dump_uid,
     };
     let task: SummarizedTaskView =
-        tokio::task::spawn_blocking(move || index_scheduler.register(task))
-            .await??
-            .into();
+        tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();

     debug!("returns: {:?}", task);
     Ok(HttpResponse::Accepted().json(task))
@@ -2,8 +2,7 @@ use std::io::Cursor;

 use actix_web::http::header::CONTENT_TYPE;
 use actix_web::web::Data;
-use actix_web::HttpMessage;
-use actix_web::{web, HttpRequest, HttpResponse};
+use actix_web::{web, HttpMessage, HttpRequest, HttpResponse};
 use bstr::ByteSlice;
 use futures::StreamExt;
 use index_scheduler::IndexScheduler;
@@ -23,17 +22,14 @@ use serde_json::Value;

 use crate::analytics::Analytics;
 use crate::error::MeilisearchHttpError;
-use crate::extractors::authentication::{policies::*, GuardedData};
+use crate::extractors::authentication::policies::*;
+use crate::extractors::authentication::GuardedData;
 use crate::extractors::payload::Payload;
 use crate::extractors::sequential_extractor::SeqHandler;
 use crate::routes::{fold_star_or, PaginationView, SummarizedTaskView};

 static ACCEPTED_CONTENT_TYPE: Lazy<Vec<String>> = Lazy::new(|| {
-    vec![
-        "application/json".to_string(),
-        "application/x-ndjson".to_string(),
-        "text/csv".to_string(),
-    ]
+    vec!["application/json".to_string(), "application/x-ndjson".to_string(), "text/csv".to_string()]
 });

 /// Extracts the mime type from the content type and return
@@ -47,9 +43,7 @@ fn extract_mime_type(req: &HttpRequest) -> Result<Option<Mime>, MeilisearchHttpE
                 content_type.as_bytes().as_bstr().to_string(),
                 ACCEPTED_CONTENT_TYPE.clone(),
             )),
-            None => Err(MeilisearchHttpError::MissingContentType(
-                ACCEPTED_CONTENT_TYPE.clone(),
-            )),
+            None => Err(MeilisearchHttpError::MissingContentType(ACCEPTED_CONTENT_TYPE.clone())),
         },
     }
 }
@@ -101,18 +95,10 @@ pub async fn delete_document(
     index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, Data<IndexScheduler>>,
     path: web::Path<DocumentParam>,
 ) -> Result<HttpResponse, ResponseError> {
-    let DocumentParam {
-        document_id,
-        index_uid,
-    } = path.into_inner();
-    let task = KindWithContent::DocumentDeletion {
-        index_uid,
-        documents_ids: vec![document_id],
-    };
+    let DocumentParam { document_id, index_uid } = path.into_inner();
+    let task = KindWithContent::DocumentDeletion { index_uid, documents_ids: vec![document_id] };
     let task: SummarizedTaskView =
-        tokio::task::spawn_blocking(move || index_scheduler.register(task))
-            .await??
-            .into();
+        tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
     debug!("returns: {:?}", task);
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -133,11 +119,7 @@ pub async fn get_all_documents(
     params: web::Query<BrowseQuery>,
 ) -> Result<HttpResponse, ResponseError> {
     debug!("called with params: {:?}", params);
-    let BrowseQuery {
-        limit,
-        offset,
-        fields,
-    } = params.into_inner();
+    let BrowseQuery { limit, offset, fields } = params.into_inner();
     let attributes_to_retrieve = fields.and_then(fold_star_or);

     let index = index_scheduler.index(&index_uid)?;
@@ -220,10 +202,7 @@ async fn document_addition(
     method: IndexDocumentsMethod,
     allow_index_creation: bool,
 ) -> Result<SummarizedTaskView, MeilisearchHttpError> {
-    let format = match mime_type
-        .as_ref()
-        .map(|m| (m.type_().as_str(), m.subtype().as_str()))
-    {
+    let format = match mime_type.as_ref().map(|m| (m.type_().as_str(), m.subtype().as_str())) {
         Some(("application", "json")) => PayloadType::Json,
         Some(("application", "x-ndjson")) => PayloadType::Ndjson,
         Some(("text", "csv")) => PayloadType::Csv,
@@ -234,9 +213,7 @@ async fn document_addition(
             ))
         }
         None => {
-            return Err(MeilisearchHttpError::MissingContentType(
-                ACCEPTED_CONTENT_TYPE.clone(),
-            ))
+            return Err(MeilisearchHttpError::MissingContentType(ACCEPTED_CONTENT_TYPE.clone()))
         }
     };

@@ -308,21 +285,13 @@ pub async fn delete_documents(
     debug!("called with params: {:?}", body);
     let ids = body
         .iter()
-        .map(|v| {
-            v.as_str()
-                .map(String::from)
-                .unwrap_or_else(|| v.to_string())
-        })
+        .map(|v| v.as_str().map(String::from).unwrap_or_else(|| v.to_string()))
         .collect();

-    let task = KindWithContent::DocumentDeletion {
-        index_uid: path.into_inner(),
-        documents_ids: ids,
-    };
+    let task =
+        KindWithContent::DocumentDeletion { index_uid: path.into_inner(), documents_ids: ids };
     let task: SummarizedTaskView =
-        tokio::task::spawn_blocking(move || index_scheduler.register(task))
-            .await??
-            .into();
+        tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();

     debug!("returns: {:?}", task);
     Ok(HttpResponse::Accepted().json(task))
@ -332,13 +301,9 @@ pub async fn clear_all_documents(
|
|||||||
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, Data<IndexScheduler>>,
|
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, Data<IndexScheduler>>,
|
||||||
path: web::Path<String>,
|
path: web::Path<String>,
|
||||||
) -> Result<HttpResponse, ResponseError> {
|
) -> Result<HttpResponse, ResponseError> {
|
||||||
let task = KindWithContent::DocumentClear {
|
let task = KindWithContent::DocumentClear { index_uid: path.into_inner() };
|
||||||
index_uid: path.into_inner(),
|
|
||||||
};
|
|
||||||
let task: SummarizedTaskView =
|
let task: SummarizedTaskView =
|
||||||
tokio::task::spawn_blocking(move || index_scheduler.register(task))
|
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
|
||||||
.await??
|
|
||||||
.into();
|
|
||||||
|
|
||||||
debug!("returns: {:?}", task);
|
debug!("returns: {:?}", task);
|
||||||
Ok(HttpResponse::Accepted().json(task))
|
Ok(HttpResponse::Accepted().json(task))
|
||||||
@ -352,10 +317,9 @@ fn all_documents<'a>(
|
|||||||
let all_fields: Vec<_> = fields_ids_map.iter().map(|(id, _)| id).collect();
|
let all_fields: Vec<_> = fields_ids_map.iter().map(|(id, _)| id).collect();
|
||||||
|
|
||||||
Ok(index.all_documents(rtxn)?.map(move |ret| {
|
Ok(index.all_documents(rtxn)?.map(move |ret| {
|
||||||
ret.map_err(ResponseError::from)
|
ret.map_err(ResponseError::from).and_then(|(_key, document)| -> Result<_, ResponseError> {
|
||||||
.and_then(|(_key, document)| -> Result<_, ResponseError> {
|
Ok(milli::obkv_to_json(&all_fields, &fields_ids_map, document)?)
|
||||||
Ok(milli::obkv_to_json(&all_fields, &fields_ids_map, document)?)
|
})
|
||||||
})
|
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -9,11 +9,11 @@ use serde::{Deserialize, Serialize};
 use serde_json::json;
 use time::OffsetDateTime;
 
-use crate::analytics::Analytics;
-use crate::extractors::authentication::{policies::*, AuthenticationError, GuardedData};
-use crate::extractors::sequential_extractor::SeqHandler;
-
 use super::{Pagination, SummarizedTaskView};
+use crate::analytics::Analytics;
+use crate::extractors::authentication::policies::*;
+use crate::extractors::authentication::{AuthenticationError, GuardedData};
+use crate::extractors::sequential_extractor::SeqHandler;
 
 pub mod documents;
 pub mod search;
@@ -104,14 +104,9 @@ pub async fn create_index(
             Some(&req),
         );
 
-        let task = KindWithContent::IndexCreation {
-            index_uid: uid,
-            primary_key,
-        };
+        let task = KindWithContent::IndexCreation { index_uid: uid, primary_key };
         let task: SummarizedTaskView =
-            tokio::task::spawn_blocking(move || index_scheduler.register(task))
-                .await??
-                .into();
+            tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
         Ok(HttpResponse::Accepted().json(task))
     } else {
@@ -160,9 +155,7 @@ pub async fn update_index(
     };
 
     let task: SummarizedTaskView =
-        tokio::task::spawn_blocking(move || index_scheduler.register(task))
-            .await??
-            .into();
+        tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
     debug!("returns: {:?}", task);
     Ok(HttpResponse::Accepted().json(task))
@@ -172,13 +165,9 @@ pub async fn delete_index(
    index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_DELETE }>, Data<IndexScheduler>>,
    index_uid: web::Path<String>,
 ) -> Result<HttpResponse, ResponseError> {
-    let task = KindWithContent::IndexDeletion {
-        index_uid: index_uid.into_inner(),
-    };
+    let task = KindWithContent::IndexDeletion { index_uid: index_uid.into_inner() };
     let task: SummarizedTaskView =
-        tokio::task::spawn_blocking(move || index_scheduler.register(task))
-            .await??
-            .into();
+        tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -189,11 +178,7 @@ pub async fn get_index_stats(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    analytics.publish(
-        "Stats Seen".to_string(),
-        json!({ "per_index_uid": true }),
-        Some(&req),
-    );
+    analytics.publish("Stats Seen".to_string(), json!({ "per_index_uid": true }), Some(&req));
 
     let stats = IndexStats::new((*index_scheduler).clone(), index_uid.into_inner());
 
@@ -9,7 +9,8 @@ use serde_cs::vec::CS;
 use serde_json::Value;
 
 use crate::analytics::{Analytics, SearchAggregator};
-use crate::extractors::authentication::{policies::*, GuardedData};
+use crate::extractors::authentication::policies::*;
+use crate::extractors::authentication::GuardedData;
 use crate::extractors::sequential_extractor::SeqHandler;
 use crate::search::{
     perform_search, MatchingStrategy, SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
@@ -76,9 +77,7 @@ impl From<SearchQueryGet> for SearchQuery {
                 .map(|o| o.into_iter().collect()),
             attributes_to_crop: other.attributes_to_crop.map(|o| o.into_iter().collect()),
             crop_length: other.crop_length,
-            attributes_to_highlight: other
-                .attributes_to_highlight
-                .map(|o| o.into_iter().collect()),
+            attributes_to_highlight: other.attributes_to_highlight.map(|o| o.into_iter().collect()),
             filter,
             sort: other.sort.map(|attr| fix_sort_query_parameters(&attr)),
             show_matches_position: other.show_matches_position,
@@ -147,10 +146,8 @@ pub async fn search_with_url_query(
     let mut query: SearchQuery = params.into_inner().into();
 
     // Tenant token search_rules.
-    if let Some(search_rules) = index_scheduler
-        .filters()
-        .search_rules
-        .get_index_search_rules(&index_uid)
+    if let Some(search_rules) =
+        index_scheduler.filters().search_rules.get_index_search_rules(&index_uid)
     {
         add_search_rules(&mut query, search_rules);
     }
@@ -181,10 +178,8 @@ pub async fn search_with_post(
     debug!("search called with params: {:?}", query);
 
     // Tenant token search_rules.
-    if let Some(search_rules) = index_scheduler
-        .filters()
-        .search_rules
-        .get_index_search_rules(&index_uid)
+    if let Some(search_rules) =
+        index_scheduler.filters().search_rules.get_index_search_rules(&index_uid)
     {
         add_search_rules(&mut query, search_rules);
     }
@@ -213,13 +208,7 @@ mod test {
         let sort = fix_sort_query_parameters("_geoPoint(12, 13):asc");
         assert_eq!(sort, vec!["_geoPoint(12,13):asc".to_string()]);
         let sort = fix_sort_query_parameters("doggo:asc,_geoPoint(12.45,13.56):desc");
-        assert_eq!(
-            sort,
-            vec![
-                "doggo:asc".to_string(),
-                "_geoPoint(12.45,13.56):desc".to_string(),
-            ]
-        );
+        assert_eq!(sort, vec!["doggo:asc".to_string(), "_geoPoint(12.45,13.56):desc".to_string(),]);
         let sort = fix_sort_query_parameters(
             "doggo:asc , _geoPoint(12.45, 13.56, 2590352):desc , catto:desc",
         );
@@ -233,12 +222,6 @@ mod test {
         );
         let sort = fix_sort_query_parameters("doggo:asc , _geoPoint(1, 2), catto:desc");
         // This is ugly but eh, I don't want to write a full parser just for this unused route
-        assert_eq!(
-            sort,
-            vec![
-                "doggo:asc".to_string(),
-                "_geoPoint(1,2),catto:desc".to_string(),
-            ]
-        );
+        assert_eq!(sort, vec!["doggo:asc".to_string(), "_geoPoint(1,2),catto:desc".to_string(),]);
     }
 }
 
@@ -1,15 +1,15 @@
 use actix_web::web::Data;
-use log::debug;
-
 use actix_web::{web, HttpRequest, HttpResponse};
 use index_scheduler::IndexScheduler;
+use log::debug;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::settings::{settings, Settings, Unchecked};
 use meilisearch_types::tasks::KindWithContent;
 use serde_json::json;
 
 use crate::analytics::Analytics;
-use crate::extractors::authentication::{policies::*, GuardedData};
+use crate::extractors::authentication::policies::*;
+use crate::extractors::authentication::GuardedData;
 use crate::routes::SummarizedTaskView;
 
 #[macro_export]
@@ -18,16 +18,15 @@ macro_rules! make_setting_route {
         pub mod $attr {
             use actix_web::web::Data;
             use actix_web::{web, HttpRequest, HttpResponse, Resource};
-            use log::debug;
-
             use index_scheduler::IndexScheduler;
+            use log::debug;
+            use meilisearch_types::error::ResponseError;
             use meilisearch_types::milli::update::Setting;
             use meilisearch_types::settings::{settings, Settings};
             use meilisearch_types::tasks::KindWithContent;
 
-            use meilisearch_types::error::ResponseError;
             use $crate::analytics::Analytics;
-            use $crate::extractors::authentication::{policies::*, GuardedData};
+            use $crate::extractors::authentication::policies::*;
+            use $crate::extractors::authentication::GuardedData;
             use $crate::extractors::sequential_extractor::SeqHandler;
             use $crate::routes::SummarizedTaskView;
 
@@ -38,10 +37,7 @@ macro_rules! make_setting_route {
                 >,
                 index_uid: web::Path<String>,
             ) -> Result<HttpResponse, ResponseError> {
-                let new_settings = Settings {
-                    $attr: Setting::Reset,
-                    ..Default::default()
-                };
+                let new_settings = Settings { $attr: Setting::Reset, ..Default::default() };
 
                 let allow_index_creation = index_scheduler.filters().allow_index_creation;
                 let task = KindWithContent::Settings {
@@ -270,13 +266,7 @@ make_setting_route!(
     "synonyms"
 );
 
-make_setting_route!(
-    "/distinct-attribute",
-    put,
-    String,
-    distinct_attribute,
-    "distinctAttribute"
-);
+make_setting_route!("/distinct-attribute", put, String, distinct_attribute, "distinctAttribute");
 
 make_setting_route!(
     "/ranking-rules",
@@ -453,9 +443,7 @@ pub async fn update_all(
         allow_index_creation,
     };
     let task: SummarizedTaskView =
-        tokio::task::spawn_blocking(move || index_scheduler.register(task))
-            .await??
-            .into();
+        tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
     debug!("returns: {:?}", task);
     Ok(HttpResponse::Accepted().json(task))
@@ -486,9 +474,7 @@ pub async fn delete_all(
         allow_index_creation,
     };
     let task: SummarizedTaskView =
-        tokio::task::spawn_blocking(move || index_scheduler.register(task))
-            .await??
-            .into();
+        tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
     debug!("returns: {:?}", task);
     Ok(HttpResponse::Accepted().json(task))
 
@@ -1,8 +1,5 @@
 use std::collections::HashSet;
 
-use crate::extractors::authentication::{policies::*, GuardedData};
-use crate::extractors::sequential_extractor::SeqHandler;
-use crate::routes::tasks::TaskView;
 use actix_web::web::Data;
 use actix_web::{web, HttpResponse};
 use index_scheduler::IndexScheduler;
@@ -10,6 +7,11 @@ use meilisearch_types::error::{Code, ResponseError};
 use meilisearch_types::tasks::KindWithContent;
 use serde::Deserialize;
 
+use crate::extractors::authentication::policies::*;
+use crate::extractors::authentication::GuardedData;
+use crate::extractors::sequential_extractor::SeqHandler;
+use crate::routes::tasks::TaskView;
+
 pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(web::resource("").route(web::post().to(SeqHandler(indexes_swap))));
 }
@@ -33,10 +35,7 @@ pub async fn indexes_swap(
 
     let mut swaps = vec![];
     let mut indexes_set = HashSet::<String>::default();
-    for IndexesSwapPayload {
-        indexes: (lhs, rhs),
-    } in params.into_inner().into_iter()
-    {
+    for IndexesSwapPayload { indexes: (lhs, rhs) } in params.into_inner().into_iter() {
         if !search_rules.is_index_authorized(&lhs) || !search_rules.is_index_authorized(&lhs) {
             return Err(ResponseError::from_msg(
                 "TODO: error message when we swap with an index were not allowed to access"
 
@@ -12,10 +12,10 @@ use serde::{Deserialize, Serialize};
 use serde_json::json;
 use time::OffsetDateTime;
 
-use crate::analytics::Analytics;
-use crate::extractors::authentication::{policies::*, GuardedData};
-
 use self::indexes::IndexStats;
+use crate::analytics::Analytics;
+use crate::extractors::authentication::policies::*;
+use crate::extractors::authentication::GuardedData;
 
 mod api_key;
 mod dump;
@@ -102,11 +102,7 @@ impl Pagination {
         T: Serialize,
     {
         let total = content.len();
-        let content: Vec<_> = content
-            .into_iter()
-            .skip(self.offset)
-            .take(self.limit)
-            .collect();
+        let content: Vec<_> = content.into_iter().skip(self.offset).take(self.limit).collect();
         self.format_with(total, content)
     }
 
@@ -119,11 +115,7 @@ impl Pagination {
     where
         T: Serialize,
     {
-        let content: Vec<_> = content
-            .into_iter()
-            .skip(self.offset)
-            .take(self.limit)
-            .collect();
+        let content: Vec<_> = content.into_iter().skip(self.offset).take(self.limit).collect();
         self.format_with(total, content)
     }
 
@@ -133,23 +125,13 @@ impl Pagination {
     where
         T: Serialize,
     {
-        PaginationView {
-            results,
-            offset: self.offset,
-            limit: self.limit,
-            total,
-        }
+        PaginationView { results, offset: self.offset, limit: self.limit, total }
     }
 }
 
 impl<T> PaginationView<T> {
     pub fn new(offset: usize, limit: usize, total: usize, results: Vec<T>) -> Self {
-        Self {
-            offset,
-            limit,
-            results,
-            total,
-        }
+        Self { offset, limit, results, total }
     }
 }
 
@@ -211,10 +193,7 @@ pub struct EnqueuedUpdateResult {
     pub update_type: UpdateType,
     #[serde(with = "time::serde::rfc3339")]
     pub enqueued_at: OffsetDateTime,
-    #[serde(
-        skip_serializing_if = "Option::is_none",
-        with = "time::serde::rfc3339::option"
-    )]
+    #[serde(skip_serializing_if = "Option::is_none", with = "time::serde::rfc3339::option")]
     pub started_processing_at: Option<OffsetDateTime>,
 }
 
@@ -275,11 +254,7 @@ async fn get_stats(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    analytics.publish(
-        "Stats Seen".to_string(),
-        json!({ "per_index_uid": false }),
-        Some(&req),
-    );
+    analytics.publish("Stats Seen".to_string(), json!({ "per_index_uid": false }), Some(&req));
     let search_rules = &index_scheduler.filters().search_rules;
 
     let stats = create_all_stats((*index_scheduler).clone(), search_rules)?;
@@ -300,9 +275,7 @@ pub fn create_all_stats(
         limit: Some(1),
         ..Query::default()
     })?;
-    let processing_index = processing_task
-        .first()
-        .and_then(|task| task.index_uid().clone());
+    let processing_index = processing_task.first().and_then(|task| task.index_uid().clone());
     for (name, index) in index_scheduler.indexes()? {
         if !search_rules.is_index_authorized(&name) {
             continue;
@@ -313,9 +286,7 @@ pub fn create_all_stats(
         let rtxn = index.read_txn()?;
         let stats = IndexStats {
             number_of_documents: index.number_of_documents(&rtxn)?,
-            is_indexing: processing_index
-                .as_deref()
-                .map_or(false, |index_name| name == index_name),
+            is_indexing: processing_index.as_deref().map_or(false, |index_name| name == index_name),
             field_distribution: index.field_distribution(&rtxn)?,
         };
 
@@ -324,11 +295,7 @@ pub fn create_all_stats(
 
         indexes.insert(name, stats);
     }
-    let stats = Stats {
-        database_size,
-        last_update: last_task,
-        indexes,
-    };
+    let stats = Stats { database_size, last_update: last_task, indexes };
     Ok(stats)
 }
 
@@ -12,11 +12,11 @@ use serde_json::json;
 use time::{Duration, OffsetDateTime};
 use tokio::task::block_in_place;
 
-use crate::analytics::Analytics;
-use crate::extractors::authentication::{policies::*, GuardedData};
-use crate::extractors::sequential_extractor::SeqHandler;
-
 use super::fold_star_or;
+use crate::analytics::Analytics;
+use crate::extractors::authentication::policies::*;
+use crate::extractors::authentication::GuardedData;
+use crate::extractors::sequential_extractor::SeqHandler;
 
 const DEFAULT_LIMIT: fn() -> u32 = || 20;
 
@@ -80,10 +80,7 @@ impl TaskView {
             canceled_by: task.canceled_by,
             details: task.details.clone().map(DetailsView::from),
             error: task.error.clone(),
-            duration: task
-                .started_at
-                .zip(task.finished_at)
-                .map(|(start, end)| end - start),
+            duration: task.started_at.zip(task.finished_at).map(|(start, end)| end - start),
             enqueued_at: task.enqueued_at,
             started_at: task.started_at,
             finished_at: task.finished_at,
@@ -124,62 +121,45 @@ pub struct DetailsView {
 impl From<Details> for DetailsView {
     fn from(details: Details) -> Self {
         match details.clone() {
-            Details::DocumentAddition {
-                received_documents,
-                indexed_documents,
-            } => DetailsView {
+            Details::DocumentAddition { received_documents, indexed_documents } => DetailsView {
                 received_documents: Some(received_documents),
                 indexed_documents,
                 ..DetailsView::default()
             },
-            Details::Settings { settings } => DetailsView {
-                settings: Some(settings),
-                ..DetailsView::default()
-            },
-            Details::IndexInfo { primary_key } => DetailsView {
-                primary_key: Some(primary_key),
-                ..DetailsView::default()
-            },
-            Details::DocumentDeletion {
-                received_document_ids,
-                deleted_documents,
-            } => DetailsView {
+            Details::Settings { settings } => {
+                DetailsView { settings: Some(settings), ..DetailsView::default() }
+            }
+            Details::IndexInfo { primary_key } => {
+                DetailsView { primary_key: Some(primary_key), ..DetailsView::default() }
+            }
+            Details::DocumentDeletion { received_document_ids, deleted_documents } => DetailsView {
                 received_document_ids: Some(received_document_ids),
                 deleted_documents: Some(deleted_documents),
                 ..DetailsView::default()
             },
-            Details::ClearAll { deleted_documents } => DetailsView {
-                deleted_documents: Some(deleted_documents),
-                ..DetailsView::default()
-            },
-            Details::TaskCancelation {
-                matched_tasks,
-                canceled_tasks,
-                original_query,
-            } => DetailsView {
-                matched_tasks: Some(matched_tasks),
-                canceled_tasks: Some(canceled_tasks),
-                original_query: Some(original_query),
-                ..DetailsView::default()
-            },
-            Details::TaskDeletion {
-                matched_tasks,
-                deleted_tasks,
-                original_query,
-            } => DetailsView {
+            Details::ClearAll { deleted_documents } => {
+                DetailsView { deleted_documents: Some(deleted_documents), ..DetailsView::default() }
+            }
+            Details::TaskCancelation { matched_tasks, canceled_tasks, original_query } => {
+                DetailsView {
+                    matched_tasks: Some(matched_tasks),
+                    canceled_tasks: Some(canceled_tasks),
+                    original_query: Some(original_query),
+                    ..DetailsView::default()
+                }
+            }
+            Details::TaskDeletion { matched_tasks, deleted_tasks, original_query } => DetailsView {
                 matched_tasks: Some(matched_tasks),
                 deleted_tasks: Some(deleted_tasks),
                 original_query: Some(original_query),
                 ..DetailsView::default()
             },
-            Details::Dump { dump_uid } => DetailsView {
-                dump_uid: Some(dump_uid),
-                ..DetailsView::default()
-            },
-            Details::IndexSwap { swaps } => DetailsView {
-                indexes: Some(swaps),
-                ..Default::default()
-            },
+            Details::Dump { dump_uid } => {
+                DetailsView { dump_uid: Some(dump_uid), ..DetailsView::default() }
+            }
+            Details::IndexSwap { swaps } => {
+                DetailsView { indexes: Some(swaps), ..Default::default() }
+            }
         }
     }
 }
@@ -318,10 +298,8 @@ async fn cancel_tasks(
 
     let filtered_query = filter_out_inaccessible_indexes_from_query(&index_scheduler, &query);
     let tasks = index_scheduler.get_task_ids(&filtered_query)?;
-    let task_cancelation = KindWithContent::TaskCancelation {
-        query: req.query_string().to_string(),
-        tasks,
-    };
+    let task_cancelation =
+        KindWithContent::TaskCancelation { query: req.query_string().to_string(), tasks };
 
     let task = block_in_place(|| index_scheduler.register(task_cancelation))?;
     let task_view = TaskView::from_task(&task);
@@ -377,10 +355,8 @@ async fn delete_tasks(
 
     let filtered_query = filter_out_inaccessible_indexes_from_query(&index_scheduler, &query);
     let tasks = index_scheduler.get_task_ids(&filtered_query)?;
-    let task_deletion = KindWithContent::TaskDeletion {
-        query: req.query_string().to_string(),
-        tasks,
-    };
+    let task_deletion =
+        KindWithContent::TaskDeletion { query: req.query_string().to_string(), tasks };
 
     let task = block_in_place(|| index_scheduler.register(task_deletion))?;
     let task_view = TaskView::from_task(&task);
@@ -448,11 +424,8 @@ async fn get_tasks(
     };
     let query = filter_out_inaccessible_indexes_from_query(&index_scheduler, &query);
 
-    let mut tasks_results: Vec<TaskView> = index_scheduler
-        .get_tasks(query)?
-        .into_iter()
-        .map(|t| TaskView::from_task(&t))
-        .collect();
+    let mut tasks_results: Vec<TaskView> =
+        index_scheduler.get_tasks(query)?.into_iter().map(|t| TaskView::from_task(&t)).collect();
 
     // If we were able to fetch the number +1 tasks we asked
     // it means that there is more to come.
@@ -483,11 +456,7 @@ async fn get_task(
 ) -> Result<HttpResponse, ResponseError> {
     let task_id = task_id.into_inner();
 
-    analytics.publish(
-        "Tasks Seen".to_string(),
-        json!({ "per_task_uid": true }),
-        Some(&req),
-    );
+    analytics.publish("Tasks Seen".to_string(), json!({ "per_task_uid": true }), Some(&req));
 
     let search_rules = &index_scheduler.filters().search_rules;
     let mut filters = index_scheduler::Query::default();
@@ -541,10 +510,9 @@ fn filter_out_inaccessible_indexes_from_query<const ACTION: u8>(
 }
 
 pub(crate) mod date_deserializer {
-    use time::{
-        format_description::well_known::Rfc3339, macros::format_description, Date, Duration,
-        OffsetDateTime, Time,
-    };
+    use time::format_description::well_known::Rfc3339;
+    use time::macros::format_description;
+    use time::{Date, Duration, OffsetDateTime, Time};
 
     enum DeserializeDateOption {
         Before,
@@ -586,10 +554,11 @@ pub(crate) mod date_deserializer {
 
     /// Deserialize an upper bound datetime with RFC3339 or YYYY-MM-DD.
     pub(crate) mod before {
-        use super::{deserialize_date, DeserializeDateOption};
         use serde::Deserializer;
        use time::OffsetDateTime;
 
+        use super::{deserialize_date, DeserializeDateOption};
+
         /// Deserialize an [`Option<OffsetDateTime>`] from its ISO 8601 representation.
         pub fn deserialize<'a, D: Deserializer<'a>>(
             deserializer: D,
@@ -638,10 +607,11 @@ pub(crate) mod date_deserializer {
     ///
     /// If YYYY-MM-DD is used, the day is incremented by one.
    pub(crate) mod after {
-        use super::{deserialize_date, DeserializeDateOption};
         use serde::Deserializer;
         use time::OffsetDateTime;
 
+        use super::{deserialize_date, DeserializeDateOption};
+
         /// Deserialize an [`Option<OffsetDateTime>`] from its ISO 8601 representation.
         pub fn deserialize<'a, D: Deserializer<'a>>(
             deserializer: D,
@@ -689,9 +659,10 @@ pub(crate) mod date_deserializer {
 
 #[cfg(test)]
 mod tests {
-    use crate::routes::tasks::TaskDeletionQuery;
     use meili_snap::snapshot;
 
+    use crate::routes::tasks::TaskDeletionQuery;
+
     #[test]
     fn deserialize_task_deletion_query_datetime() {
         {
 
@@ -145,12 +145,7 @@ pub fn perform_search(
         search.sort_criteria(sort);
     }
 
-    let milli::SearchResult {
-        documents_ids,
-        matching_words,
-        candidates,
-        ..
-    } = search.execute()?;
+    let milli::SearchResult { documents_ids, matching_words, candidates, .. } = search.execute()?;
 
     let fields_ids_map = index.fields_ids_map(&rtxn).unwrap();
 
@@ -240,11 +235,7 @@ pub fn perform_search(
             insert_geo_distance(sort, &mut document);
         }
 
-        let hit = SearchHit {
-            document,
-            formatted,
-            matches_position,
-        };
+        let hit = SearchHit { document, formatted, matches_position };
         documents.push(hit);
     }
 
@@ -289,10 +280,7 @@ fn insert_geo_distance(sorts: &[String], document: &mut Document) {
     };
     if let Some(capture_group) = sorts.iter().find_map(|sort| GEO_REGEX.captures(sort)) {
         // TODO: TAMO: milli encountered an internal error, what do we want to do?
-        let base = [
-            capture_group[1].parse().unwrap(),
-            capture_group[2].parse().unwrap(),
-        ];
+        let base = [capture_group[1].parse().unwrap(), capture_group[2].parse().unwrap()];
         let geo_point = &document.get("_geo").unwrap_or(&json!(null));
         if let Some((lat, lng)) = geo_point["lat"].as_f64().zip(geo_point["lng"].as_f64()) {
             let distance = milli::distance_between_two_points(&base, &[lat, lng]);
@@ -341,10 +329,7 @@ fn add_highlight_to_formatted_options(
     displayed_ids: &BTreeSet<FieldId>,
 ) {
     for attr in attr_to_highlight {
-        let new_format = FormatOptions {
-            highlight: true,
-            crop: None,
-        };
+        let new_format = FormatOptions { highlight: true, crop: None };
 
         if attr == "*" {
             for id in displayed_ids {
@@ -383,10 +368,7 @@ fn add_crop_to_formatted_options(
                 formatted_options
                     .entry(*id)
                     .and_modify(|f| f.crop = Some(attr_len))
-                    .or_insert(FormatOptions {
-                        highlight: false,
-                        crop: Some(attr_len),
-                    });
+                    .or_insert(FormatOptions { highlight: false, crop: Some(attr_len) });
             }
         }
 
@@ -395,10 +377,7 @@ fn add_crop_to_formatted_options(
                 formatted_options
                     .entry(id)
                     .and_modify(|f| f.crop = Some(attr_len))
-                    .or_insert(FormatOptions {
-                        highlight: false,
-                        crop: Some(attr_len),
-                    });
+                    .or_insert(FormatOptions { highlight: false, crop: Some(attr_len) });
             }
         }
     }
@@ -409,10 +388,7 @@ fn add_non_formatted_ids_to_formatted_options(
     to_retrieve_ids: &BTreeSet<FieldId>,
 ) {
     for id in to_retrieve_ids {
-        formatted_options.entry(*id).or_insert(FormatOptions {
-            highlight: false,
-            crop: None,
-        });
+        formatted_options.entry(*id).or_insert(FormatOptions { highlight: false, crop: None });
     }
 }
 
@@ -426,10 +402,7 @@ fn make_document(
     // recreate the original json
     for (key, value) in obkv.iter() {
         let value = serde_json::from_slice(value)?;
-        let key = field_ids_map
-            .name(key)
-            .expect("Missing field name")
-            .to_string();
+        let key = field_ids_map.name(key).expect("Missing field name").to_string();
 
         document.insert(key, value);
     }
@@ -455,9 +428,8 @@ fn format_fields<'a, A: AsRef<[u8]>>(
     let mut document = document.clone();
 
     // select the attributes to retrieve
-    let displayable_names = displayable_ids
-        .iter()
-        .map(|&fid| field_ids_map.name(fid).expect("Missing field name"));
+    let displayable_names =
+        displayable_ids.iter().map(|&fid| field_ids_map.name(fid).expect("Missing field name"));
     permissive_json_pointer::map_leaf_values(&mut document, displayable_names, |key, value| {
         // To get the formatting option of each key we need to see all the rules that applies
         // to the value and merge them together. eg. If a user said he wanted to highlight `doggo`
@@ -473,13 +445,7 @@ fn format_fields<'a, A: AsRef<[u8]>>(
             .reduce(|acc, option| acc.merge(option));
         let mut infos = Vec::new();
 
-        *value = format_value(
-            std::mem::take(value),
-            builder,
-            format,
-            &mut infos,
-            compute_matches,
-        );
+        *value = format_value(std::mem::take(value), builder, format, &mut infos, compute_matches);
 
         if let Some(matches) = matches_position.as_mut() {
             if !infos.is_empty() {
 
@@ -1,7 +1,9 @@
-use crate::common::Server;
+use std::{thread, time};
+
 use assert_json_diff::assert_json_include;
 use serde_json::{json, Value};
-use std::{thread, time};
+
+use crate::common::Server;
 
 #[actix_rt::test]
 async fn add_valid_api_key() {
 
@@ -1,11 +1,13 @@
-use crate::common::Server;
+use std::collections::{HashMap, HashSet};
+
 use ::time::format_description::well_known::Rfc3339;
 use maplit::{hashmap, hashset};
 use once_cell::sync::Lazy;
 use serde_json::{json, Value};
-use std::collections::{HashMap, HashSet};
 use time::{Duration, OffsetDateTime};
 
+use crate::common::Server;
+
 pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'static str>>> =
     Lazy::new(|| {
         let mut authorizations = hashmap! {
@@ -57,21 +59,14 @@ pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'
         };
 
         if cfg!(feature = "metrics") {
-            authorizations.insert(
-                ("GET", "/metrics"),
-                hashset! {"metrics.get", "metrics.*", "*"},
-            );
+            authorizations.insert(("GET", "/metrics"), hashset! {"metrics.get", "metrics.*", "*"});
         }
 
         authorizations
     });
 
 pub static ALL_ACTIONS: Lazy<HashSet<&'static str>> = Lazy::new(|| {
-    AUTHORIZATIONS
-        .values()
-        .cloned()
-        .reduce(|l, r| l.union(&r).cloned().collect())
-        .unwrap()
+    AUTHORIZATIONS.values().cloned().reduce(|l, r| l.union(&r).cloned().collect()).unwrap()
 });
 
 static INVALID_RESPONSE: Lazy<Value> = Lazy::new(|| {
@@ -109,13 +104,7 @@ async fn error_access_expired_key() {
     for (method, route) in AUTHORIZATIONS.keys() {
         let (response, code) = server.dummy_request(method, route).await;
 
-        assert_eq!(
-            response,
-            INVALID_RESPONSE.clone(),
-            "on route: {:?} - {:?}",
-            method,
-            route
-        );
+        assert_eq!(response, INVALID_RESPONSE.clone(), "on route: {:?} - {:?}", method, route);
         assert_eq!(403, code, "{:?}", &response);
     }
 }
@@ -146,13 +135,7 @@ async fn error_access_unauthorized_index() {
     {
         let (response, code) = server.dummy_request(method, route).await;
 
-        assert_eq!(
-            response,
-            INVALID_RESPONSE.clone(),
-            "on route: {:?} - {:?}",
-            method,
-            route
-        );
+        assert_eq!(response, INVALID_RESPONSE.clone(), "on route: {:?} - {:?}", method, route);
         assert_eq!(403, code, "{:?}", &response);
     }
 }
@@ -180,13 +163,7 @@ async fn error_access_unauthorized_action() {
         server.use_api_key(&key);
         let (response, code) = server.dummy_request(method, route).await;
 
-        assert_eq!(
-            response,
-            INVALID_RESPONSE.clone(),
-            "on route: {:?} - {:?}",
-            method,
-            route
-        );
+        assert_eq!(response, INVALID_RESPONSE.clone(), "on route: {:?} - {:?}", method, route);
        assert_eq!(403, code, "{:?}", &response);
     }
 }
@@ -201,13 +178,7 @@ async fn access_authorized_master_key() {
     for ((method, route), _) in AUTHORIZATIONS.iter() {
         let (response, code) = server.dummy_request(method, route).await;
 
-        assert_ne!(
-            response,
-            INVALID_RESPONSE.clone(),
-            "on route: {:?} - {:?}",
-            method,
-            route
-        );
+        assert_ne!(response, INVALID_RESPONSE.clone(), "on route: {:?} - {:?}", method, route);
         assert_ne!(code, 403);
     }
 }
 
@@ -3,11 +3,11 @@ mod authorization;
 mod payload;
 mod tenant_token;
 
-use crate::common::Server;
 use actix_web::http::StatusCode;
-
 use serde_json::{json, Value};
 
+use crate::common::Server;
+
 impl Server {
     pub fn use_api_key(&mut self, api_key: impl AsRef<str>) {
         self.service.api_key = Some(api_key.as_ref().to_string());
 
@@ -1,7 +1,8 @@
-use crate::common::Server;
 use actix_web::test;
 use serde_json::{json, Value};
 
+use crate::common::Server;
+
 #[actix_rt::test]
 async fn error_api_key_bad_content_types() {
     let content = json!({
@@ -36,10 +37,7 @@ async fn error_api_key_bad_content_types() {
     );
     assert_eq!(response["code"], "invalid_content_type");
     assert_eq!(response["type"], "invalid_request");
-    assert_eq!(
-        response["link"],
-        "https://docs.meilisearch.com/errors#invalid_content_type"
-    );
+    assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");
 
     // patch
     let req = test::TestRequest::patch()
@@ -61,10 +59,7 @@ async fn error_api_key_bad_content_types() {
     );
     assert_eq!(response["code"], "invalid_content_type");
     assert_eq!(response["type"], "invalid_request");
-    assert_eq!(
-        response["link"],
-        "https://docs.meilisearch.com/errors#invalid_content_type"
-    );
+    assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");
 }
 
 #[actix_rt::test]
@@ -101,10 +96,7 @@ async fn error_api_key_empty_content_types() {
     );
     assert_eq!(response["code"], "invalid_content_type");
     assert_eq!(response["type"], "invalid_request");
-    assert_eq!(
-        response["link"],
-        "https://docs.meilisearch.com/errors#invalid_content_type"
-    );
+    assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");
 
     // patch
     let req = test::TestRequest::patch()
@@ -126,10 +118,7 @@ async fn error_api_key_empty_content_types() {
     );
     assert_eq!(response["code"], "invalid_content_type");
     assert_eq!(response["type"], "invalid_request");
-    assert_eq!(
-        response["link"],
-        "https://docs.meilisearch.com/errors#invalid_content_type"
-    );
+    assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");
 }
 
 #[actix_rt::test]
@@ -165,10 +154,7 @@ async fn error_api_key_missing_content_types() {
     );
     assert_eq!(response["code"], "missing_content_type");
     assert_eq!(response["type"], "invalid_request");
-    assert_eq!(
-        response["link"],
-        "https://docs.meilisearch.com/errors#missing_content_type"
-    );
+    assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing_content_type");
 
     // patch
     let req = test::TestRequest::patch()
@@ -189,10 +175,7 @@ async fn error_api_key_missing_content_types() {
     );
     assert_eq!(response["code"], "missing_content_type");
     assert_eq!(response["type"], "invalid_request");
-    assert_eq!(
-        response["link"],
-        "https://docs.meilisearch.com/errors#missing_content_type"
-    );
+    assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing_content_type");
 }
 
 #[actix_rt::test]
@@ -217,10 +200,7 @@ async fn error_api_key_empty_payload() {
     assert_eq!(status_code, 400);
     assert_eq!(response["code"], json!("missing_payload"));
     assert_eq!(response["type"], json!("invalid_request"));
-    assert_eq!(
-        response["link"],
-        json!("https://docs.meilisearch.com/errors#missing_payload")
-    );
+    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
     assert_eq!(response["message"], json!(r#"A json payload is missing."#));
 
     // patch
@@ -237,10 +217,7 @@ async fn error_api_key_empty_payload() {
     assert_eq!(status_code, 400);
     assert_eq!(response["code"], json!("missing_payload"));
     assert_eq!(response["type"], json!("invalid_request"));
-    assert_eq!(
-        response["link"],
-        json!("https://docs.meilisearch.com/errors#missing_payload")
-    );
+    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
     assert_eq!(response["message"], json!(r#"A json payload is missing."#));
 }
 
@@ -266,10 +243,7 @@ async fn error_api_key_malformed_payload() {
     assert_eq!(status_code, 400);
     assert_eq!(response["code"], json!("malformed_payload"));
    assert_eq!(response["type"], json!("invalid_request"));
-    assert_eq!(
-        response["link"],
-        json!("https://docs.meilisearch.com/errors#malformed_payload")
-    );
+    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
     assert_eq!(
         response["message"],
         json!(
@@ -291,10 +265,7 @@ async fn error_api_key_malformed_payload() {
     assert_eq!(status_code, 400);
     assert_eq!(response["code"], json!("malformed_payload"));
     assert_eq!(response["type"], json!("invalid_request"));
-    assert_eq!(
-        response["link"],
-        json!("https://docs.meilisearch.com/errors#malformed_payload")
-    );
+    assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
     assert_eq!(
         response["message"],
         json!(
 
@@ -1,12 +1,13 @@
-use crate::common::Server;
+use std::collections::HashMap;
+
 use ::time::format_description::well_known::Rfc3339;
 use maplit::hashmap;
 use once_cell::sync::Lazy;
 use serde_json::{json, Value};
-use std::collections::HashMap;
 use time::{Duration, OffsetDateTime};
 
 use super::authorization::{ALL_ACTIONS, AUTHORIZATIONS};
+use crate::common::Server;
 
 fn generate_tenant_token(
     parent_uid: impl AsRef<str>,
@@ -17,12 +18,8 @@ fn generate_tenant_token(
 
     let parent_uid = parent_uid.as_ref();
     body.insert("apiKeyUid", json!(parent_uid));
-    encode(
-        &Header::default(),
-        &body,
-        &EncodingKey::from_secret(parent_key.as_ref().as_bytes()),
-    )
-    .unwrap()
+    encode(&Header::default(), &body, &EncodingKey::from_secret(parent_key.as_ref().as_bytes()))
+        .unwrap()
 }
 
 static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
@@ -513,18 +510,14 @@ async fn error_access_expired_parent_key() {
     server.use_api_key(&web_token);
 
     // test search request while parent_key is not expired
-    let (response, code) = server
-        .dummy_request("POST", "/indexes/products/search")
-        .await;
+    let (response, code) = server.dummy_request("POST", "/indexes/products/search").await;
     assert_ne!(response, INVALID_RESPONSE.clone());
     assert_ne!(code, 403);
 
     // wait until the key is expired.
     thread::sleep(time::Duration::new(1, 0));
 
-    let (response, code) = server
-        .dummy_request("POST", "/indexes/products/search")
-        .await;
+    let (response, code) = server.dummy_request("POST", "/indexes/products/search").await;
     assert_eq!(response, INVALID_RESPONSE.clone());
     assert_eq!(code, 403);
 }
@@ -556,9 +549,7 @@ async fn error_access_modified_token() {
     server.use_api_key(&web_token);
 
     // test search request while web_token is valid
-    let (response, code) = server
-        .dummy_request("POST", "/indexes/products/search")
-        .await;
+    let (response, code) = server.dummy_request("POST", "/indexes/products/search").await;
     assert_ne!(response, INVALID_RESPONSE.clone());
     assert_ne!(code, 403);
 
@@ -576,9 +567,7 @@ async fn error_access_modified_token() {
         .join(".");
 
     server.use_api_key(&altered_token);
-    let (response, code) = server
-        .dummy_request("POST", "/indexes/products/search")
-        .await;
+    let (response, code) = server.dummy_request("POST", "/indexes/products/search").await;
     assert_eq!(response, INVALID_RESPONSE.clone());
     assert_eq!(code, 403);
 }
 
@ -1,9 +1,10 @@
|
|||||||
|
use std::io::{Read, Write};
|
||||||
|
|
||||||
use actix_http::header::TryIntoHeaderPair;
|
use actix_http::header::TryIntoHeaderPair;
|
||||||
use bytes::Bytes;
|
use bytes::Bytes;
|
||||||
use flate2::read::{GzDecoder, ZlibDecoder};
|
use flate2::read::{GzDecoder, ZlibDecoder};
|
||||||
use flate2::write::{GzEncoder, ZlibEncoder};
|
use flate2::write::{GzEncoder, ZlibEncoder};
|
||||||
use flate2::Compression;
|
use flate2::Compression;
|
||||||
use std::io::{Read, Write};
|
|
||||||
|
|
||||||
#[derive(Clone, Copy)]
|
#[derive(Clone, Copy)]
|
||||||
pub enum Encoder {
|
pub enum Encoder {
|
||||||
@ -18,24 +19,18 @@ impl Encoder {
|
|||||||
match self {
|
match self {
|
||||||
Self::Gzip => {
|
Self::Gzip => {
|
||||||
let mut encoder = GzEncoder::new(Vec::new(), Compression::default());
|
let mut encoder = GzEncoder::new(Vec::new(), Compression::default());
|
||||||
encoder
|
encoder.write_all(&body.into()).expect("Failed to encode request body");
|
||||||
.write_all(&body.into())
|
|
||||||
.expect("Failed to encode request body");
|
|
||||||
encoder.finish().expect("Failed to encode request body")
|
encoder.finish().expect("Failed to encode request body")
|
||||||
}
|
}
|
||||||
Self::Deflate => {
|
Self::Deflate => {
|
||||||
let mut encoder = ZlibEncoder::new(Vec::new(), Compression::default());
|
let mut encoder = ZlibEncoder::new(Vec::new(), Compression::default());
|
||||||
encoder
|
encoder.write_all(&body.into()).expect("Failed to encode request body");
|
||||||
.write_all(&body.into())
|
|
||||||
.expect("Failed to encode request body");
|
|
||||||
encoder.finish().unwrap()
|
encoder.finish().unwrap()
|
||||||
}
|
}
|
||||||
Self::Plain => Vec::from(body.into()),
|
Self::Plain => Vec::from(body.into()),
|
||||||
Self::Brotli => {
|
Self::Brotli => {
|
||||||
let mut encoder = brotli::CompressorWriter::new(Vec::new(), 32 * 1024, 3, 22);
|
let mut encoder = brotli::CompressorWriter::new(Vec::new(), 32 * 1024, 3, 22);
|
||||||
encoder
|
encoder.write_all(&body.into()).expect("Failed to encode request body");
|
||||||
.write_all(&body.into())
|
|
||||||
.expect("Failed to encode request body");
|
|
||||||
encoder.flush().expect("Failed to encode request body");
|
encoder.flush().expect("Failed to encode request body");
|
||||||
encoder.into_inner()
|
encoder.into_inner()
|
||||||
}
|
}
|
||||||
@ -57,9 +52,7 @@ impl Encoder {
|
|||||||
.expect("Invalid zlib stream");
|
.expect("Invalid zlib stream");
|
||||||
}
|
}
|
||||||
Self::Plain => {
|
Self::Plain => {
|
||||||
buffer
|
buffer.write_all(input.as_ref()).expect("Unexpected memory copying issue");
|
||||||
.write_all(input.as_ref())
|
|
||||||
.expect("Unexpected memory copying issue");
|
|
||||||
}
|
}
|
||||||
Self::Brotli => {
|
Self::Brotli => {
|
||||||
brotli::Decompressor::new(input.as_ref(), 4096)
|
brotli::Decompressor::new(input.as_ref(), 4096)
|
||||||
@ -80,8 +73,6 @@ impl Encoder {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn iterator() -> impl Iterator<Item = Self> {
|
pub fn iterator() -> impl Iterator<Item = Self> {
|
||||||
[Self::Plain, Self::Gzip, Self::Deflate, Self::Brotli]
|
[Self::Plain, Self::Gzip, Self::Deflate, Self::Brotli].iter().copied()
|
||||||
.iter()
|
|
||||||
.copied()
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,17 +1,14 @@
|
|||||||
use std::{
|
use std::fmt::Write;
|
||||||
fmt::Write,
|
use std::panic::{catch_unwind, resume_unwind, UnwindSafe};
|
||||||
panic::{catch_unwind, resume_unwind, UnwindSafe},
|
use std::time::Duration;
|
||||||
time::Duration,
|
|
||||||
};
|
|
||||||
|
|
||||||
use actix_web::http::StatusCode;
|
use actix_web::http::StatusCode;
|
||||||
use serde_json::{json, Value};
|
use serde_json::{json, Value};
|
||||||
use tokio::time::sleep;
|
use tokio::time::sleep;
|
||||||
use urlencoding::encode as urlencode;
|
use urlencoding::encode as urlencode;
|
||||||
|
|
||||||
use super::service::Service;
|
|
||||||
|
|
||||||
use super::encoder::Encoder;
|
use super::encoder::Encoder;
|
||||||
|
use super::service::Service;
|
||||||
|
|
||||||
pub struct Index<'a> {
|
pub struct Index<'a> {
|
||||||
pub uid: String,
|
pub uid: String,
|
||||||
@ -28,10 +25,8 @@ impl Index<'_> {
|
|||||||
|
|
||||||
pub async fn load_test_set(&self) -> u64 {
|
pub async fn load_test_set(&self) -> u64 {
|
||||||
let url = format!("/indexes/{}/documents", urlencode(self.uid.as_ref()));
|
let url = format!("/indexes/{}/documents", urlencode(self.uid.as_ref()));
|
||||||
let (response, code) = self
|
let (response, code) =
|
||||||
.service
|
self.service.post_str(url, include_str!("../assets/test_set.json")).await;
|
||||||
.post_str(url, include_str!("../assets/test_set.json"))
|
|
||||||
.await;
|
|
||||||
assert_eq!(code, 202);
|
assert_eq!(code, 202);
|
||||||
let update_id = response["taskUid"].as_i64().unwrap();
|
let update_id = response["taskUid"].as_i64().unwrap();
|
||||||
self.wait_task(update_id as u64).await;
|
self.wait_task(update_id as u64).await;
|
||||||
@ -43,9 +38,7 @@ impl Index<'_> {
|
|||||||
"uid": self.uid,
|
"uid": self.uid,
|
||||||
"primaryKey": primary_key,
|
"primaryKey": primary_key,
|
||||||
});
|
});
|
||||||
self.service
|
self.service.post_encoded("/indexes", body, self.encoder).await
|
||||||
.post_encoded("/indexes", body, self.encoder)
|
|
||||||
.await
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn update(&self, primary_key: Option<&str>) -> (Value, StatusCode) {
|
pub async fn update(&self, primary_key: Option<&str>) -> (Value, StatusCode) {
|
||||||
@ -68,16 +61,12 @@ impl Index<'_> {
|
|||||||
primary_key: Option<&str>,
|
primary_key: Option<&str>,
|
||||||
) -> (Value, StatusCode) {
|
) -> (Value, StatusCode) {
|
||||||
let url = match primary_key {
|
let url = match primary_key {
|
||||||
Some(key) => format!(
|
Some(key) => {
|
||||||
"/indexes/{}/documents?primaryKey={}",
|
format!("/indexes/{}/documents?primaryKey={}", urlencode(self.uid.as_ref()), key)
|
||||||
urlencode(self.uid.as_ref()),
|
}
|
||||||
key
|
|
||||||
),
|
|
||||||
None => format!("/indexes/{}/documents", urlencode(self.uid.as_ref())),
|
None => format!("/indexes/{}/documents", urlencode(self.uid.as_ref())),
|
||||||
};
|
};
|
||||||
self.service
|
self.service.post_encoded(url, documents, self.encoder).await
|
||||||
.post_encoded(url, documents, self.encoder)
|
|
||||||
.await
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn update_documents(
|
pub async fn update_documents(
|
||||||
@ -86,11 +75,9 @@ impl Index<'_> {
|
|||||||
primary_key: Option<&str>,
|
primary_key: Option<&str>,
|
||||||
) -> (Value, StatusCode) {
|
) -> (Value, StatusCode) {
|
||||||
let url = match primary_key {
|
let url = match primary_key {
|
||||||
Some(key) => format!(
|
Some(key) => {
|
||||||
"/indexes/{}/documents?primaryKey={}",
|
format!("/indexes/{}/documents?primaryKey={}", urlencode(self.uid.as_ref()), key)
|
||||||
urlencode(self.uid.as_ref()),
|
}
|
||||||
key
|
|
||||||
),
|
|
||||||
None => format!("/indexes/{}/documents", urlencode(self.uid.as_ref())),
|
None => format!("/indexes/{}/documents", urlencode(self.uid.as_ref())),
|
||||||
};
|
};
|
||||||
self.service.put_encoded(url, documents, self.encoder).await
|
self.service.put_encoded(url, documents, self.encoder).await
|
||||||
@ -174,13 +161,8 @@ impl Index<'_> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub async fn delete_batch(&self, ids: Vec<u64>) -> (Value, StatusCode) {
|
pub async fn delete_batch(&self, ids: Vec<u64>) -> (Value, StatusCode) {
|
||||||
let url = format!(
|
let url = format!("/indexes/{}/documents/delete-batch", urlencode(self.uid.as_ref()));
|
||||||
"/indexes/{}/documents/delete-batch",
|
self.service.post_encoded(url, serde_json::to_value(&ids).unwrap(), self.encoder).await
|
||||||
urlencode(self.uid.as_ref())
|
|
||||||
);
|
|
||||||
self.service
|
|
||||||
.post_encoded(url, serde_json::to_value(&ids).unwrap(), self.encoder)
|
|
||||||
.await
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn settings(&self) -> (Value, StatusCode) {
|
pub async fn settings(&self) -> (Value, StatusCode) {
|
||||||
@ -190,9 +172,7 @@ impl Index<'_> {
|
|||||||
|
|
||||||
pub async fn update_settings(&self, settings: Value) -> (Value, StatusCode) {
|
pub async fn update_settings(&self, settings: Value) -> (Value, StatusCode) {
|
||||||
let url = format!("/indexes/{}/settings", urlencode(self.uid.as_ref()));
|
let url = format!("/indexes/{}/settings", urlencode(self.uid.as_ref()));
|
||||||
self.service
|
self.service.patch_encoded(url, settings, self.encoder).await
|
||||||
.patch_encoded(url, settings, self.encoder)
|
|
||||||
.await
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn delete_settings(&self) -> (Value, StatusCode) {
|
pub async fn delete_settings(&self) -> (Value, StatusCode) {
|
||||||
@ -232,29 +212,19 @@ impl Index<'_> {
|
|||||||
|
|
||||||
pub async fn search_get(&self, query: Value) -> (Value, StatusCode) {
|
pub async fn search_get(&self, query: Value) -> (Value, StatusCode) {
|
||||||
let params = yaup::to_string(&query).unwrap();
|
let params = yaup::to_string(&query).unwrap();
|
||||||
let url = format!(
|
let url = format!("/indexes/{}/search?{}", urlencode(self.uid.as_ref()), params);
|
||||||
"/indexes/{}/search?{}",
|
|
||||||
urlencode(self.uid.as_ref()),
|
|
||||||
params
|
|
||||||
);
|
|
||||||
self.service.get(url).await
|
self.service.get(url).await
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn update_distinct_attribute(&self, value: Value) -> (Value, StatusCode) {
|
pub async fn update_distinct_attribute(&self, value: Value) -> (Value, StatusCode) {
|
||||||
let url = format!(
|
let url =
|
||||||
"/indexes/{}/settings/{}",
|
format!("/indexes/{}/settings/{}", urlencode(self.uid.as_ref()), "distinct-attribute");
|
||||||
urlencode(self.uid.as_ref()),
|
|
||||||
"distinct-attribute"
|
|
||||||
);
|
|
||||||
self.service.put_encoded(url, value, self.encoder).await
|
self.service.put_encoded(url, value, self.encoder).await
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn get_distinct_attribute(&self) -> (Value, StatusCode) {
|
pub async fn get_distinct_attribute(&self) -> (Value, StatusCode) {
|
||||||
let url = format!(
|
let url =
|
||||||
"/indexes/{}/settings/{}",
|
format!("/indexes/{}/settings/{}", urlencode(self.uid.as_ref()), "distinct-attribute");
|
||||||
urlencode(self.uid.as_ref()),
|
|
||||||
"distinct-attribute"
|
|
||||||
);
|
|
||||||
self.service.get(url).await
|
self.service.get(url).await
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -15,18 +15,10 @@ macro_rules! test_post_get_search {
|
|||||||
let get_query: meilisearch_http::routes::search::SearchQuery = post_query.into();
|
let get_query: meilisearch_http::routes::search::SearchQuery = post_query.into();
|
||||||
let get_query = ::serde_url_params::to_string(&get_query).unwrap();
|
let get_query = ::serde_url_params::to_string(&get_query).unwrap();
|
||||||
let ($response, $status_code) = $server.search_get(&get_query).await;
|
let ($response, $status_code) = $server.search_get(&get_query).await;
|
||||||
let _ = ::std::panic::catch_unwind(|| $block).map_err(|e| {
|
let _ = ::std::panic::catch_unwind(|| $block)
|
||||||
panic!(
|
.map_err(|e| panic!("panic in get route: {:?}", e.downcast_ref::<&str>().unwrap()));
|
||||||
"panic in get route: {:?}",
|
|
||||||
e.downcast_ref::<&str>().unwrap()
|
|
||||||
)
|
|
||||||
});
|
|
||||||
let ($response, $status_code) = $server.search_post($query).await;
|
let ($response, $status_code) = $server.search_post($query).await;
|
||||||
let _ = ::std::panic::catch_unwind(|| $block).map_err(|e| {
|
let _ = ::std::panic::catch_unwind(|| $block)
|
||||||
panic!(
|
.map_err(|e| panic!("panic in post route: {:?}", e.downcast_ref::<&str>().unwrap()));
|
||||||
"panic in post route: {:?}",
|
|
||||||
e.downcast_ref::<&str>().unwrap()
|
|
||||||
)
|
|
||||||
});
|
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
@ -1,23 +1,22 @@
|
|||||||
#![allow(dead_code)]
|
#![allow(dead_code)]
|
||||||
|
|
||||||
use actix_http::body::MessageBody;
|
|
||||||
use actix_web::dev::ServiceResponse;
|
|
||||||
use clap::Parser;
|
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
use actix_http::body::MessageBody;
|
||||||
|
use actix_web::dev::ServiceResponse;
|
||||||
use actix_web::http::StatusCode;
|
use actix_web::http::StatusCode;
|
||||||
use byte_unit::{Byte, ByteUnit};
|
use byte_unit::{Byte, ByteUnit};
|
||||||
|
use clap::Parser;
|
||||||
|
use meilisearch_http::option::{IndexerOpts, MaxMemory, Opt};
|
||||||
|
use meilisearch_http::{analytics, create_app, setup_meilisearch};
|
||||||
use once_cell::sync::Lazy;
|
use once_cell::sync::Lazy;
|
||||||
use serde_json::Value;
|
use serde_json::Value;
|
||||||
use tempfile::TempDir;
|
use tempfile::TempDir;
|
||||||
|
|
||||||
use meilisearch_http::option::{IndexerOpts, MaxMemory, Opt};
|
|
||||||
use meilisearch_http::{analytics, create_app, setup_meilisearch};
|
|
||||||
use crate::common::encoder::Encoder;
|
|
||||||
|
|
||||||
use super::index::Index;
|
use super::index::Index;
|
||||||
use super::service::Service;
|
use super::service::Service;
|
||||||
|
use crate::common::encoder::Encoder;
|
||||||
|
|
||||||
pub struct Server {
|
pub struct Server {
|
||||||
pub service: Service,
|
pub service: Service,
|
||||||
@ -40,17 +39,10 @@ impl Server {
|
|||||||
let options = default_settings(dir.path());
|
let options = default_settings(dir.path());
|
||||||
|
|
||||||
let (index_scheduler, auth) = setup_meilisearch(&options).unwrap();
|
let (index_scheduler, auth) = setup_meilisearch(&options).unwrap();
|
||||||
let service = Service {
|
let service =
|
||||||
index_scheduler: Arc::new(index_scheduler),
|
Service { index_scheduler: Arc::new(index_scheduler), auth, options, api_key: None };
|
||||||
auth,
|
|
||||||
options,
|
|
||||||
api_key: None,
|
|
||||||
};
|
|
||||||
|
|
||||||
Server {
|
Server { service, _dir: Some(dir) }
|
||||||
service,
|
|
||||||
_dir: Some(dir),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn new_auth_with_options(mut options: Opt, dir: TempDir) -> Self {
|
pub async fn new_auth_with_options(mut options: Opt, dir: TempDir) -> Self {
|
||||||
@ -63,17 +55,10 @@ impl Server {
|
|||||||
options.master_key = Some("MASTER_KEY".to_string());
|
options.master_key = Some("MASTER_KEY".to_string());
|
||||||
|
|
||||||
let (index_scheduler, auth) = setup_meilisearch(&options).unwrap();
|
let (index_scheduler, auth) = setup_meilisearch(&options).unwrap();
|
||||||
let service = Service {
|
let service =
|
||||||
index_scheduler: Arc::new(index_scheduler),
|
Service { index_scheduler: Arc::new(index_scheduler), auth, options, api_key: None };
|
||||||
auth,
|
|
||||||
options,
|
|
||||||
api_key: None,
|
|
||||||
};
|
|
||||||
|
|
||||||
Server {
|
Server { service, _dir: Some(dir) }
|
||||||
service,
|
|
||||||
_dir: Some(dir),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn new_auth() -> Self {
|
pub async fn new_auth() -> Self {
|
||||||
@ -84,17 +69,10 @@ impl Server {
|
|||||||
|
|
||||||
pub async fn new_with_options(options: Opt) -> Result<Self, anyhow::Error> {
|
pub async fn new_with_options(options: Opt) -> Result<Self, anyhow::Error> {
|
||||||
let (index_scheduler, auth) = setup_meilisearch(&options)?;
|
let (index_scheduler, auth) = setup_meilisearch(&options)?;
|
||||||
let service = Service {
|
let service =
|
||||||
index_scheduler: Arc::new(index_scheduler),
|
Service { index_scheduler: Arc::new(index_scheduler), auth, options, api_key: None };
|
||||||
auth,
|
|
||||||
options,
|
|
||||||
api_key: None,
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(Server {
|
Ok(Server { service, _dir: None })
|
||||||
service,
|
|
||||||
_dir: None,
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn init_web_app(
|
pub async fn init_web_app(
|
||||||
@ -120,11 +98,7 @@ impl Server {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn index_with_encoder(&self, uid: impl AsRef<str>, encoder: Encoder) -> Index<'_> {
|
pub fn index_with_encoder(&self, uid: impl AsRef<str>, encoder: Encoder) -> Index<'_> {
|
||||||
Index {
|
Index { uid: uid.as_ref().to_string(), service: &self.service, encoder }
|
||||||
uid: uid.as_ref().to_string(),
|
|
||||||
service: &self.service,
|
|
||||||
encoder,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn list_indexes(
|
pub async fn list_indexes(
|
||||||
@ -142,9 +116,7 @@ impl Server {
|
|||||||
.map(|(offset, limit)| format!("{offset}&{limit}"))
|
.map(|(offset, limit)| format!("{offset}&{limit}"))
|
||||||
.or_else(|| offset.xor(limit));
|
.or_else(|| offset.xor(limit));
|
||||||
if let Some(query_parameter) = query_parameter {
|
if let Some(query_parameter) = query_parameter {
|
||||||
self.service
|
self.service.get(format!("/indexes?{query_parameter}")).await
|
||||||
.get(format!("/indexes?{query_parameter}"))
|
|
||||||
.await
|
|
||||||
} else {
|
} else {
|
||||||
self.service.get("/indexes").await
|
self.service.get("/indexes").await
|
||||||
}
|
}
|
||||||
|
@ -1,14 +1,15 @@
|
|||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use actix_web::http::header::ContentType;
|
use actix_web::http::header::ContentType;
|
||||||
|
use actix_web::http::StatusCode;
|
||||||
|
use actix_web::test;
|
||||||
use actix_web::test::TestRequest;
|
use actix_web::test::TestRequest;
|
||||||
use actix_web::{http::StatusCode, test};
|
|
||||||
use index_scheduler::IndexScheduler;
|
use index_scheduler::IndexScheduler;
|
||||||
use meilisearch_auth::AuthController;
|
use meilisearch_auth::AuthController;
|
||||||
|
use meilisearch_http::{analytics, create_app, Opt};
|
||||||
use serde_json::Value;
|
use serde_json::Value;
|
||||||
|
|
||||||
use crate::common::encoder::Encoder;
|
use crate::common::encoder::Encoder;
|
||||||
use meilisearch_http::{analytics, create_app, Opt};
|
|
||||||
|
|
||||||
pub struct Service {
|
pub struct Service {
|
||||||
pub index_scheduler: Arc<IndexScheduler>,
|
pub index_scheduler: Arc<IndexScheduler>,
|
||||||
|
@ -2,10 +2,11 @@
|
|||||||
|
|
||||||
mod common;
|
mod common;
|
||||||
|
|
||||||
use crate::common::Server;
|
|
||||||
use actix_web::test;
|
use actix_web::test;
|
||||||
use serde_json::{json, Value};
|
use serde_json::{json, Value};
|
||||||
|
|
||||||
|
use crate::common::Server;
|
||||||
|
|
||||||
enum HttpVerb {
|
enum HttpVerb {
|
||||||
Put,
|
Put,
|
||||||
Patch,
|
Patch,
|
||||||
@ -75,11 +76,7 @@ async fn error_json_bad_content_type() {
|
|||||||
"calling the route `{}` with a content-type of json isn't supposed to throw a bad media type error", route);
|
"calling the route `{}` with a content-type of json isn't supposed to throw a bad media type error", route);
|
||||||
|
|
||||||
// No content-type.
|
// No content-type.
|
||||||
let req = verb
|
let req = verb.test_request().uri(route).set_payload(document).to_request();
|
||||||
.test_request()
|
|
||||||
.uri(route)
|
|
||||||
.set_payload(document)
|
|
||||||
.to_request();
|
|
||||||
let res = test::call_service(&app, req).await;
|
let res = test::call_service(&app, req).await;
|
||||||
let status_code = res.status();
|
let status_code = res.status();
|
||||||
let body = test::read_body(res).await;
|
let body = test::read_body(res).await;
|
||||||
|
@ -1,9 +1,10 @@
|
|||||||
use crate::common::{GetAllDocumentsOptions, Server};
|
|
||||||
use actix_web::test;
|
use actix_web::test;
|
||||||
|
use serde_json::{json, Value};
|
||||||
|
use time::format_description::well_known::Rfc3339;
|
||||||
|
use time::OffsetDateTime;
|
||||||
|
|
||||||
use crate::common::encoder::Encoder;
|
use crate::common::encoder::Encoder;
|
||||||
use serde_json::{json, Value};
|
use crate::common::{GetAllDocumentsOptions, Server};
|
||||||
use time::{format_description::well_known::Rfc3339, OffsetDateTime};
|
|
||||||
|
|
||||||
/// This is the basic usage of our API and every other tests uses the content-type application/json
|
/// This is the basic usage of our API and every other tests uses the content-type application/json
|
||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
@ -192,10 +193,7 @@ async fn error_add_documents_test_bad_content_types() {
|
|||||||
);
|
);
|
||||||
assert_eq!(response["code"], "invalid_content_type");
|
assert_eq!(response["code"], "invalid_content_type");
|
||||||
assert_eq!(response["type"], "invalid_request");
|
assert_eq!(response["type"], "invalid_request");
|
||||||
assert_eq!(
|
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");
|
||||||
response["link"],
|
|
||||||
"https://docs.meilisearch.com/errors#invalid_content_type"
|
|
||||||
);
|
|
||||||
|
|
||||||
// put
|
// put
|
||||||
let req = test::TestRequest::put()
|
let req = test::TestRequest::put()
|
||||||
@ -216,10 +214,7 @@ async fn error_add_documents_test_bad_content_types() {
|
|||||||
);
|
);
|
||||||
assert_eq!(response["code"], "invalid_content_type");
|
assert_eq!(response["code"], "invalid_content_type");
|
||||||
assert_eq!(response["type"], "invalid_request");
|
assert_eq!(response["type"], "invalid_request");
|
||||||
assert_eq!(
|
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");
|
||||||
response["link"],
|
|
||||||
"https://docs.meilisearch.com/errors#invalid_content_type"
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// missing content-type must be refused
|
/// missing content-type must be refused
|
||||||
@ -253,10 +248,7 @@ async fn error_add_documents_test_no_content_type() {
|
|||||||
);
|
);
|
||||||
assert_eq!(response["code"], "missing_content_type");
|
assert_eq!(response["code"], "missing_content_type");
|
||||||
assert_eq!(response["type"], "invalid_request");
|
assert_eq!(response["type"], "invalid_request");
|
||||||
assert_eq!(
|
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing_content_type");
|
||||||
response["link"],
|
|
||||||
"https://docs.meilisearch.com/errors#missing_content_type"
|
|
||||||
);
|
|
||||||
|
|
||||||
// put
|
// put
|
||||||
let req = test::TestRequest::put()
|
let req = test::TestRequest::put()
|
||||||
@ -276,10 +268,7 @@ async fn error_add_documents_test_no_content_type() {
|
|||||||
);
|
);
|
||||||
assert_eq!(response["code"], "missing_content_type");
|
assert_eq!(response["code"], "missing_content_type");
|
||||||
assert_eq!(response["type"], "invalid_request");
|
assert_eq!(response["type"], "invalid_request");
|
||||||
assert_eq!(
|
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing_content_type");
|
||||||
response["link"],
|
|
||||||
"https://docs.meilisearch.com/errors#missing_content_type"
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
@ -308,10 +297,7 @@ async fn error_add_malformed_csv_documents() {
|
|||||||
);
|
);
|
||||||
assert_eq!(response["code"], json!("malformed_payload"));
|
assert_eq!(response["code"], json!("malformed_payload"));
|
||||||
assert_eq!(response["type"], json!("invalid_request"));
|
assert_eq!(response["type"], json!("invalid_request"));
|
||||||
assert_eq!(
|
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
|
||||||
response["link"],
|
|
||||||
json!("https://docs.meilisearch.com/errors#malformed_payload")
|
|
||||||
);
|
|
||||||
|
|
||||||
// put
|
// put
|
||||||
let req = test::TestRequest::put()
|
let req = test::TestRequest::put()
|
||||||
@ -332,10 +318,7 @@ async fn error_add_malformed_csv_documents() {
|
|||||||
);
|
);
|
||||||
assert_eq!(response["code"], json!("malformed_payload"));
|
assert_eq!(response["code"], json!("malformed_payload"));
|
||||||
assert_eq!(response["type"], json!("invalid_request"));
|
assert_eq!(response["type"], json!("invalid_request"));
|
||||||
assert_eq!(
|
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
|
||||||
response["link"],
|
|
||||||
json!("https://docs.meilisearch.com/errors#malformed_payload")
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
@ -364,10 +347,7 @@ async fn error_add_malformed_json_documents() {
|
|||||||
);
|
);
|
||||||
assert_eq!(response["code"], json!("malformed_payload"));
|
assert_eq!(response["code"], json!("malformed_payload"));
|
||||||
assert_eq!(response["type"], json!("invalid_request"));
|
assert_eq!(response["type"], json!("invalid_request"));
|
||||||
assert_eq!(
|
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
|
||||||
response["link"],
|
|
||||||
json!("https://docs.meilisearch.com/errors#malformed_payload")
|
|
||||||
);
|
|
||||||
|
|
||||||
// put
|
// put
|
||||||
let req = test::TestRequest::put()
|
let req = test::TestRequest::put()
|
||||||
@ -388,10 +368,7 @@ async fn error_add_malformed_json_documents() {
|
|||||||
);
|
);
|
||||||
assert_eq!(response["code"], json!("malformed_payload"));
|
assert_eq!(response["code"], json!("malformed_payload"));
|
||||||
assert_eq!(response["type"], json!("invalid_request"));
|
assert_eq!(response["type"], json!("invalid_request"));
|
||||||
assert_eq!(
|
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
|
||||||
response["link"],
|
|
||||||
json!("https://docs.meilisearch.com/errors#malformed_payload")
|
|
||||||
);
|
|
||||||
|
|
||||||
// truncate
|
// truncate
|
||||||
|
|
||||||
@ -416,10 +393,7 @@ async fn error_add_malformed_json_documents() {
|
|||||||
);
|
);
|
||||||
assert_eq!(response["code"], json!("malformed_payload"));
|
assert_eq!(response["code"], json!("malformed_payload"));
|
||||||
assert_eq!(response["type"], json!("invalid_request"));
|
assert_eq!(response["type"], json!("invalid_request"));
|
||||||
assert_eq!(
|
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
|
||||||
response["link"],
|
|
||||||
json!("https://docs.meilisearch.com/errors#malformed_payload")
|
|
||||||
);
|
|
||||||
|
|
||||||
// add one more char to the long string to test if the truncating works.
|
// add one more char to the long string to test if the truncating works.
|
||||||
let document = format!("\"{}m\"", long);
|
let document = format!("\"{}m\"", long);
|
||||||
@ -438,10 +412,7 @@ async fn error_add_malformed_json_documents() {
|
|||||||
);
|
);
|
||||||
assert_eq!(response["code"], json!("malformed_payload"));
|
assert_eq!(response["code"], json!("malformed_payload"));
|
||||||
assert_eq!(response["type"], json!("invalid_request"));
|
assert_eq!(response["type"], json!("invalid_request"));
|
||||||
assert_eq!(
|
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
|
||||||
response["link"],
|
|
||||||
json!("https://docs.meilisearch.com/errors#malformed_payload")
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
@ -470,10 +441,7 @@ async fn error_add_malformed_ndjson_documents() {
|
|||||||
);
|
);
|
||||||
assert_eq!(response["code"], json!("malformed_payload"));
|
assert_eq!(response["code"], json!("malformed_payload"));
|
||||||
assert_eq!(response["type"], json!("invalid_request"));
|
assert_eq!(response["type"], json!("invalid_request"));
|
||||||
assert_eq!(
|
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
|
||||||
response["link"],
|
|
||||||
json!("https://docs.meilisearch.com/errors#malformed_payload")
|
|
||||||
);
|
|
||||||
|
|
||||||
// put
|
// put
|
||||||
let req = test::TestRequest::put()
|
let req = test::TestRequest::put()
|
||||||
@ -492,10 +460,7 @@ async fn error_add_malformed_ndjson_documents() {
|
|||||||
);
|
);
|
||||||
assert_eq!(response["code"], json!("malformed_payload"));
|
assert_eq!(response["code"], json!("malformed_payload"));
|
||||||
assert_eq!(response["type"], json!("invalid_request"));
|
assert_eq!(response["type"], json!("invalid_request"));
|
||||||
assert_eq!(
|
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
|
||||||
response["link"],
|
|
||||||
json!("https://docs.meilisearch.com/errors#malformed_payload")
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
@ -519,10 +484,7 @@ async fn error_add_missing_payload_csv_documents() {
|
|||||||
assert_eq!(response["message"], json!(r#"A csv payload is missing."#));
|
assert_eq!(response["message"], json!(r#"A csv payload is missing."#));
|
||||||
assert_eq!(response["code"], json!("missing_payload"));
|
assert_eq!(response["code"], json!("missing_payload"));
|
||||||
assert_eq!(response["type"], json!("invalid_request"));
|
assert_eq!(response["type"], json!("invalid_request"));
|
||||||
assert_eq!(
|
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
|
||||||
response["link"],
|
|
||||||
json!("https://docs.meilisearch.com/errors#missing_payload")
|
|
||||||
);
|
|
||||||
|
|
||||||
// put
|
// put
|
||||||
let req = test::TestRequest::put()
|
let req = test::TestRequest::put()
|
||||||
@ -538,10 +500,7 @@ async fn error_add_missing_payload_csv_documents() {
|
|||||||
assert_eq!(response["message"], json!(r#"A csv payload is missing."#));
|
assert_eq!(response["message"], json!(r#"A csv payload is missing."#));
|
||||||
assert_eq!(response["code"], json!("missing_payload"));
|
assert_eq!(response["code"], json!("missing_payload"));
|
||||||
assert_eq!(response["type"], json!("invalid_request"));
|
assert_eq!(response["type"], json!("invalid_request"));
|
||||||
assert_eq!(
|
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
|
||||||
response["link"],
|
|
||||||
json!("https://docs.meilisearch.com/errors#missing_payload")
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
@ -565,10 +524,7 @@ async fn error_add_missing_payload_json_documents() {
|
|||||||
assert_eq!(response["message"], json!(r#"A json payload is missing."#));
|
assert_eq!(response["message"], json!(r#"A json payload is missing."#));
|
||||||
assert_eq!(response["code"], json!("missing_payload"));
|
assert_eq!(response["code"], json!("missing_payload"));
|
||||||
assert_eq!(response["type"], json!("invalid_request"));
|
assert_eq!(response["type"], json!("invalid_request"));
|
||||||
assert_eq!(
|
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
|
||||||
response["link"],
|
|
||||||
json!("https://docs.meilisearch.com/errors#missing_payload")
|
|
||||||
);
|
|
||||||
|
|
||||||
// put
|
// put
|
||||||
let req = test::TestRequest::put()
|
let req = test::TestRequest::put()
|
||||||
@ -584,10 +540,7 @@ async fn error_add_missing_payload_json_documents() {
|
|||||||
assert_eq!(response["message"], json!(r#"A json payload is missing."#));
|
assert_eq!(response["message"], json!(r#"A json payload is missing."#));
|
||||||
assert_eq!(response["code"], json!("missing_payload"));
|
assert_eq!(response["code"], json!("missing_payload"));
|
||||||
assert_eq!(response["type"], json!("invalid_request"));
|
assert_eq!(response["type"], json!("invalid_request"));
|
||||||
assert_eq!(
|
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
|
||||||
response["link"],
|
|
||||||
json!("https://docs.meilisearch.com/errors#missing_payload")
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
@ -608,16 +561,10 @@ async fn error_add_missing_payload_ndjson_documents() {
|
|||||||
let body = test::read_body(res).await;
|
let body = test::read_body(res).await;
|
||||||
let response: Value = serde_json::from_slice(&body).unwrap_or_default();
|
let response: Value = serde_json::from_slice(&body).unwrap_or_default();
|
||||||
assert_eq!(status_code, 400);
|
assert_eq!(status_code, 400);
|
||||||
assert_eq!(
|
assert_eq!(response["message"], json!(r#"A ndjson payload is missing."#));
|
||||||
response["message"],
|
|
||||||
json!(r#"A ndjson payload is missing."#)
|
|
||||||
);
|
|
||||||
assert_eq!(response["code"], json!("missing_payload"));
|
assert_eq!(response["code"], json!("missing_payload"));
|
||||||
assert_eq!(response["type"], json!("invalid_request"));
|
assert_eq!(response["type"], json!("invalid_request"));
|
||||||
assert_eq!(
|
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
|
||||||
response["link"],
|
|
||||||
json!("https://docs.meilisearch.com/errors#missing_payload")
|
|
||||||
);
|
|
||||||
|
|
||||||
// put
|
// put
|
||||||
let req = test::TestRequest::put()
|
let req = test::TestRequest::put()
|
||||||
@ -630,16 +577,10 @@ async fn error_add_missing_payload_ndjson_documents() {
|
|||||||
let body = test::read_body(res).await;
|
let body = test::read_body(res).await;
|
||||||
let response: Value = serde_json::from_slice(&body).unwrap_or_default();
|
let response: Value = serde_json::from_slice(&body).unwrap_or_default();
|
||||||
assert_eq!(status_code, 400);
|
assert_eq!(status_code, 400);
|
||||||
assert_eq!(
|
assert_eq!(response["message"], json!(r#"A ndjson payload is missing."#));
|
||||||
response["message"],
|
|
||||||
json!(r#"A ndjson payload is missing."#)
|
|
||||||
);
|
|
||||||
assert_eq!(response["code"], json!("missing_payload"));
|
assert_eq!(response["code"], json!("missing_payload"));
|
||||||
assert_eq!(response["type"], json!("invalid_request"));
|
assert_eq!(response["type"], json!("invalid_request"));
|
||||||
assert_eq!(
|
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
|
||||||
response["link"],
|
|
||||||
json!("https://docs.meilisearch.com/errors#missing_payload")
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
@ -792,10 +733,7 @@ async fn add_larger_dataset() {
|
|||||||
assert_eq!(response["details"]["indexedDocuments"], 77);
|
assert_eq!(response["details"]["indexedDocuments"], 77);
|
||||||
assert_eq!(response["details"]["receivedDocuments"], 77);
|
assert_eq!(response["details"]["receivedDocuments"], 77);
|
||||||
let (response, code) = index
|
let (response, code) = index
|
||||||
.get_all_documents(GetAllDocumentsOptions {
|
.get_all_documents(GetAllDocumentsOptions { limit: Some(1000), ..Default::default() })
|
||||||
limit: Some(1000),
|
|
||||||
..Default::default()
|
|
||||||
})
|
|
||||||
.await;
|
.await;
|
||||||
assert_eq!(code, 200, "failed with `{}`", response);
|
assert_eq!(code, 200, "failed with `{}`", response);
|
||||||
assert_eq!(response["results"].as_array().unwrap().len(), 77);
|
assert_eq!(response["results"].as_array().unwrap().len(), 77);
|
||||||
@ -900,9 +838,7 @@ async fn add_documents_invalid_geo_field() {
|
|||||||
let server = Server::new().await;
|
let server = Server::new().await;
|
||||||
let index = server.index("test");
|
let index = server.index("test");
|
||||||
index.create(Some("id")).await;
|
index.create(Some("id")).await;
|
||||||
index
|
index.update_settings(json!({"sortableAttributes": ["_geo"]})).await;
|
||||||
.update_settings(json!({"sortableAttributes": ["_geo"]}))
|
|
||||||
.await;
|
|
||||||
|
|
||||||
let documents = json!([
|
let documents = json!([
|
||||||
{
|
{
|
||||||
@ -1045,10 +981,7 @@ async fn batch_several_documents_addition() {
|
|||||||
|
|
||||||
// Check if there are exactly 120 documents (150 - 30) in the index;
|
// Check if there are exactly 120 documents (150 - 30) in the index;
|
||||||
let (response, code) = index
|
let (response, code) = index
|
||||||
.get_all_documents(GetAllDocumentsOptions {
|
.get_all_documents(GetAllDocumentsOptions { limit: Some(200), ..Default::default() })
|
||||||
limit: Some(200),
|
|
||||||
..Default::default()
|
|
||||||
})
|
|
||||||
.await;
|
.await;
|
||||||
assert_eq!(code, 200, "failed with `{}`", response);
|
assert_eq!(code, 200, "failed with `{}`", response);
|
||||||
assert_eq!(response["results"].as_array().unwrap().len(), 120);
|
assert_eq!(response["results"].as_array().unwrap().len(), 120);
|
||||||
|
@ -29,9 +29,7 @@ async fn delete_one_unexisting_document() {
|
|||||||
async fn delete_one_document() {
|
async fn delete_one_document() {
|
||||||
let server = Server::new().await;
|
let server = Server::new().await;
|
||||||
let index = server.index("test");
|
let index = server.index("test");
|
||||||
index
|
index.add_documents(json!([{ "id": 0, "content": "foobar" }]), None).await;
|
||||||
.add_documents(json!([{ "id": 0, "content": "foobar" }]), None)
|
|
||||||
.await;
|
|
||||||
index.wait_task(0).await;
|
index.wait_task(0).await;
|
||||||
let (_response, code) = server.index("test").delete_document(0).await;
|
let (_response, code) = server.index("test").delete_document(0).await;
|
||||||
assert_eq!(code, 202);
|
assert_eq!(code, 202);
|
||||||
@ -68,9 +66,7 @@ async fn clear_all_documents() {
|
|||||||
assert_eq!(code, 202);
|
assert_eq!(code, 202);
|
||||||
|
|
||||||
let _update = index.wait_task(1).await;
|
let _update = index.wait_task(1).await;
|
||||||
let (response, code) = index
|
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
|
||||||
.get_all_documents(GetAllDocumentsOptions::default())
|
|
||||||
.await;
|
|
||||||
assert_eq!(code, 200);
|
assert_eq!(code, 200);
|
||||||
assert!(response["results"].as_array().unwrap().is_empty());
|
assert!(response["results"].as_array().unwrap().is_empty());
|
||||||
}
|
}
|
||||||
@ -85,9 +81,7 @@ async fn clear_all_documents_empty_index() {
|
|||||||
assert_eq!(code, 202);
|
assert_eq!(code, 202);
|
||||||
|
|
||||||
let _update = index.wait_task(0).await;
|
let _update = index.wait_task(0).await;
|
||||||
let (response, code) = index
|
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
|
||||||
.get_all_documents(GetAllDocumentsOptions::default())
|
|
||||||
.await;
|
|
||||||
assert_eq!(code, 200);
|
assert_eq!(code, 200);
|
||||||
assert!(response["results"].as_array().unwrap().is_empty());
|
assert!(response["results"].as_array().unwrap().is_empty());
|
||||||
}
|
}
|
||||||
@ -121,9 +115,7 @@ async fn delete_batch() {
|
|||||||
assert_eq!(code, 202);
|
assert_eq!(code, 202);
|
||||||
|
|
||||||
let _update = index.wait_task(1).await;
|
let _update = index.wait_task(1).await;
|
||||||
let (response, code) = index
|
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
|
||||||
.get_all_documents(GetAllDocumentsOptions::default())
|
|
||||||
.await;
|
|
||||||
assert_eq!(code, 200);
|
assert_eq!(code, 200);
|
||||||
assert_eq!(response["results"].as_array().unwrap().len(), 1);
|
assert_eq!(response["results"].as_array().unwrap().len(), 1);
|
||||||
assert_eq!(response["results"][0]["id"], json!(3));
|
assert_eq!(response["results"][0]["id"], json!(3));
|
||||||
@ -139,9 +131,7 @@ async fn delete_no_document_batch() {
|
|||||||
assert_eq!(code, 202, "{}", _response);
|
assert_eq!(code, 202, "{}", _response);
|
||||||
|
|
||||||
let _update = index.wait_task(1).await;
|
let _update = index.wait_task(1).await;
|
||||||
let (response, code) = index
|
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
|
||||||
.get_all_documents(GetAllDocumentsOptions::default())
|
|
||||||
.await;
|
|
||||||
assert_eq!(code, 200);
|
assert_eq!(code, 200);
|
||||||
assert_eq!(response["results"].as_array().unwrap().len(), 3);
|
assert_eq!(response["results"].as_array().unwrap().len(), 3);
|
||||||
}
|
}
|
||||||
|
@ -1,11 +1,11 @@
|
|||||||
use crate::common::{GetAllDocumentsOptions, GetDocumentOptions, Server};
|
|
||||||
use actix_web::test;
|
use actix_web::test;
|
||||||
use http::header::ACCEPT_ENCODING;
|
use http::header::ACCEPT_ENCODING;
|
||||||
|
|
||||||
use crate::common::encoder::Encoder;
|
|
||||||
use serde_json::{json, Value};
|
use serde_json::{json, Value};
|
||||||
use urlencoding::encode as urlencode;
|
use urlencoding::encode as urlencode;
|
||||||
|
|
||||||
|
use crate::common::encoder::Encoder;
|
||||||
|
use crate::common::{GetAllDocumentsOptions, GetDocumentOptions, Server};
|
||||||
|
|
||||||
// TODO: partial test since we are testing error, amd error is not yet fully implemented in
|
// TODO: partial test since we are testing error, amd error is not yet fully implemented in
|
||||||
// transplant
|
// transplant
|
||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
@ -58,14 +58,8 @@ async fn get_document() {
|
|||||||
})
|
})
|
||||||
);
|
);
|
||||||
|
|
||||||
let (response, code) = index
|
let (response, code) =
|
||||||
.get_document(
|
index.get_document(0, Some(GetDocumentOptions { fields: Some(vec!["id"]) })).await;
|
||||||
0,
|
|
||||||
Some(GetDocumentOptions {
|
|
||||||
fields: Some(vec!["id"]),
|
|
||||||
}),
|
|
||||||
)
|
|
||||||
.await;
|
|
||||||
assert_eq!(code, 200);
|
assert_eq!(code, 200);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
response,
|
response,
|
||||||
@ -75,12 +69,7 @@ async fn get_document() {
|
|||||||
);
|
);
|
||||||
|
|
||||||
let (response, code) = index
|
let (response, code) = index
|
||||||
.get_document(
|
.get_document(0, Some(GetDocumentOptions { fields: Some(vec!["nested.content"]) }))
|
||||||
0,
|
|
||||||
Some(GetDocumentOptions {
|
|
||||||
fields: Some(vec!["nested.content"]),
|
|
||||||
}),
|
|
||||||
)
|
|
||||||
.await;
|
.await;
|
||||||
assert_eq!(code, 200);
|
assert_eq!(code, 200);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -94,10 +83,8 @@ async fn get_document() {
|
|||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
async fn error_get_unexisting_index_all_documents() {
|
async fn error_get_unexisting_index_all_documents() {
|
||||||
let server = Server::new().await;
|
let server = Server::new().await;
|
||||||
let (response, code) = server
|
let (response, code) =
|
||||||
.index("test")
|
server.index("test").get_all_documents(GetAllDocumentsOptions::default()).await;
|
||||||
.get_all_documents(GetAllDocumentsOptions::default())
|
|
||||||
.await;
|
|
||||||
|
|
||||||
let expected_response = json!({
|
let expected_response = json!({
|
||||||
"message": "Index `test` not found.",
|
"message": "Index `test` not found.",
|
||||||
@ -119,9 +106,7 @@ async fn get_no_document() {
|
|||||||
|
|
||||||
index.wait_task(0).await;
|
index.wait_task(0).await;
|
||||||
|
|
||||||
let (response, code) = index
|
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
|
||||||
.get_all_documents(GetAllDocumentsOptions::default())
|
|
||||||
.await;
|
|
||||||
assert_eq!(code, 200);
|
assert_eq!(code, 200);
|
||||||
assert!(response["results"].as_array().unwrap().is_empty());
|
assert!(response["results"].as_array().unwrap().is_empty());
|
||||||
}
|
}
|
||||||
@ -132,9 +117,7 @@ async fn get_all_documents_no_options() {
|
|||||||
let index = server.index("test");
|
let index = server.index("test");
|
||||||
index.load_test_set().await;
|
index.load_test_set().await;
|
||||||
|
|
||||||
let (response, code) = index
|
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
|
||||||
.get_all_documents(GetAllDocumentsOptions::default())
|
|
||||||
.await;
|
|
||||||
assert_eq!(code, 200);
|
assert_eq!(code, 200);
|
||||||
let arr = response["results"].as_array().unwrap();
|
let arr = response["results"].as_array().unwrap();
|
||||||
assert_eq!(arr.len(), 20);
|
assert_eq!(arr.len(), 20);
|
||||||
@ -192,10 +175,7 @@ async fn test_get_all_documents_limit() {
|
|||||||
index.load_test_set().await;
|
index.load_test_set().await;
|
||||||
|
|
||||||
let (response, code) = index
|
let (response, code) = index
|
||||||
.get_all_documents(GetAllDocumentsOptions {
|
.get_all_documents(GetAllDocumentsOptions { limit: Some(5), ..Default::default() })
|
||||||
limit: Some(5),
|
|
||||||
..Default::default()
|
|
||||||
})
|
|
||||||
.await;
|
.await;
|
||||||
assert_eq!(code, 200);
|
assert_eq!(code, 200);
|
||||||
assert_eq!(response["results"].as_array().unwrap().len(), 5);
|
assert_eq!(response["results"].as_array().unwrap().len(), 5);
|
||||||
@ -212,10 +192,7 @@ async fn test_get_all_documents_offset() {
|
|||||||
index.load_test_set().await;
|
index.load_test_set().await;
|
||||||
|
|
||||||
let (response, code) = index
|
let (response, code) = index
|
||||||
.get_all_documents(GetAllDocumentsOptions {
|
.get_all_documents(GetAllDocumentsOptions { offset: Some(5), ..Default::default() })
|
||||||
offset: Some(5),
|
|
||||||
..Default::default()
|
|
||||||
})
|
|
||||||
.await;
|
.await;
|
||||||
assert_eq!(code, 200);
|
assert_eq!(code, 200);
|
||||||
assert_eq!(response["results"].as_array().unwrap().len(), 20);
|
assert_eq!(response["results"].as_array().unwrap().len(), 20);
|
||||||
@ -338,24 +315,12 @@ async fn get_document_s_nested_attributes_to_retrieve() {
|
|||||||
assert_eq!(code, 202);
|
assert_eq!(code, 202);
|
||||||
index.wait_task(1).await;
|
index.wait_task(1).await;
|
||||||
|
|
||||||
let (response, code) = index
|
let (response, code) =
|
||||||
.get_document(
|
index.get_document(0, Some(GetDocumentOptions { fields: Some(vec!["content"]) })).await;
|
||||||
0,
|
|
||||||
Some(GetDocumentOptions {
|
|
||||||
fields: Some(vec!["content"]),
|
|
||||||
}),
|
|
||||||
)
|
|
||||||
.await;
|
|
||||||
assert_eq!(code, 200);
|
assert_eq!(code, 200);
|
||||||
assert_eq!(response, json!({}));
|
assert_eq!(response, json!({}));
|
||||||
let (response, code) = index
|
let (response, code) =
|
||||||
.get_document(
|
index.get_document(1, Some(GetDocumentOptions { fields: Some(vec!["content"]) })).await;
|
||||||
1,
|
|
||||||
Some(GetDocumentOptions {
|
|
||||||
fields: Some(vec!["content"]),
|
|
||||||
}),
|
|
||||||
)
|
|
||||||
.await;
|
|
||||||
assert_eq!(code, 200);
|
assert_eq!(code, 200);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
response,
|
response,
|
||||||
@ -368,12 +333,7 @@ async fn get_document_s_nested_attributes_to_retrieve() {
|
|||||||
);
|
);
|
||||||
|
|
||||||
let (response, code) = index
|
let (response, code) = index
|
||||||
.get_document(
|
.get_document(0, Some(GetDocumentOptions { fields: Some(vec!["content.truc"]) }))
|
||||||
0,
|
|
||||||
Some(GetDocumentOptions {
|
|
||||||
fields: Some(vec!["content.truc"]),
|
|
||||||
}),
|
|
||||||
)
|
|
||||||
.await;
|
.await;
|
||||||
assert_eq!(code, 200);
|
assert_eq!(code, 200);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -383,12 +343,7 @@ async fn get_document_s_nested_attributes_to_retrieve() {
|
|||||||
})
|
})
|
||||||
);
|
);
|
||||||
let (response, code) = index
|
let (response, code) = index
|
||||||
.get_document(
|
.get_document(1, Some(GetDocumentOptions { fields: Some(vec!["content.truc"]) }))
|
||||||
1,
|
|
||||||
Some(GetDocumentOptions {
|
|
||||||
fields: Some(vec!["content.truc"]),
|
|
||||||
}),
|
|
||||||
)
|
|
||||||
.await;
|
.await;
|
||||||
assert_eq!(code, 200);
|
assert_eq!(code, 200);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -405,20 +360,13 @@ async fn get_document_s_nested_attributes_to_retrieve() {
|
|||||||
async fn get_documents_displayed_attributes_is_ignored() {
|
async fn get_documents_displayed_attributes_is_ignored() {
|
||||||
let server = Server::new().await;
|
let server = Server::new().await;
|
||||||
let index = server.index("test");
|
let index = server.index("test");
|
||||||
index
|
index.update_settings(json!({"displayedAttributes": ["gender"]})).await;
|
||||||
.update_settings(json!({"displayedAttributes": ["gender"]}))
|
|
||||||
.await;
|
|
||||||
index.load_test_set().await;
|
index.load_test_set().await;
|
||||||
|
|
||||||
let (response, code) = index
|
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
|
||||||
.get_all_documents(GetAllDocumentsOptions::default())
|
|
||||||
.await;
|
|
||||||
assert_eq!(code, 200);
|
assert_eq!(code, 200);
|
||||||
assert_eq!(response["results"].as_array().unwrap().len(), 20);
|
assert_eq!(response["results"].as_array().unwrap().len(), 20);
|
||||||
assert_eq!(
|
assert_eq!(response["results"][0].as_object().unwrap().keys().count(), 16);
|
||||||
response["results"][0].as_object().unwrap().keys().count(),
|
|
||||||
16
|
|
||||||
);
|
|
||||||
assert!(response["results"][0]["gender"] != json!(null));
|
assert!(response["results"][0]["gender"] != json!(null));
|
||||||
|
|
||||||
assert_eq!(response["offset"], json!(0));
|
assert_eq!(response["offset"], json!(0));
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
use crate::common::{GetAllDocumentsOptions, Server};
|
use serde_json::json;
|
||||||
|
|
||||||
use crate::common::encoder::Encoder;
|
use crate::common::encoder::Encoder;
|
||||||
use serde_json::json;
|
use crate::common::{GetAllDocumentsOptions, Server};
|
||||||
|
|
||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
async fn error_document_update_create_index_bad_uid() {
|
async fn error_document_update_create_index_bad_uid() {
|
||||||
@ -84,10 +84,7 @@ async fn update_document() {
|
|||||||
|
|
||||||
let (response, code) = index.get_document(1, None).await;
|
let (response, code) = index.get_document(1, None).await;
|
||||||
assert_eq!(code, 200);
|
assert_eq!(code, 200);
|
||||||
assert_eq!(
|
assert_eq!(response.to_string(), r##"{"doc_id":1,"content":"foo","other":"bar"}"##);
|
||||||
response.to_string(),
|
|
||||||
r##"{"doc_id":1,"content":"foo","other":"bar"}"##
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
@ -125,10 +122,7 @@ async fn update_document_gzip_encoded() {
|
|||||||
|
|
||||||
let (response, code) = index.get_document(1, None).await;
|
let (response, code) = index.get_document(1, None).await;
|
||||||
assert_eq!(code, 200);
|
assert_eq!(code, 200);
|
||||||
assert_eq!(
|
assert_eq!(response.to_string(), r##"{"doc_id":1,"content":"foo","other":"bar"}"##);
|
||||||
response.to_string(),
|
|
||||||
r##"{"doc_id":1,"content":"foo","other":"bar"}"##
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[actix_rt::test]
|
#[actix_rt::test]
|
||||||
@ -143,10 +137,7 @@ async fn update_larger_dataset() {
|
|||||||
assert_eq!(response["type"], "documentAdditionOrUpdate");
|
assert_eq!(response["type"], "documentAdditionOrUpdate");
|
||||||
assert_eq!(response["details"]["indexedDocuments"], 77);
|
assert_eq!(response["details"]["indexedDocuments"], 77);
|
||||||
let (response, code) = index
|
let (response, code) = index
|
||||||
.get_all_documents(GetAllDocumentsOptions {
|
.get_all_documents(GetAllDocumentsOptions { limit: Some(1000), ..Default::default() })
|
||||||
limit: Some(1000),
|
|
||||||
..Default::default()
|
|
||||||
})
|
|
||||||
.await;
|
.await;
|
||||||
assert_eq!(code, 200);
|
assert_eq!(code, 200);
|
||||||
assert_eq!(response["results"].as_array().unwrap().len(), 77);
|
assert_eq!(response["results"].as_array().unwrap().len(), 77);
|
||||||
|
@ -1,10 +1,10 @@
|
|||||||
mod data;
|
mod data;
|
||||||
|
|
||||||
use crate::common::{default_settings, GetAllDocumentsOptions, Server};
|
|
||||||
use meilisearch_http::Opt;
|
-use meilisearch_http::Opt;
 use serde_json::json;
 
 use self::data::GetDump;
+use crate::common::{default_settings, GetAllDocumentsOptions, Server};
 
 // all the following test are ignored on windows. See #2364
 #[actix_rt::test]
@@ -17,14 +17,8 @@ async fn import_dump_v1() {
         GetDump::MoviesWithSettingsV1.path(),
         GetDump::RubyGemsWithSettingsV1.path(),
     ] {
-        let options = Opt {
-            import_dump: Some(path),
-            ..default_settings(temp.path())
-        };
-        let error = Server::new_with_options(options)
-            .await
-            .map(drop)
-            .unwrap_err();
+        let options = Opt { import_dump: Some(path), ..default_settings(temp.path()) };
+        let error = Server::new_with_options(options).await.map(drop).unwrap_err();
 
         assert_eq!(error.to_string(), "The version 1 of the dumps is not supported anymore. You can re-export your dump from a version between 0.21 and 0.24, or start fresh from a version 0.25 onwards.");
     }
@@ -35,10 +29,8 @@ async fn import_dump_v1() {
 async fn import_dump_v2_movie_raw() {
     let temp = tempfile::tempdir().unwrap();
 
-    let options = Opt {
-        import_dump: Some(GetDump::MoviesRawV2.path()),
-        ..default_settings(temp.path())
-    };
+    let options =
+        Opt { import_dump: Some(GetDump::MoviesRawV2.path()), ..default_settings(temp.path()) };
     let server = Server::new_with_options(options).await.unwrap();
 
     let (indexes, code) = server.list_indexes(None, None).await;
@@ -227,10 +219,8 @@ async fn import_dump_v2_rubygems_with_settings() {
 async fn import_dump_v3_movie_raw() {
     let temp = tempfile::tempdir().unwrap();
 
-    let options = Opt {
-        import_dump: Some(GetDump::MoviesRawV3.path()),
-        ..default_settings(temp.path())
-    };
+    let options =
+        Opt { import_dump: Some(GetDump::MoviesRawV3.path()), ..default_settings(temp.path()) };
     let server = Server::new_with_options(options).await.unwrap();
 
     let (indexes, code) = server.list_indexes(None, None).await;
@@ -419,10 +409,8 @@ async fn import_dump_v3_rubygems_with_settings() {
 async fn import_dump_v4_movie_raw() {
     let temp = tempfile::tempdir().unwrap();
 
-    let options = Opt {
-        import_dump: Some(GetDump::MoviesRawV4.path()),
-        ..default_settings(temp.path())
-    };
+    let options =
+        Opt { import_dump: Some(GetDump::MoviesRawV4.path()), ..default_settings(temp.path()) };
     let server = Server::new_with_options(options).await.unwrap();
 
     let (indexes, code) = server.list_indexes(None, None).await;
@@ -611,10 +599,8 @@ async fn import_dump_v4_rubygems_with_settings() {
 async fn import_dump_v5() {
     let temp = tempfile::tempdir().unwrap();
 
-    let options = Opt {
-        import_dump: Some(GetDump::TestV5.path()),
-        ..default_settings(temp.path())
-    };
+    let options =
+        Opt { import_dump: Some(GetDump::TestV5.path()), ..default_settings(temp.path()) };
     let mut server = Server::new_auth_with_options(options, temp).await;
     server.use_api_key("MASTER_KEY");
 
@@ -654,14 +640,10 @@ async fn import_dump_v5() {
     assert_eq!(code, 200);
     assert_eq!(stats, expected_stats);
 
-    let (docs, code) = index2
-        .get_all_documents(GetAllDocumentsOptions::default())
-        .await;
+    let (docs, code) = index2.get_all_documents(GetAllDocumentsOptions::default()).await;
     assert_eq!(code, 200);
     assert_eq!(docs["results"].as_array().unwrap().len(), 10);
-    let (docs, code) = index1
-        .get_all_documents(GetAllDocumentsOptions::default())
-        .await;
+    let (docs, code) = index1.get_all_documents(GetAllDocumentsOptions::default()).await;
     assert_eq!(code, 200);
     assert_eq!(docs["results"].as_array().unwrap().len(), 10);
 
@@ -1,10 +1,11 @@
-use crate::common::encoder::Encoder;
-use crate::common::Server;
 use actix_web::http::header::ContentType;
 use actix_web::test;
 use http::header::ACCEPT_ENCODING;
 use serde_json::{json, Value};
 
+use crate::common::encoder::Encoder;
+use crate::common::Server;
+
 #[actix_rt::test]
 async fn create_index_no_primary_key() {
     let server = Server::new().await;
@@ -1,6 +1,6 @@
+use serde_json::{json, Value};
+
 use crate::common::Server;
-use serde_json::json;
-use serde_json::Value;
 
 #[actix_rt::test]
 async fn create_and_get_index() {
@@ -63,12 +63,8 @@ async fn list_multiple_indexes() {
     assert!(response["results"].is_array());
     let arr = response["results"].as_array().unwrap();
     assert_eq!(arr.len(), 2);
-    assert!(arr
-        .iter()
-        .any(|entry| entry["uid"] == "test" && entry["primaryKey"] == Value::Null));
-    assert!(arr
-        .iter()
-        .any(|entry| entry["uid"] == "test1" && entry["primaryKey"] == "key"));
+    assert!(arr.iter().any(|entry| entry["uid"] == "test" && entry["primaryKey"] == Value::Null));
+    assert!(arr.iter().any(|entry| entry["uid"] == "test1" && entry["primaryKey"] == "key"));
 }
 
 #[actix_rt::test]
@@ -77,10 +73,7 @@ async fn get_and_paginate_indexes() {
     const NB_INDEXES: usize = 50;
     for i in 0..NB_INDEXES {
         server.index(&format!("test_{i:02}")).create(None).await;
-        server
-            .index(&format!("test_{i:02}"))
-            .wait_task(i as u64)
-            .await;
+        server.index(&format!("test_{i:02}")).wait_task(i as u64).await;
     }
 
     // basic
@@ -17,10 +17,7 @@ async fn stats() {
     assert_eq!(code, 200);
     assert_eq!(response["numberOfDocuments"], 0);
     assert!(response["isIndexing"] == false);
-    assert!(response["fieldDistribution"]
-        .as_object()
-        .unwrap()
-        .is_empty());
+    assert!(response["fieldDistribution"].as_object().unwrap().is_empty());
 
     let documents = json!([
         {
@@ -1,7 +1,9 @@
+use serde_json::json;
+use time::format_description::well_known::Rfc3339;
+use time::OffsetDateTime;
+
 use crate::common::encoder::Encoder;
 use crate::common::Server;
-use serde_json::json;
-use time::{format_description::well_known::Rfc3339, OffsetDateTime};
 
 #[actix_rt::test]
 async fn update_primary_key() {
@@ -1,7 +1,7 @@
-use crate::common::Server;
 use serde_json::json;
 
 use super::DOCUMENTS;
+use crate::common::Server;
 
 #[actix_rt::test]
 async fn search_unexisting_index() {
@@ -45,16 +45,14 @@ async fn search_invalid_highlight_and_crop_tags() {
 
     for field in fields {
         // object
-        let (response, code) = index
-            .search_post(json!({field.to_string(): {"marker": "<crop>"}}))
-            .await;
+        let (response, code) =
+            index.search_post(json!({field.to_string(): {"marker": "<crop>"}})).await;
         assert_eq!(code, 400, "field {} passing object: {}", &field, response);
         assert_eq!(response["code"], "bad_request");
 
         // array
-        let (response, code) = index
-            .search_post(json!({field.to_string(): ["marker", "<crop>"]}))
-            .await;
+        let (response, code) =
+            index.search_post(json!({field.to_string(): ["marker", "<crop>"]})).await;
         assert_eq!(code, 400, "field {} passing array: {}", &field, response);
         assert_eq!(response["code"], "bad_request");
     }
@@ -65,9 +63,7 @@ async fn filter_invalid_syntax_object() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    index
-        .update_settings(json!({"filterableAttributes": ["title"]}))
-        .await;
+    index.update_settings(json!({"filterableAttributes": ["title"]})).await;
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
@@ -92,9 +88,7 @@ async fn filter_invalid_syntax_array() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    index
-        .update_settings(json!({"filterableAttributes": ["title"]}))
-        .await;
+    index.update_settings(json!({"filterableAttributes": ["title"]})).await;
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
@@ -119,9 +113,7 @@ async fn filter_invalid_syntax_string() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    index
-        .update_settings(json!({"filterableAttributes": ["title"]}))
-        .await;
+    index.update_settings(json!({"filterableAttributes": ["title"]})).await;
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
@@ -134,13 +126,10 @@ async fn filter_invalid_syntax_string() {
         "link": "https://docs.meilisearch.com/errors#invalid_filter"
     });
     index
-        .search(
-            json!({"filter": "title = Glass XOR title = Glass"}),
-            |response, code| {
-                assert_eq!(response, expected_response);
-                assert_eq!(code, 400);
-            },
-        )
+        .search(json!({"filter": "title = Glass XOR title = Glass"}), |response, code| {
+            assert_eq!(response, expected_response);
+            assert_eq!(code, 400);
+        })
         .await;
 }
 
@@ -149,9 +138,7 @@ async fn filter_invalid_attribute_array() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    index
-        .update_settings(json!({"filterableAttributes": ["title"]}))
-        .await;
+    index.update_settings(json!({"filterableAttributes": ["title"]})).await;
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
@@ -176,9 +163,7 @@ async fn filter_invalid_attribute_string() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    index
-        .update_settings(json!({"filterableAttributes": ["title"]}))
-        .await;
+    index.update_settings(json!({"filterableAttributes": ["title"]})).await;
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
@@ -203,9 +188,7 @@ async fn filter_reserved_geo_attribute_array() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    index
-        .update_settings(json!({"filterableAttributes": ["title"]}))
-        .await;
+    index.update_settings(json!({"filterableAttributes": ["title"]})).await;
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
@@ -230,9 +213,7 @@ async fn filter_reserved_geo_attribute_string() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    index
-        .update_settings(json!({"filterableAttributes": ["title"]}))
-        .await;
+    index.update_settings(json!({"filterableAttributes": ["title"]})).await;
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
@@ -257,9 +238,7 @@ async fn filter_reserved_attribute_array() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    index
-        .update_settings(json!({"filterableAttributes": ["title"]}))
-        .await;
+    index.update_settings(json!({"filterableAttributes": ["title"]})).await;
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
@@ -272,13 +251,10 @@ async fn filter_reserved_attribute_array() {
         "link": "https://docs.meilisearch.com/errors#invalid_filter"
     });
     index
-        .search(
-            json!({"filter": ["_geoDistance = Glass"]}),
-            |response, code| {
-                assert_eq!(response, expected_response);
-                assert_eq!(code, 400);
-            },
-        )
+        .search(json!({"filter": ["_geoDistance = Glass"]}), |response, code| {
+            assert_eq!(response, expected_response);
+            assert_eq!(code, 400);
+        })
        .await;
 }
 
@@ -287,9 +263,7 @@ async fn filter_reserved_attribute_string() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    index
-        .update_settings(json!({"filterableAttributes": ["title"]}))
-        .await;
+    index.update_settings(json!({"filterableAttributes": ["title"]})).await;
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
@@ -302,13 +276,10 @@ async fn filter_reserved_attribute_string() {
         "link": "https://docs.meilisearch.com/errors#invalid_filter"
     });
     index
-        .search(
-            json!({"filter": "_geoDistance = Glass"}),
-            |response, code| {
-                assert_eq!(response, expected_response);
-                assert_eq!(code, 400);
-            },
-        )
+        .search(json!({"filter": "_geoDistance = Glass"}), |response, code| {
+            assert_eq!(response, expected_response);
+            assert_eq!(code, 400);
+        })
         .await;
 }
 
@@ -317,9 +288,7 @@ async fn sort_geo_reserved_attribute() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    index
-        .update_settings(json!({"sortableAttributes": ["id"]}))
-        .await;
+    index.update_settings(json!({"sortableAttributes": ["id"]})).await;
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
@@ -349,9 +318,7 @@ async fn sort_reserved_attribute() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    index
-        .update_settings(json!({"sortableAttributes": ["id"]}))
-        .await;
+    index.update_settings(json!({"sortableAttributes": ["id"]})).await;
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
@@ -381,9 +348,7 @@ async fn sort_unsortable_attribute() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    index
-        .update_settings(json!({"sortableAttributes": ["id"]}))
-        .await;
+    index.update_settings(json!({"sortableAttributes": ["id"]})).await;
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
@@ -413,9 +378,7 @@ async fn sort_invalid_syntax() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    index
-        .update_settings(json!({"sortableAttributes": ["id"]}))
-        .await;
+    index.update_settings(json!({"sortableAttributes": ["id"]})).await;
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
@@ -1,15 +1,14 @@
+use serde_json::json;
+
 use super::*;
 use crate::common::Server;
-use serde_json::json;
 
 #[actix_rt::test]
 async fn formatted_contain_wildcard() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    index
-        .update_settings(json!({ "displayedAttributes": ["id", "cattos"] }))
-        .await;
+    index.update_settings(json!({ "displayedAttributes": ["id", "cattos"] })).await;
 
     let documents = NESTED_DOCUMENTS.clone();
     index.add_documents(documents, None).await;
@@ -34,19 +33,16 @@ async fn formatted_contain_wildcard() {
         .await;
 
     index
-        .search(
-            json!({ "q": "pesti", "attributesToRetrieve": ["*"] }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(
-                    response["hits"][0],
-                    json!({
-                        "id": 852,
-                        "cattos": "pesti",
-                    })
-                );
-            },
-        )
+        .search(json!({ "q": "pesti", "attributesToRetrieve": ["*"] }), |response, code| {
+            assert_eq!(code, 200, "{}", response);
+            assert_eq!(
+                response["hits"][0],
+                json!({
+                    "id": 852,
+                    "cattos": "pesti",
+                })
+            );
+        })
         .await;
 
     index
@@ -91,23 +87,20 @@ async fn formatted_contain_wildcard() {
         .await;
 
     index
-        .search(
-            json!({ "q": "pesti", "attributesToCrop": ["*"] }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(
-                    response["hits"][0],
-                    json!({
-                        "id": 852,
-                        "cattos": "pesti",
-                        "_formatted": {
-                            "id": "852",
-                            "cattos": "pesti",
-                        }
-                    })
-                );
-            },
-        )
+        .search(json!({ "q": "pesti", "attributesToCrop": ["*"] }), |response, code| {
+            assert_eq!(code, 200, "{}", response);
+            assert_eq!(
+                response["hits"][0],
+                json!({
+                    "id": 852,
+                    "cattos": "pesti",
+                    "_formatted": {
+                        "id": "852",
+                        "cattos": "pesti",
+                    }
+                })
+            );
+        })
         .await;
 }
 
@@ -121,27 +114,24 @@ async fn format_nested() {
     index.wait_task(0).await;
 
     index
-        .search(
-            json!({ "q": "pesti", "attributesToRetrieve": ["doggos"] }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(
-                    response["hits"][0],
-                    json!({
-                        "doggos": [
-                            {
-                                "name": "bobby",
-                                "age": 2,
-                            },
-                            {
-                                "name": "buddy",
-                                "age": 4,
-                            },
-                        ],
-                    })
-                );
-            },
-        )
+        .search(json!({ "q": "pesti", "attributesToRetrieve": ["doggos"] }), |response, code| {
+            assert_eq!(code, 200, "{}", response);
+            assert_eq!(
+                response["hits"][0],
+                json!({
+                    "doggos": [
+                        {
+                            "name": "bobby",
+                            "age": 2,
+                        },
+                        {
+                            "name": "buddy",
+                            "age": 4,
+                        },
+                    ],
+                })
+            );
+        })
         .await;
 
     index
@@ -297,9 +287,7 @@ async fn displayedattr_2_smol() {
     let index = server.index("test");
 
     // not enough displayed for the other settings
-    index
-        .update_settings(json!({ "displayedAttributes": ["id"] }))
-        .await;
+    index.update_settings(json!({ "displayedAttributes": ["id"] })).await;
 
     let documents = NESTED_DOCUMENTS.clone();
     index.add_documents(documents, None).await;
@@ -319,36 +307,30 @@ async fn displayedattr_2_smol() {
         .await;
 
     index
-        .search(
-            json!({ "attributesToRetrieve": ["id"] }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(
-                    response["hits"][0],
-                    json!({
-                        "id": 852,
-                    })
-                );
-            },
-        )
+        .search(json!({ "attributesToRetrieve": ["id"] }), |response, code| {
+            assert_eq!(code, 200, "{}", response);
+            assert_eq!(
+                response["hits"][0],
+                json!({
+                    "id": 852,
+                })
+            );
+        })
         .await;
 
     index
-        .search(
-            json!({ "attributesToHighlight": ["id"] }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(
-                    response["hits"][0],
-                    json!({
-                        "id": 852,
-                        "_formatted": {
-                            "id": "852",
-                        }
-                    })
-                );
-            },
-        )
+        .search(json!({ "attributesToHighlight": ["id"] }), |response, code| {
+            assert_eq!(code, 200, "{}", response);
+            assert_eq!(
+                response["hits"][0],
+                json!({
+                    "id": 852,
+                    "_formatted": {
+                        "id": "852",
+                    }
+                })
+            );
+        })
         .await;
 
     index
@@ -385,43 +367,34 @@ async fn displayedattr_2_smol() {
         .await;
 
     index
-        .search(
-            json!({ "attributesToHighlight": ["cattos"] }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(
-                    response["hits"][0],
-                    json!({
-                        "id": 852,
-                    })
-                );
-            },
-        )
+        .search(json!({ "attributesToHighlight": ["cattos"] }), |response, code| {
+            assert_eq!(code, 200, "{}", response);
+            assert_eq!(
+                response["hits"][0],
+                json!({
+                    "id": 852,
+                })
+            );
+        })
         .await;
 
     index
-        .search(
-            json!({ "attributesToCrop": ["cattos"] }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(
-                    response["hits"][0],
-                    json!({
-                        "id": 852,
-                    })
-                );
-            },
-        )
+        .search(json!({ "attributesToCrop": ["cattos"] }), |response, code| {
+            assert_eq!(code, 200, "{}", response);
+            assert_eq!(
+                response["hits"][0],
+                json!({
+                    "id": 852,
+                })
+            );
+        })
         .await;
 
     index
-        .search(
-            json!({ "attributesToRetrieve": ["cattos"] }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                assert_eq!(response["hits"][0], json!({}));
-            },
-        )
+        .search(json!({ "attributesToRetrieve": ["cattos"] }), |response, code| {
+            assert_eq!(code, 200, "{}", response);
+            assert_eq!(response["hits"][0], json!({}));
+        })
         .await;
 
     index
@@ -5,10 +5,11 @@ mod errors;
 mod formatted;
 mod pagination;
 
-use crate::common::Server;
 use once_cell::sync::Lazy;
 use serde_json::{json, Value};
 
+use crate::common::Server;
+
 pub(self) static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
     json!([
         {
@@ -199,9 +200,7 @@ async fn search_with_filter_string_notation() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    index
-        .update_settings(json!({"filterableAttributes": ["title"]}))
-        .await;
+    index.update_settings(json!({"filterableAttributes": ["title"]})).await;
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
@@ -221,9 +220,7 @@ async fn search_with_filter_string_notation() {
 
     let index = server.index("nested");
 
-    index
-        .update_settings(json!({"filterableAttributes": ["cattos", "doggos.age"]}))
-        .await;
+    index.update_settings(json!({"filterableAttributes": ["cattos", "doggos.age"]})).await;
 
     let documents = NESTED_DOCUMENTS.clone();
     index.add_documents(documents, None).await;
@@ -262,9 +259,7 @@ async fn search_with_filter_array_notation() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    index
-        .update_settings(json!({"filterableAttributes": ["title"]}))
-        .await;
+    index.update_settings(json!({"filterableAttributes": ["title"]})).await;
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
@@ -292,9 +287,7 @@ async fn search_with_sort_on_numbers() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    index
-        .update_settings(json!({"sortableAttributes": ["id"]}))
-        .await;
+    index.update_settings(json!({"sortableAttributes": ["id"]})).await;
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
@@ -314,9 +307,7 @@ async fn search_with_sort_on_numbers() {
 
     let index = server.index("nested");
 
-    index
-        .update_settings(json!({"sortableAttributes": ["doggos.age"]}))
-        .await;
+    index.update_settings(json!({"sortableAttributes": ["doggos.age"]})).await;
 
     let documents = NESTED_DOCUMENTS.clone();
     index.add_documents(documents, None).await;
@@ -340,9 +331,7 @@ async fn search_with_sort_on_strings() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    index
-        .update_settings(json!({"sortableAttributes": ["title"]}))
-        .await;
+    index.update_settings(json!({"sortableAttributes": ["title"]})).await;
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
@@ -362,9 +351,7 @@ async fn search_with_sort_on_strings() {
 
     let index = server.index("nested");
 
-    index
-        .update_settings(json!({"sortableAttributes": ["doggos.name"]}))
-        .await;
+    index.update_settings(json!({"sortableAttributes": ["doggos.name"]})).await;
 
     let documents = NESTED_DOCUMENTS.clone();
     index.add_documents(documents, None).await;
@@ -388,9 +375,7 @@ async fn search_with_multiple_sort() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    index
-        .update_settings(json!({"sortableAttributes": ["id", "title"]}))
-        .await;
+    index.update_settings(json!({"sortableAttributes": ["id", "title"]})).await;
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
@@ -410,9 +395,7 @@ async fn search_facet_distribution() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    index
-        .update_settings(json!({"filterableAttributes": ["title"]}))
-        .await;
+    index.update_settings(json!({"filterableAttributes": ["title"]})).await;
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
@@ -434,9 +417,7 @@ async fn search_facet_distribution() {
 
     let index = server.index("nested");
 
-    index
-        .update_settings(json!({"filterableAttributes": ["father", "doggos.name"]}))
-        .await;
+    index.update_settings(json!({"filterableAttributes": ["father", "doggos.name"]})).await;
 
     let documents = NESTED_DOCUMENTS.clone();
     index.add_documents(documents, None).await;
@@ -467,9 +448,7 @@ async fn search_facet_distribution() {
         )
         .await;
 
-    index
-        .update_settings(json!({"filterableAttributes": ["doggos"]}))
-        .await;
+    index.update_settings(json!({"filterableAttributes": ["doggos"]})).await;
     index.wait_task(4).await;
 
     index
@@ -502,10 +481,7 @@ async fn search_facet_distribution() {
                 dist["doggos.name"],
                 json!({ "bobby": 1, "buddy": 1, "gros bill": 1, "turbo": 1, "fast": 1})
             );
-            assert_eq!(
-                dist["doggos.age"],
-                json!({ "2": 1, "4": 1, "5": 1, "6": 1, "8": 1})
-            );
+            assert_eq!(dist["doggos.age"], json!({ "2": 1, "4": 1, "5": 1, "6": 1, "8": 1}));
         },
     )
     .await;
@@ -516,17 +492,14 @@ async fn displayed_attributes() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    index
-        .update_settings(json!({ "displayedAttributes": ["title"] }))
-        .await;
+    index.update_settings(json!({ "displayedAttributes": ["title"] })).await;
 
     let documents = DOCUMENTS.clone();
     index.add_documents(documents, None).await;
     index.wait_task(1).await;
 
-    let (response, code) = index
-        .search_post(json!({ "attributesToRetrieve": ["title", "id"] }))
-        .await;
+    let (response, code) =
+        index.search_post(json!({ "attributesToRetrieve": ["title", "id"] })).await;
     assert_eq!(code, 200, "{}", response);
     assert!(response["hits"][0].get("title").is_some());
 }
@@ -536,9 +509,7 @@ async fn placeholder_search_is_hard_limited() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    let documents: Vec<_> = (0..1200)
-        .map(|i| json!({ "id": i, "text": "I am unique!" }))
-        .collect();
+    let documents: Vec<_> = (0..1200).map(|i| json!({ "id": i, "text": "I am unique!" })).collect();
     index.add_documents(documents.into(), None).await;
     index.wait_task(0).await;
 
@@ -567,9 +538,7 @@ async fn placeholder_search_is_hard_limited() {
         )
        .await;
 
-    index
-        .update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } }))
-        .await;
+    index.update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } })).await;
     index.wait_task(1).await;
 
     index
@@ -603,9 +572,7 @@ async fn search_is_hard_limited() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    let documents: Vec<_> = (0..1200)
-        .map(|i| json!({ "id": i, "text": "I am unique!" }))
-        .collect();
+    let documents: Vec<_> = (0..1200).map(|i| json!({ "id": i, "text": "I am unique!" })).collect();
     index.add_documents(documents.into(), None).await;
     index.wait_task(0).await;
 
@@ -636,9 +603,7 @@ async fn search_is_hard_limited() {
         )
        .await;
 
-    index
-        .update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } }))
-        .await;
+    index.update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } })).await;
     index.wait_task(1).await;
 
     index
@@ -674,13 +639,9 @@ async fn faceting_max_values_per_facet() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    index
-        .update_settings(json!({ "filterableAttributes": ["number"] }))
-        .await;
+    index.update_settings(json!({ "filterableAttributes": ["number"] })).await;
 
-    let documents: Vec<_> = (0..10_000)
-        .map(|id| json!({ "id": id, "number": id * 10 }))
-        .collect();
+    let documents: Vec<_> = (0..10_000).map(|id| json!({ "id": id, "number": id * 10 })).collect();
     index.add_documents(json!(documents), None).await;
     index.wait_task(1).await;
 
@@ -697,9 +658,7 @@ async fn faceting_max_values_per_facet() {
         )
        .await;
 
-    index
-        .update_settings(json!({ "faceting": { "maxValuesPerFacet": 10_000 } }))
-        .await;
+    index.update_settings(json!({ "faceting": { "maxValuesPerFacet": 10_000 } })).await;
     index.wait_task(2).await;
 
     index
@@ -1,23 +1,20 @@
-use crate::common::Server;
 use serde_json::json;
 
+use crate::common::Server;
+
 #[actix_rt::test]
 async fn set_and_reset_distinct_attribute() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    let (_response, _code) = index
-        .update_settings(json!({ "distinctAttribute": "test"}))
-        .await;
+    let (_response, _code) = index.update_settings(json!({ "distinctAttribute": "test"})).await;
     index.wait_task(0).await;
 
     let (response, _) = index.settings().await;
 
     assert_eq!(response["distinctAttribute"], "test");
 
-    index
-        .update_settings(json!({ "distinctAttribute": null }))
-        .await;
+    index.update_settings(json!({ "distinctAttribute": null })).await;
 
     index.wait_task(1).await;
 
@@ -13,14 +13,7 @@ static DEFAULT_SETTINGS_VALUES: Lazy<HashMap<&'static str, Value>> = Lazy::new(|
     map.insert("distinct_attribute", json!(Value::Null));
     map.insert(
         "ranking_rules",
-        json!([
-            "words",
-            "typo",
-            "proximity",
-            "attribute",
-            "sort",
-            "exactness"
-        ]),
+        json!(["words", "typo", "proximity", "attribute", "sort", "exactness"]),
     );
     map.insert("stop_words", json!([]));
     map.insert("synonyms", json!({}));
@@ -63,14 +56,7 @@ async fn get_settings() {
     assert_eq!(settings["distinctAttribute"], json!(null));
     assert_eq!(
         settings["rankingRules"],
-        json!([
-            "words",
-            "typo",
-            "proximity",
-            "attribute",
-            "sort",
-            "exactness"
-        ])
+        json!(["words", "typo", "proximity", "attribute", "sort", "exactness"])
     );
     assert_eq!(settings["stopWords"], json!([]));
     assert_eq!(
@@ -99,18 +85,14 @@ async fn error_update_settings_unknown_field() {
 async fn test_partial_update() {
     let server = Server::new().await;
     let index = server.index("test");
-    let (_response, _code) = index
-        .update_settings(json!({"displayedAttributes": ["foo"]}))
-        .await;
+    let (_response, _code) = index.update_settings(json!({"displayedAttributes": ["foo"]})).await;
     index.wait_task(0).await;
     let (response, code) = index.settings().await;
     assert_eq!(code, 200);
     assert_eq!(response["displayedAttributes"], json!(["foo"]));
     assert_eq!(response["searchableAttributes"], json!(["*"]));
 
-    let (_response, _) = index
-        .update_settings(json!({"searchableAttributes": ["bar"]}))
-        .await;
+    let (_response, _) = index.update_settings(json!({"searchableAttributes": ["bar"]})).await;
     index.wait_task(1).await;
 
     let (response, code) = index.settings().await;
@@ -158,10 +140,7 @@ async fn reset_all_settings() {
     assert_eq!(response["displayedAttributes"], json!(["name", "age"]));
     assert_eq!(response["searchableAttributes"], json!(["name"]));
     assert_eq!(response["stopWords"], json!(["the"]));
-    assert_eq!(
-        response["synonyms"],
-        json!({"puppy": ["dog", "doggo", "potat"] })
-    );
+    assert_eq!(response["synonyms"], json!({"puppy": ["dog", "doggo", "potat"] }));
     assert_eq!(response["filterableAttributes"], json!(["age"]));
 
     index.delete_settings().await;
@@ -299,9 +278,8 @@ async fn error_set_invalid_ranking_rules() {
     let index = server.index("test");
     index.create(None).await;
 
-    let (_response, _code) = index
-        .update_settings(json!({ "rankingRules": [ "manyTheFish"]}))
-        .await;
+    let (_response, _code) =
+        index.update_settings(json!({ "rankingRules": [ "manyTheFish"]})).await;
     index.wait_task(1).await;
     let (response, code) = index.get_task(1).await;
 
@@ -1,11 +1,10 @@
 use std::time::Duration;
 
-use crate::common::server::default_settings;
-use crate::common::GetAllDocumentsOptions;
-use crate::common::Server;
+use meilisearch_http::Opt;
 use tokio::time::sleep;
 
-use meilisearch_http::Opt;
+use crate::common::server::default_settings;
+use crate::common::{GetAllDocumentsOptions, Server};
 
 macro_rules! verify_snapshot {
     (
@@ -62,10 +61,7 @@ async fn perform_snapshot() {
 
     let snapshot_path = snapshot_dir.path().to_owned().join("db.snapshot");
 
-    let options = Opt {
-        import_snapshot: Some(snapshot_path),
-        ..default_settings(temp.path())
-    };
+    let options = Opt { import_snapshot: Some(snapshot_path), ..default_settings(temp.path()) };
 
     let snapshot_server = Server::new_with_options(options).await.unwrap();
 
@@ -1,5 +1,6 @@
 use serde_json::json;
-use time::{format_description::well_known::Rfc3339, OffsetDateTime};
+use time::format_description::well_known::Rfc3339;
+use time::OffsetDateTime;
 
 use crate::common::Server;
 
@@ -1,8 +1,9 @@
-use crate::common::Server;
 use serde_json::json;
 use time::format_description::well_known::Rfc3339;
 use time::OffsetDateTime;
 
+use crate::common::Server;
+
 #[actix_rt::test]
 async fn error_get_unexisting_task_status() {
     let server = Server::new().await;
@@ -49,10 +50,7 @@ async fn list_tasks() {
     index.create(None).await;
     index.wait_task(0).await;
     index
-        .add_documents(
-            serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
-            None,
-        )
+        .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
         .await;
     let (response, code) = index.list_tasks().await;
     assert_eq!(code, 200);
@@ -66,10 +64,7 @@ async fn list_tasks_with_star_filters() {
     index.create(None).await;
     index.wait_task(0).await;
     index
-        .add_documents(
-            serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
-            None,
-        )
+        .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
         .await;
     let (response, code) = index.service.get("/tasks?indexUid=test").await;
     assert_eq!(code, 200);
@@ -87,10 +82,8 @@ async fn list_tasks_with_star_filters() {
     assert_eq!(code, 200);
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 
-    let (response, code) = index
-        .service
-        .get("/tasks?type=*,documentAdditionOrUpdate&status=*")
-        .await;
+    let (response, code) =
+        index.service.get("/tasks?type=*,documentAdditionOrUpdate&status=*").await;
     assert_eq!(code, 200, "{:?}", response);
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 
@@ -116,10 +109,7 @@ async fn list_tasks_status_filtered() {
     index.create(None).await;
     index.wait_task(0).await;
     index
-        .add_documents(
-            serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
-            None,
-        )
+        .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
         .await;
 
     let (response, code) = index.filtered_tasks(&[], &["succeeded"]).await;
@@ -145,19 +135,15 @@ async fn list_tasks_type_filtered() {
     index.create(None).await;
     index.wait_task(0).await;
     index
-        .add_documents(
-            serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
-            None,
-        )
+        .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
        .await;
 
     let (response, code) = index.filtered_tasks(&["indexCreation"], &[]).await;
     assert_eq!(code, 200, "{}", response);
     assert_eq!(response["results"].as_array().unwrap().len(), 1);
 
-    let (response, code) = index
-        .filtered_tasks(&["indexCreation", "documentAdditionOrUpdate"], &[])
-        .await;
+    let (response, code) =
+        index.filtered_tasks(&["indexCreation", "documentAdditionOrUpdate"], &[]).await;
     assert_eq!(code, 200, "{}", response);
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 }
@@ -169,10 +155,7 @@ async fn list_tasks_status_and_type_filtered() {
     index.create(None).await;
     index.wait_task(0).await;
     index
-        .add_documents(
-            serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
-            None,
-        )
+        .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
         .await;
 
     let (response, code) = index.filtered_tasks(&["indexCreation"], &["failed"]).await;
@@ -2,14 +2,15 @@ use std::borrow::Borrow;
 use std::fmt::{self, Debug, Display};
 use std::io::{self, BufReader, Read, Seek, Write};
 
-use crate::error::{Code, ErrorCode};
-use crate::internal_error;
 use either::Either;
 use milli::documents::{DocumentsBatchBuilder, Error};
 use milli::Object;
 use serde::Deserialize;
 use serde_json::error::Category;
 
+use crate::error::{Code, ErrorCode};
+use crate::internal_error;
+
 type Result<T> = std::result::Result<T, DocumentFormatError>;
 
 #[derive(Debug)]
@@ -105,10 +106,7 @@ pub fn read_csv(input: impl Read, writer: impl Write + Seek) -> Result<usize> {
     builder.append_csv(csv).map_err(|e| (PayloadType::Csv, e))?;
 
     let count = builder.documents_count();
-    let _ = builder
-        .into_inner()
-        .map_err(Into::into)
-        .map_err(DocumentFormatError::Internal)?;
+    let _ = builder.into_inner().map_err(Into::into).map_err(DocumentFormatError::Internal)?;
 
     Ok(count as usize)
 }
@@ -119,9 +117,7 @@ pub fn read_ndjson(input: impl Read, writer: impl Write + Seek) -> Result<usize>
     let reader = BufReader::new(input);
 
     for result in serde_json::Deserializer::from_reader(reader).into_iter() {
-        let object = result
-            .map_err(Error::Json)
-            .map_err(|e| (PayloadType::Ndjson, e))?;
+        let object = result.map_err(Error::Json).map_err(|e| (PayloadType::Ndjson, e))?;
         builder
             .append_json_object(&object)
             .map_err(Into::into)
@@ -129,10 +125,7 @@ pub fn read_ndjson(input: impl Read, writer: impl Write + Seek) -> Result<usize>
     }
 
     let count = builder.documents_count();
-    let _ = builder
-        .into_inner()
-        .map_err(Into::into)
-        .map_err(DocumentFormatError::Internal)?;
+    let _ = builder.into_inner().map_err(Into::into).map_err(DocumentFormatError::Internal)?;
 
     Ok(count as usize)
 }
@@ -149,9 +142,8 @@ pub fn read_json(input: impl Read, writer: impl Write + Seek) -> Result<usize> {
         inner: Either<Vec<Object>, Object>,
     }
 
-    let content: ArrayOrSingleObject = serde_json::from_reader(reader)
-        .map_err(Error::Json)
-        .map_err(|e| (PayloadType::Json, e))?;
+    let content: ArrayOrSingleObject =
+        serde_json::from_reader(reader).map_err(Error::Json).map_err(|e| (PayloadType::Json, e))?;
 
     for object in content.inner.map_right(|o| vec![o]).into_inner() {
         builder
@@ -161,10 +153,7 @@ pub fn read_json(input: impl Read, writer: impl Write + Seek) -> Result<usize> {
     }
 
     let count = builder.documents_count();
-    let _ = builder
-        .into_inner()
-        .map_err(Into::into)
-        .map_err(DocumentFormatError::Internal)?;
+    let _ = builder.into_inner().map_err(Into::into).map_err(DocumentFormatError::Internal)?;
 
     Ok(count as usize)
 }
@ -1,6 +1,7 @@
|
|||||||
use std::fmt;
|
use std::fmt;
|
||||||
|
|
||||||
use actix_web::{self as aweb, http::StatusCode, HttpResponseBuilder};
|
use actix_web::http::StatusCode;
|
||||||
|
use actix_web::{self as aweb, HttpResponseBuilder};
|
||||||
use aweb::rt::task::JoinError;
|
use aweb::rt::task::JoinError;
|
||||||
use milli::heed::{Error as HeedError, MdbError};
|
use milli::heed::{Error as HeedError, MdbError};
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
@ -10,10 +11,7 @@ use serde::{Deserialize, Serialize};
|
|||||||
#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))]
|
#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))]
|
||||||
pub struct ResponseError {
|
pub struct ResponseError {
|
||||||
#[serde(skip)]
|
#[serde(skip)]
|
||||||
#[cfg_attr(
|
#[cfg_attr(feature = "test-traits", proptest(strategy = "strategy::status_code_strategy()"))]
|
||||||
feature = "test-traits",
|
|
||||||
proptest(strategy = "strategy::status_code_strategy()")
|
|
||||||
)]
|
|
||||||
code: StatusCode,
|
code: StatusCode,
|
||||||
message: String,
|
message: String,
|
||||||
#[serde(rename = "code")]
|
#[serde(rename = "code")]
|
||||||
@ -62,9 +60,7 @@ where
|
|||||||
impl aweb::error::ResponseError for ResponseError {
|
impl aweb::error::ResponseError for ResponseError {
|
||||||
fn error_response(&self) -> aweb::HttpResponse {
|
fn error_response(&self) -> aweb::HttpResponse {
|
||||||
let json = serde_json::to_vec(self).unwrap();
|
let json = serde_json::to_vec(self).unwrap();
|
||||||
HttpResponseBuilder::new(self.status_code())
|
HttpResponseBuilder::new(self.status_code()).content_type("application/json").body(json)
|
||||||
.content_type("application/json")
|
|
||||||
.body(json)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn status_code(&self) -> StatusCode {
|
fn status_code(&self) -> StatusCode {
|
||||||
@ -227,10 +223,9 @@ impl Code {
|
|||||||
|
|
||||||
BadParameter => ErrCode::invalid("bad_parameter", StatusCode::BAD_REQUEST),
|
BadParameter => ErrCode::invalid("bad_parameter", StatusCode::BAD_REQUEST),
|
||||||
BadRequest => ErrCode::invalid("bad_request", StatusCode::BAD_REQUEST),
|
BadRequest => ErrCode::invalid("bad_request", StatusCode::BAD_REQUEST),
|
||||||
DatabaseSizeLimitReached => ErrCode::internal(
|
DatabaseSizeLimitReached => {
|
||||||
"database_size_limit_reached",
|
ErrCode::internal("database_size_limit_reached", StatusCode::INTERNAL_SERVER_ERROR)
|
||||||
StatusCode::INTERNAL_SERVER_ERROR,
|
}
|
||||||
),
|
|
||||||
DocumentNotFound => ErrCode::invalid("document_not_found", StatusCode::NOT_FOUND),
|
DocumentNotFound => ErrCode::invalid("document_not_found", StatusCode::NOT_FOUND),
|
||||||
Internal => ErrCode::internal("internal", StatusCode::INTERNAL_SERVER_ERROR),
|
Internal => ErrCode::internal("internal", StatusCode::INTERNAL_SERVER_ERROR),
|
||||||
InvalidGeoField => ErrCode::invalid("invalid_geo_field", StatusCode::BAD_REQUEST),
|
InvalidGeoField => ErrCode::invalid("invalid_geo_field", StatusCode::BAD_REQUEST),
|
||||||
@@ -336,27 +331,15 @@ struct ErrCode {
 
 impl ErrCode {
     fn authentication(error_name: &'static str, status_code: StatusCode) -> ErrCode {
-        ErrCode {
-            status_code,
-            error_name,
-            error_type: ErrorType::AuthenticationError,
-        }
+        ErrCode { status_code, error_name, error_type: ErrorType::AuthenticationError }
     }
 
     fn internal(error_name: &'static str, status_code: StatusCode) -> ErrCode {
-        ErrCode {
-            status_code,
-            error_name,
-            error_type: ErrorType::InternalError,
-        }
+        ErrCode { status_code, error_name, error_type: ErrorType::InternalError }
     }
 
     fn invalid(error_name: &'static str, status_code: StatusCode) -> ErrCode {
-        ErrCode {
-            status_code,
-            error_name,
-            error_type: ErrorType::InvalidRequestError,
-        }
+        ErrCode { status_code, error_name, error_type: ErrorType::InvalidRequestError }
     }
 }
 
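The one-line struct literals on the `+` side are the visible effect of `use_small_heuristics = "max"` from the new .rustfmt.toml: rustfmt keeps struct literals, calls, and chains on a single line whenever they fit within `max_width` (100 columns by default). A tiny sketch of the rule (types invented for illustration):

    struct Point {
        x: i32,
        y: i32,
    }

    fn origin() -> Point {
        // Fits within max_width, so rustfmt leaves this literal on one line
        // under use_small_heuristics = "max".
        Point { x: 0, y: 0 }
    }

    fn main() {
        let p = origin();
        assert_eq!((p.x, p.y), (0, 0));
    }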
@@ -1,8 +1,9 @@
-use serde::{Deserialize, Serialize};
 use std::error::Error;
 use std::fmt;
 use std::str::FromStr;
 
+use serde::{Deserialize, Serialize};
+
 /// An index uid is composed of only ascii alphanumeric characters, - and _, between 1 and 400
 /// bytes long
 #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
@@ -38,9 +39,7 @@ impl TryFrom<String> for IndexUid {
     type Error = IndexUidFormatError;
 
     fn try_from(uid: String) -> Result<Self, Self::Error> {
-        if !uid
-            .chars()
-            .all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_')
+        if !uid.chars().all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_')
             || uid.is_empty()
             || uid.len() > 400
         {
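As reformatted, the whole validity rule reads as a single predicate chain. The same check as a standalone sketch (a hypothetical free function mirroring the `TryFrom` body above):

    // Valid index uids: ASCII alphanumerics plus '-' and '_',
    // non-empty, and at most 400 bytes (len() counts bytes).
    fn is_valid_index_uid(uid: &str) -> bool {
        uid.chars().all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_')
            && !uid.is_empty()
            && uid.len() <= 400
    }

    fn main() {
        assert!(is_valid_index_uid("movies_2022"));
        assert!(!is_valid_index_uid(""));     // empty is rejected
        assert!(!is_valid_index_uid("café")); // non-ASCII is rejected
    }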
@@ -1,15 +1,17 @@
-use crate::error::{Code, ErrorCode};
-use crate::index_uid::IndexUid;
-use crate::star_or::StarOr;
+use std::hash::Hash;
+
 use enum_iterator::Sequence;
 use serde::{Deserialize, Serialize};
 use serde_json::{from_value, Value};
-use std::hash::Hash;
 use time::format_description::well_known::Rfc3339;
 use time::macros::{format_description, time};
 use time::{Date, OffsetDateTime, PrimitiveDateTime};
 use uuid::Uuid;
 
+use crate::error::{Code, ErrorCode};
+use crate::index_uid::IndexUid;
+use crate::star_or::StarOr;
+
 type Result<T> = std::result::Result<T, Error>;
 
 pub type KeyId = Uuid;
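The import reshuffle above is `group_imports = "StdExternalCrate"` at work: standard-library imports first, then external crates, then `crate`-local paths, with a blank line between groups. A compilable sketch of the resulting layout (the `keys` module and the serde_json dependency here are assumptions for illustration):

    use std::collections::BTreeMap; // group 1: std

    use serde_json::Value; // group 2: external crates

    use crate::keys::KeyId; // group 3: this crate

    pub mod keys {
        pub type KeyId = u128; // stand-in for the real Uuid-based alias
    }

    fn main() {
        let _by_key: BTreeMap<KeyId, Value> = BTreeMap::new();
    }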
@@ -74,16 +76,7 @@ impl Key {
         let created_at = OffsetDateTime::now_utc();
         let updated_at = created_at;
 
-        Ok(Self {
-            name,
-            description,
-            uid,
-            actions,
-            indexes,
-            expires_at,
-            created_at,
-            updated_at,
-        })
+        Ok(Self { name, description, uid, actions, indexes, expires_at, created_at, updated_at })
     }
 
     pub fn update_from_value(&mut self, value: Value) -> Result<()> {
@@ -7,8 +7,7 @@ pub mod star_or;
 pub mod tasks;
 
 pub use milli;
-pub use milli::heed;
-pub use milli::Index;
+pub use milli::{heed, Index};
 use uuid::Uuid;
 
 pub type Document = serde_json::Map<String, serde_json::Value>;
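Merging `pub use milli::heed;` and `pub use milli::Index;` into one statement is `imports_granularity = "Module"`: imports from the same module collapse into a single braced `use`. A std-only sketch:

    // Before, with one `use` per item:
    //     use std::io::Read;
    //     use std::io::Write;
    // After imports_granularity = "Module", one `use` per module:
    use std::io::{self, Read, Write};

    fn copy_all(mut from: impl Read, mut to: impl Write) -> io::Result<u64> {
        io::copy(&mut from, &mut to)
    }

    fn main() {
        let mut out = Vec::new();
        copy_all(&b"hello"[..], &mut out).unwrap();
        assert_eq!(out, b"hello");
    }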
@@ -376,9 +376,8 @@ pub fn settings(
     index: &Index,
     rtxn: &crate::heed::RoTxn,
 ) -> Result<Settings<Checked>, milli::Error> {
-    let displayed_attributes = index
-        .displayed_fields(rtxn)?
-        .map(|fields| fields.into_iter().map(String::from).collect());
+    let displayed_attributes =
+        index.displayed_fields(rtxn)?.map(|fields| fields.into_iter().map(String::from).collect());
 
     let searchable_attributes = index
         .user_defined_searchable_fields(rtxn)?
@@ -388,11 +387,7 @@ pub fn settings(
 
     let sortable_attributes = index.sortable_fields(rtxn)?.into_iter().collect();
 
-    let criteria = index
-        .criteria(rtxn)?
-        .into_iter()
-        .map(|c| c.to_string())
-        .collect();
+    let criteria = index.criteria(rtxn)?.into_iter().map(|c| c.to_string()).collect();
 
     let stop_words = index
         .stop_words(rtxn)?
@@ -408,12 +403,7 @@ pub fn settings(
     let synonyms = index
         .synonyms(rtxn)?
         .iter()
-        .map(|(key, values)| {
-            (
-                key.join(" "),
-                values.iter().map(|value| value.join(" ")).collect(),
-            )
-        })
+        .map(|(key, values)| (key.join(" "), values.iter().map(|value| value.join(" ")).collect()))
         .collect();
 
     let min_typo_word_len = MinWordSizeTyposSetting {
@@ -426,11 +416,7 @@ pub fn settings(
         None => BTreeSet::new(),
     };
 
-    let disabled_attributes = index
-        .exact_attributes(rtxn)?
-        .into_iter()
-        .map(String::from)
-        .collect();
+    let disabled_attributes = index.exact_attributes(rtxn)?.into_iter().map(String::from).collect();
 
     let typo_tolerance = TypoSettings {
         enabled: Setting::Set(index.authorize_typos(rtxn)?),
@@ -441,17 +427,13 @@ pub fn settings(
 
     let faceting = FacetingSettings {
         max_values_per_facet: Setting::Set(
-            index
-                .max_values_per_facet(rtxn)?
-                .unwrap_or(DEFAULT_VALUES_PER_FACET),
+            index.max_values_per_facet(rtxn)?.unwrap_or(DEFAULT_VALUES_PER_FACET),
         ),
     };
 
     let pagination = PaginationSettings {
         max_total_hits: Setting::Set(
-            index
-                .pagination_max_total_hits(rtxn)?
-                .unwrap_or(DEFAULT_PAGINATION_MAX_TOTAL_HITS),
+            index.pagination_max_total_hits(rtxn)?.unwrap_or(DEFAULT_PAGINATION_MAX_TOTAL_HITS),
         ),
     };
 
@@ -487,11 +469,7 @@ pub(crate) mod test {
     use super::*;
 
     pub(super) fn setting_strategy<T: Arbitrary + Clone>() -> impl Strategy<Value = Setting<T>> {
-        prop_oneof![
-            Just(Setting::NotSet),
-            Just(Setting::Reset),
-            any::<T>().prop_map(Setting::Set)
-        ]
+        prop_oneof![Just(Setting::NotSet), Just(Setting::Reset), any::<T>().prop_map(Setting::Set)]
     }
 
     #[test]
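The collapsed `prop_oneof!` still builds a strategy that yields each `Setting` variant; only the layout changed. A self-contained analogue with a stand-in `Setting` enum (assumes proptest as a dev-dependency):

    use proptest::prelude::*;

    // Stand-in for the crate's Setting<T>, for illustration only.
    #[derive(Clone, Debug)]
    enum Setting<T> {
        NotSet,
        Reset,
        Set(T),
    }

    fn setting_strategy<T: Arbitrary + Clone>() -> impl Strategy<Value = Setting<T>> {
        prop_oneof![Just(Setting::NotSet), Just(Setting::Reset), any::<T>().prop_map(Setting::Set)]
    }

    proptest! {
        #[test]
        fn generates_all_variants(s in setting_strategy::<u8>()) {
            // By construction every generated value is one of the three variants.
            match s {
                Setting::NotSet | Setting::Reset | Setting::Set(_) => {}
            }
        }
    }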
@@ -514,10 +492,7 @@ pub(crate) mod test {
 
         let checked = settings.clone().check();
         assert_eq!(settings.displayed_attributes, checked.displayed_attributes);
-        assert_eq!(
-            settings.searchable_attributes,
-            checked.searchable_attributes
-        );
+        assert_eq!(settings.searchable_attributes, checked.searchable_attributes);
 
         // test wildcard
         // test no changes
@@ -1,10 +1,11 @@
-use serde::de::Visitor;
-use serde::{Deserialize, Deserializer, Serialize, Serializer};
 use std::fmt::{Display, Formatter};
 use std::marker::PhantomData;
 use std::ops::Deref;
 use std::str::FromStr;
 
+use serde::de::Visitor;
+use serde::{Deserialize, Deserializer, Serialize, Serializer};
+
 /// A type that tries to match either a star (*) or
 /// any other thing that implements `FromStr`.
 #[derive(Debug, Clone)]
@@ -121,9 +122,10 @@ where
 
 #[cfg(test)]
 mod tests {
-    use super::*;
     use serde_json::{json, Value};
 
+    use super::*;
+
     #[test]
     fn star_or_serde_roundtrip() {
         fn roundtrip(content: Value, expected: StarOr<String>) {
@@ -1,5 +1,5 @@
-use std::fmt::{Display, Write};
 use std::collections::HashSet;
+use std::fmt::{Display, Write};
 use std::str::FromStr;
 
 use enum_iterator::Sequence;
@@ -9,12 +9,10 @@ use serde::{Deserialize, Serialize, Serializer};
 use time::{Duration, OffsetDateTime};
 use uuid::Uuid;
 
-use crate::{
-    error::{Code, ResponseError},
-    keys::Key,
-    settings::{Settings, Unchecked},
-    InstanceUid,
-};
+use crate::error::{Code, ResponseError};
+use crate::keys::Key;
+use crate::settings::{Settings, Unchecked};
+use crate::InstanceUid;
 
 pub type TaskId = u32;
 
@@ -66,9 +64,7 @@ impl Task {
     /// Return the content-uuid if there is one
     pub fn content_uuid(&self) -> Option<&Uuid> {
         match self.kind {
-            KindWithContent::DocumentImport {
-                ref content_file, ..
-            } => Some(content_file),
+            KindWithContent::DocumentImport { ref content_file, .. } => Some(content_file),
             KindWithContent::DocumentDeletion { .. }
             | KindWithContent::DocumentClear { .. }
             | KindWithContent::Settings { .. }
@@ -183,33 +179,32 @@ impl KindWithContent {
     /// `None` if it cannot be generated.
     pub fn default_details(&self) -> Option<Details> {
         match self {
-            KindWithContent::DocumentImport {
-                documents_count, ..
-            } => Some(Details::DocumentAddition {
-                received_documents: *documents_count,
-                indexed_documents: None,
-            }),
-            KindWithContent::DocumentDeletion {
-                index_uid: _,
-                documents_ids,
-            } => Some(Details::DocumentDeletion {
-                received_document_ids: documents_ids.len(),
-                deleted_documents: None,
-            }),
-            KindWithContent::DocumentClear { .. } => Some(Details::ClearAll {
-                deleted_documents: None,
-            }),
-            KindWithContent::Settings { new_settings, .. } => Some(Details::Settings {
-                settings: new_settings.clone(),
-            }),
+            KindWithContent::DocumentImport { documents_count, .. } => {
+                Some(Details::DocumentAddition {
+                    received_documents: *documents_count,
+                    indexed_documents: None,
+                })
+            }
+            KindWithContent::DocumentDeletion { index_uid: _, documents_ids } => {
+                Some(Details::DocumentDeletion {
+                    received_document_ids: documents_ids.len(),
+                    deleted_documents: None,
+                })
+            }
+            KindWithContent::DocumentClear { .. } => {
+                Some(Details::ClearAll { deleted_documents: None })
+            }
+            KindWithContent::Settings { new_settings, .. } => {
+                Some(Details::Settings { settings: new_settings.clone() })
+            }
             KindWithContent::IndexDeletion { .. } => None,
             KindWithContent::IndexCreation { primary_key, .. }
-            | KindWithContent::IndexUpdate { primary_key, .. } => Some(Details::IndexInfo {
-                primary_key: primary_key.clone(),
-            }),
-            KindWithContent::IndexSwap { swaps } => Some(Details::IndexSwap {
-                swaps: swaps.clone(),
-            }),
+            | KindWithContent::IndexUpdate { primary_key, .. } => {
+                Some(Details::IndexInfo { primary_key: primary_key.clone() })
+            }
+            KindWithContent::IndexSwap { swaps } => {
+                Some(Details::IndexSwap { swaps: swaps.clone() })
+            }
             KindWithContent::TaskCancelation { query, tasks } => Some(Details::TaskCancelation {
                 matched_tasks: tasks.len(),
                 canceled_tasks: None,
@@ -227,30 +222,29 @@ impl KindWithContent {
 
     pub fn default_finished_details(&self) -> Option<Details> {
         match self {
-            KindWithContent::DocumentImport {
-                documents_count, ..
-            } => Some(Details::DocumentAddition {
-                received_documents: *documents_count,
-                indexed_documents: Some(0),
-            }),
-            KindWithContent::DocumentDeletion {
-                index_uid: _,
-                documents_ids,
-            } => Some(Details::DocumentDeletion {
-                received_document_ids: documents_ids.len(),
-                deleted_documents: Some(0),
-            }),
-            KindWithContent::DocumentClear { .. } => Some(Details::ClearAll {
-                deleted_documents: None,
-            }),
-            KindWithContent::Settings { new_settings, .. } => Some(Details::Settings {
-                settings: new_settings.clone(),
-            }),
+            KindWithContent::DocumentImport { documents_count, .. } => {
+                Some(Details::DocumentAddition {
+                    received_documents: *documents_count,
+                    indexed_documents: Some(0),
+                })
+            }
+            KindWithContent::DocumentDeletion { index_uid: _, documents_ids } => {
+                Some(Details::DocumentDeletion {
+                    received_document_ids: documents_ids.len(),
+                    deleted_documents: Some(0),
+                })
+            }
+            KindWithContent::DocumentClear { .. } => {
+                Some(Details::ClearAll { deleted_documents: None })
+            }
+            KindWithContent::Settings { new_settings, .. } => {
+                Some(Details::Settings { settings: new_settings.clone() })
+            }
             KindWithContent::IndexDeletion { .. } => None,
             KindWithContent::IndexCreation { primary_key, .. }
-            | KindWithContent::IndexUpdate { primary_key, .. } => Some(Details::IndexInfo {
-                primary_key: primary_key.clone(),
-            }),
+            | KindWithContent::IndexUpdate { primary_key, .. } => {
+                Some(Details::IndexInfo { primary_key: primary_key.clone() })
+            }
             KindWithContent::IndexSwap { .. } => {
                 todo!()
             }
@@ -273,24 +267,24 @@ impl KindWithContent {
 impl From<&KindWithContent> for Option<Details> {
     fn from(kind: &KindWithContent) -> Self {
         match kind {
-            KindWithContent::DocumentImport {
-                documents_count, ..
-            } => Some(Details::DocumentAddition {
-                received_documents: *documents_count,
-                indexed_documents: None,
-            }),
+            KindWithContent::DocumentImport { documents_count, .. } => {
+                Some(Details::DocumentAddition {
+                    received_documents: *documents_count,
+                    indexed_documents: None,
+                })
+            }
             KindWithContent::DocumentDeletion { .. } => None,
             KindWithContent::DocumentClear { .. } => None,
-            KindWithContent::Settings { new_settings, .. } => Some(Details::Settings {
-                settings: new_settings.clone(),
-            }),
+            KindWithContent::Settings { new_settings, .. } => {
+                Some(Details::Settings { settings: new_settings.clone() })
+            }
             KindWithContent::IndexDeletion { .. } => None,
-            KindWithContent::IndexCreation { primary_key, .. } => Some(Details::IndexInfo {
-                primary_key: primary_key.clone(),
-            }),
-            KindWithContent::IndexUpdate { primary_key, .. } => Some(Details::IndexInfo {
-                primary_key: primary_key.clone(),
-            }),
+            KindWithContent::IndexCreation { primary_key, .. } => {
+                Some(Details::IndexInfo { primary_key: primary_key.clone() })
+            }
+            KindWithContent::IndexUpdate { primary_key, .. } => {
+                Some(Details::IndexInfo { primary_key: primary_key.clone() })
+            }
             KindWithContent::IndexSwap { .. } => None,
             KindWithContent::TaskCancelation { query, tasks } => Some(Details::TaskCancelation {
                 matched_tasks: tasks.len(),
@@ -302,9 +296,9 @@ impl From<&KindWithContent> for Option<Details> {
                 deleted_tasks: None,
                 original_query: query.clone(),
             }),
-            KindWithContent::DumpExport { dump_uid, .. } => Some(Details::Dump {
-                dump_uid: dump_uid.clone(),
-            }),
+            KindWithContent::DumpExport { dump_uid, .. } => {
+                Some(Details::Dump { dump_uid: dump_uid.clone() })
+            }
             KindWithContent::Snapshot => None,
         }
     }
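A recurring pattern across these hunks: arms whose right-hand side fits on one line stay as `Pattern => expr,`, while longer bodies end up wrapped in a block, which matches rustfmt's default `match_arm_blocks = true` behavior once an arm no longer fits beside its pattern. A small sketch:

    fn label(n: u32) -> String {
        match n {
            // Short body: stays inline after `=>`.
            0 => String::from("zero"),
            // A body too wide for max_width gets a block, as in the arms above.
            _ => {
                format!("a deliberately long description so the arm exceeds the configured width: {n}")
            }
        }
    }

    fn main() {
        assert_eq!(label(0), "zero");
    }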
@@ -514,9 +508,9 @@ pub fn serialize_duration<S: Serializer>(
 
 #[cfg(test)]
 mod tests {
-    use crate::heed::{types::SerdeJson, BytesDecode, BytesEncode};
-
     use super::Details;
+    use crate::heed::types::SerdeJson;
+    use crate::heed::{BytesDecode, BytesEncode};
 
     #[test]
     fn bad_deser() {
@@ -25,11 +25,7 @@ const SPLIT_SYMBOL: char = '.';
 /// ```
 fn contained_in(selector: &str, key: &str) -> bool {
     selector.starts_with(key)
-        && selector[key.len()..]
-            .chars()
-            .next()
-            .map(|c| c == SPLIT_SYMBOL)
-            .unwrap_or(true)
+        && selector[key.len()..].chars().next().map(|c| c == SPLIT_SYMBOL).unwrap_or(true)
 }
 
 /// Map the selected leaf values of a json allowing you to update only the fields that were selected.
@@ -244,10 +240,7 @@ mod tests {
     fn test_contained_in() {
         assert!(contained_in("animaux", "animaux"));
        assert!(contained_in("animaux.chien", "animaux"));
-        assert!(contained_in(
-            "animaux.chien.race.bouvier bernois.fourrure.couleur",
-            "animaux"
-        ));
+        assert!(contained_in("animaux.chien.race.bouvier bernois.fourrure.couleur", "animaux"));
         assert!(contained_in(
             "animaux.chien.race.bouvier bernois.fourrure.couleur",
             "animaux.chien"
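For anyone skimming the tests, the reformatted one-liner in `contained_in` implements "key is a dotted-path prefix of selector". A self-contained sketch with the predicate copied from the hunk above:

    const SPLIT_SYMBOL: char = '.';

    fn contained_in(selector: &str, key: &str) -> bool {
        selector.starts_with(key)
            && selector[key.len()..].chars().next().map(|c| c == SPLIT_SYMBOL).unwrap_or(true)
    }

    fn main() {
        assert!(contained_in("animaux", "animaux"));       // exact match
        assert!(contained_in("animaux.chien", "animaux")); // path prefix
        assert!(!contained_in("animaux_plus", "animaux")); // not a segment boundary
    }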
@@ -726,14 +719,12 @@ mod tests {
             }
         });
 
-        map_leaf_values(
-            value.as_object_mut().unwrap(),
-            ["jean.race.name"],
-            |key, value| match (value, key) {
-                (Value::String(name), "jean.race.name") => *name = S("patou"),
-                _ => unreachable!(),
-            },
-        );
+        map_leaf_values(value.as_object_mut().unwrap(), ["jean.race.name"], |key, value| {
+            match (value, key) {
+                (Value::String(name), "jean.race.name") => *name = S("patou"),
+                _ => unreachable!(),
+            }
+        });
 
         assert_eq!(
             value,
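After the reformat, the `map_leaf_values` call reads as an ordinary closure argument again. A hedged usage sketch, written as a test in the same crate so `map_leaf_values` (with the signature exercised above) is in scope; `json!` and `Value` come from serde_json:

    use serde_json::{json, Value};

    #[test]
    fn rename_selected_leaf() {
        let mut value = json!({ "jean": { "race": { "name": "bouvier bernois" } } });

        // Rewrite only the leaf selected by the dotted path; everything else stays intact.
        map_leaf_values(value.as_object_mut().unwrap(), ["jean.race.name"], |key, value| {
            if let (Value::String(name), "jean.race.name") = (value, key) {
                *name = "patou".to_string();
            }
        });

        assert_eq!(value, json!({ "jean": { "race": { "name": "patou" } } }));
    }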