Mirror of https://github.com/meilisearch/meilisearch.git, synced 2024-11-25 19:45:05 +08:00
gets rid of the log dependencies everywhere
This commit is contained in:
parent bcf7909bba
commit 7ff722b72e

Cargo.lock (generated): 4 changes
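The change is mechanical: every `log::<level>!` call site becomes the corresponding `tracing::<level>!` call, `log = "0.4.20"` is dropped from each Cargo.toml, and `tracing = "0.1.40"` takes its place. For plain format-string calls, the tracing macros accept the same syntax as the log macros, so each call compiles unchanged after the rename. A minimal sketch of the pattern (illustrative code, not taken from this commit):

// Illustrative sketch only: for format-string logging,
// tracing's macros are drop-in replacements for log's.
fn warn_unknown_code(other: &str) {
    // Before: log::warn!("Unknown error code {}", other);
    // After:
    tracing::warn!("Unknown error code {}", other);
}

fn main() {
    warn_unknown_code("immutable_field");
}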
@@ -1543,7 +1543,6 @@ dependencies = [
  "big_s",
  "flate2",
  "http 0.2.11",
- "log",
  "maplit",
  "meili-snap",
  "meilisearch-auth",
@@ -1557,6 +1556,7 @@ dependencies = [
  "tempfile",
  "thiserror",
  "time",
+ "tracing",
  "uuid",
 ]

@@ -2943,7 +2943,6 @@ dependencies = [
  "file-store",
  "flate2",
  "insta",
- "log",
  "meili-snap",
  "meilisearch-auth",
  "meilisearch-types",
@@ -3701,7 +3700,6 @@ dependencies = [
  "itertools 0.11.0",
  "jsonwebtoken",
  "lazy_static",
- "log",
  "manifest-dir-macros",
  "maplit",
  "meili-snap",
@@ -14,7 +14,6 @@ license.workspace = true
 anyhow = "1.0.79"
 flate2 = "1.0.28"
 http = "0.2.11"
-log = "0.4.20"
 meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-types = { path = "../meilisearch-types" }
 once_cell = "1.19.0"
@@ -26,6 +25,7 @@ tar = "0.4.40"
 tempfile = "3.9.0"
 thiserror = "1.0.56"
 time = { version = "0.3.31", features = ["serde-well-known", "formatting", "parsing", "macros"] }
+tracing = "0.1.40"
 uuid = { version = "1.6.1", features = ["serde", "v4"] }

 [dev-dependencies]
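One practical difference to keep in mind: a `log` record goes to whatever global logger is installed, while a `tracing` event is discarded unless a subscriber has been set up. Library crates like this one only emit events; the binary installs the subscriber. A minimal sketch of wiring one up, assuming the `tracing-subscriber` crate (whose `Targets` filter appears later in this diff as `use tracing_subscriber::filter::Targets;`); hypothetical setup, not the commit's own:

use tracing_subscriber::filter::Targets;
use tracing_subscriber::prelude::*;

fn main() {
    // Print events for one crate at DEBUG and above,
    // everything else at WARN and above.
    let filter = Targets::new()
        .with_default(tracing::Level::WARN)
        .with_target("index_scheduler", tracing::Level::DEBUG);

    tracing_subscriber::registry()
        .with(tracing_subscriber::fmt::layer())
        .with(filter)
        .init();

    tracing::warn!("events are now recorded instead of being dropped");
}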
@@ -120,7 +120,7 @@ impl From<v1::settings::Settings> for v2::Settings<v2::Unchecked> {
             criterion.as_ref().map(ToString::to_string)
         }
         Err(()) => {
-            log::warn!(
+            tracing::warn!(
                 "Could not import the following ranking rule: `{}`.",
                 ranking_rule
             );
@@ -152,11 +152,11 @@ impl From<v1::update::UpdateStatus> for Option<v2::updates::UpdateStatus> {
         use v2::updates::UpdateStatus as UpdateStatusV2;
         Some(match source {
             UpdateStatusV1::Enqueued { content } => {
-                log::warn!(
+                tracing::warn!(
                     "Cannot import task {} (importing enqueued tasks from v1 dumps is unsupported)",
                     content.update_id
                 );
-                log::warn!("Task will be skipped in the queue of imported tasks.");
+                tracing::warn!("Task will be skipped in the queue of imported tasks.");

                 return None;
             }
@@ -229,7 +229,7 @@ impl From<v1::update::UpdateType> for Option<v2::updates::UpdateMeta> {
         Some(match source {
             v1::update::UpdateType::ClearAll => v2::updates::UpdateMeta::ClearDocuments,
             v1::update::UpdateType::Customs => {
-                log::warn!("Ignoring task with type 'Customs' that is no longer supported");
+                tracing::warn!("Ignoring task with type 'Customs' that is no longer supported");
                 return None;
             }
             v1::update::UpdateType::DocumentsAddition { .. } => {
@@ -296,7 +296,7 @@ impl From<v1::settings::RankingRule> for Option<v2::settings::Criterion> {
             v1::settings::RankingRule::Proximity => Some(v2::settings::Criterion::Proximity),
             v1::settings::RankingRule::Attribute => Some(v2::settings::Criterion::Attribute),
             v1::settings::RankingRule::WordsPosition => {
-                log::warn!("Removing the 'WordsPosition' ranking rule that is no longer supported, please check the resulting ranking rules of your indexes");
+                tracing::warn!("Removing the 'WordsPosition' ranking rule that is no longer supported, please check the resulting ranking rules of your indexes");
                 None
             }
             v1::settings::RankingRule::Exactness => Some(v2::settings::Criterion::Exactness),
@@ -146,8 +146,8 @@ impl From<v2::updates::UpdateStatus> for v3::updates::UpdateStatus {
                 started_processing_at: processing.started_processing_at,
             }),
             Err(e) => {
-                log::warn!("Error with task {}: {}", processing.from.update_id, e);
-                log::warn!("Task will be marked as `Failed`.");
+                tracing::warn!("Error with task {}: {}", processing.from.update_id, e);
+                tracing::warn!("Task will be marked as `Failed`.");
                 v3::updates::UpdateStatus::Failed(v3::updates::Failed {
                     from: v3::updates::Processing {
                         from: v3::updates::Enqueued {
@@ -172,8 +172,8 @@ impl From<v2::updates::UpdateStatus> for v3::updates::UpdateStatus {
                 enqueued_at: enqueued.enqueued_at,
             }),
             Err(e) => {
-                log::warn!("Error with task {}: {}", enqueued.update_id, e);
-                log::warn!("Task will be marked as `Failed`.");
+                tracing::warn!("Error with task {}: {}", enqueued.update_id, e);
+                tracing::warn!("Task will be marked as `Failed`.");
                 v3::updates::UpdateStatus::Failed(v3::updates::Failed {
                     from: v3::updates::Processing {
                         from: v3::updates::Enqueued {
@@ -353,7 +353,7 @@ impl From<String> for v3::Code {
             "malformed_payload" => v3::Code::MalformedPayload,
             "missing_payload" => v3::Code::MissingPayload,
             other => {
-                log::warn!("Unknown error code {}", other);
+                tracing::warn!("Unknown error code {}", other);
                 v3::Code::UnretrievableErrorCode
             }
         }
@@ -76,20 +76,20 @@ impl CompatV3ToV4 {
             let index_uid = match index_uid {
                 Some(uid) => uid,
                 None => {
-                    log::warn!(
+                    tracing::warn!(
                         "Error while importing the update {}.",
                         task.update.id()
                     );
-                    log::warn!(
+                    tracing::warn!(
                         "The index associated to the uuid `{}` could not be retrieved.",
                         task.uuid.to_string()
                     );
                     if task.update.is_finished() {
                         // we're fucking with his history but not his data, that's ok-ish.
-                        log::warn!("The index-uuid will be set as `unknown`.");
+                        tracing::warn!("The index-uuid will be set as `unknown`.");
                         String::from("unknown")
                     } else {
-                        log::warn!("The task will be ignored.");
+                        tracing::warn!("The task will be ignored.");
                         return None;
                     }
                 }
@@ -305,7 +305,7 @@ impl From<v4::ResponseError> for v5::ResponseError {
             "invalid_api_key_expires_at" => v5::Code::InvalidApiKeyExpiresAt,
             "invalid_api_key_description" => v5::Code::InvalidApiKeyDescription,
             other => {
-                log::warn!("Unknown error code {}", other);
+                tracing::warn!("Unknown error code {}", other);
                 v5::Code::UnretrievableErrorCode
             }
         };
@@ -304,7 +304,7 @@ impl From<v5::ResponseError> for v6::ResponseError {
             "immutable_field" => v6::Code::BadRequest,
             "api_key_already_exists" => v6::Code::ApiKeyAlreadyExists,
             other => {
-                log::warn!("Unknown error code {}", other);
+                tracing::warn!("Unknown error code {}", other);
                 v6::Code::UnretrievableErrorCode
             }
         };
@@ -329,7 +329,7 @@ impl<T> From<v5::Settings<T>> for v6::Settings<v6::Unchecked> {
                     new_ranking_rules.push(new_rule);
                 }
                 Err(_) => {
-                    log::warn!("Error while importing settings. The ranking rule `{rule}` does not exist anymore.")
+                    tracing::warn!("Error while importing settings. The ranking rule `{rule}` does not exist anymore.")
                 }
             }
         }
@@ -2,10 +2,10 @@ use std::fs::{self, File};
 use std::io::{BufRead, BufReader, ErrorKind};
 use std::path::Path;

-use log::debug;
 pub use meilisearch_types::milli;
 use tempfile::TempDir;
 use time::OffsetDateTime;
+use tracing::debug;
 use uuid::Uuid;

 use super::Document;
@@ -19,7 +19,6 @@ dump = { path = "../dump" }
 enum-iterator = "1.5.0"
 file-store = { path = "../file-store" }
 flate2 = "1.0.28"
-log = "0.4.20"
 meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-types = { path = "../meilisearch-types" }
 page_size = "0.5.0"
@@ -3,13 +3,13 @@ use std::sync::{Arc, RwLock};
 use std::time::Duration;
 use std::{fs, thread};

-use log::error;
 use meilisearch_types::heed::types::{SerdeJson, Str};
 use meilisearch_types::heed::{Database, Env, RoTxn, RwTxn};
 use meilisearch_types::milli::update::IndexerConfig;
 use meilisearch_types::milli::{FieldDistribution, Index};
 use serde::{Deserialize, Serialize};
 use time::OffsetDateTime;
+use tracing::error;
 use uuid::Uuid;

 use self::index_map::IndexMap;
@@ -535,17 +535,17 @@ impl IndexScheduler {
         let budget = if Self::is_good_heed(tasks_path, DEFAULT_BUDGET) {
             DEFAULT_BUDGET
         } else {
-            log::debug!("determining budget with dichotomic search");
+            tracing::debug!("determining budget with dichotomic search");
             utils::dichotomic_search(DEFAULT_BUDGET / 2, |map_size| {
                 Self::is_good_heed(tasks_path, map_size)
             })
         };

-        log::debug!("memmap budget: {budget}B");
+        tracing::debug!("memmap budget: {budget}B");
         let mut budget = budget / 2;
         if task_db_size > (budget / 2) {
             task_db_size = clamp_to_page_size(budget * 2 / 5);
-            log::debug!(
+            tracing::debug!(
                 "Decreasing max size of task DB to {task_db_size}B due to constrained memory space"
             );
         }
@@ -555,13 +555,13 @@ impl IndexScheduler {
         let budget = budget;
         let task_db_size = task_db_size;

-        log::debug!("index budget: {budget}B");
+        tracing::debug!("index budget: {budget}B");
         let mut index_count = budget / base_map_size;
         if index_count < 2 {
             // take a bit less than half than the budget to make sure we can always afford to open an index
             let map_size = (budget * 2) / 5;
             // single index of max budget
-            log::debug!("1 index of {map_size}B can be opened simultaneously.");
+            tracing::debug!("1 index of {map_size}B can be opened simultaneously.");
             return IndexBudget { map_size, index_count: 1, task_db_size };
         }
         // give us some space for an additional index when the cache is already full
@@ -570,7 +570,7 @@ impl IndexScheduler {
         if index_count > max_index_count {
             index_count = max_index_count;
         }
-        log::debug!("Up to {index_count} indexes of {base_map_size}B opened simultaneously.");
+        tracing::debug!("Up to {index_count} indexes of {base_map_size}B opened simultaneously.");
         IndexBudget { map_size: base_map_size, index_count, task_db_size }
     }

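The budget computation above calls `utils::dichotomic_search`, bisecting for the largest memory-map size that `is_good_heed` still accepts whenever the default budget cannot be mapped. A self-contained sketch of that bisection idea (hypothetical code, not meilisearch's actual `utils` implementation):

// Bisect for the largest value below `start * 2` accepted by a monotone
// predicate: once `is_ok` fails for a size, it is assumed to fail for
// all larger sizes. Hypothetical helper for illustration only.
fn dichotomic_search(start: usize, is_ok: impl Fn(usize) -> bool) -> usize {
    let (mut lo, mut hi) = if is_ok(start) { (start, start * 2) } else { (0, start) };
    while hi - lo > 1 {
        let mid = lo + (hi - lo) / 2;
        if is_ok(mid) {
            lo = mid; // mid still maps fine, search higher
        } else {
            hi = mid; // mid is too big, search lower
        }
    }
    lo
}

fn main() {
    // Pretend the OS refuses any mapping larger than 1337 bytes.
    assert_eq!(dichotomic_search(1024, |size| size <= 1337), 1337);
}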
@@ -617,7 +617,7 @@ impl IndexScheduler {
                 Ok(TickOutcome::TickAgain(_)) => (),
                 Ok(TickOutcome::WaitForSignal) => run.wake_up.wait(),
                 Err(e) => {
-                    log::error!("{e}");
+                    tracing::error!("{e}");
                     // Wait one second when an irrecoverable error occurs.
                     if !e.is_recoverable() {
                         std::thread::sleep(Duration::from_secs(1));
@@ -634,15 +634,15 @@ impl IndexScheduler {
             let mut file = match File::create(format!("{}.puffin", now)) {
                 Ok(file) => file,
                 Err(e) => {
-                    log::error!("{e}");
+                    tracing::error!("{e}");
                     continue;
                 }
             };
             if let Err(e) = frame_view.save_to_writer(&mut file) {
-                log::error!("{e}");
+                tracing::error!("{e}");
             }
             if let Err(e) = file.sync_all() {
-                log::error!("{e}");
+                tracing::error!("{e}");
             }
             // We erase this frame view as it is no more useful. We want to
             // measure the new frames now that we exported the previous ones.
@@ -1190,7 +1190,7 @@ impl IndexScheduler {
                 self.update_task(&mut wtxn, &task)
                     .map_err(|e| Error::TaskDatabaseUpdate(Box::new(e)))?;
                 if let Err(e) = self.delete_persisted_task_data(&task) {
-                    log::error!("Failure to delete the content files associated with task {}. Error: {e}", task.uid);
+                    tracing::error!("Failure to delete the content files associated with task {}. Error: {e}", task.uid);
                 }
             }
             tracing::info!("A batch of tasks was successfully completed.");
@@ -1247,7 +1247,7 @@ impl IndexScheduler {
                 self.maybe_fail(tests::FailureLocation::UpdatingTaskAfterProcessBatchFailure)?;

                 if let Err(e) = self.delete_persisted_task_data(&task) {
-                    log::error!("Failure to delete the content files associated with task {}. Error: {e}", task.uid);
+                    tracing::error!("Failure to delete the content files associated with task {}. Error: {e}", task.uid);
                 }
                 self.update_task(&mut wtxn, &task)
                     .map_err(|e| Error::TaskDatabaseUpdate(Box::new(e)))?;
@@ -1341,7 +1341,7 @@ impl IndexScheduler {
             };

             if let Err(e) = request.send(reader) {
-                log::error!("While sending data to the webhook: {e}");
+                tracing::error!("While sending data to the webhook: {e}");
             }
         }

@@ -1367,12 +1367,12 @@ impl IndexScheduler {
         // /!\ the len must be at least 2 or else we might enter an infinite loop where we only delete
         // the deletion tasks we enqueued ourselves.
         if to_delete.len() < 2 {
-            log::warn!("The task queue is almost full, but no task can be deleted yet.");
+            tracing::warn!("The task queue is almost full, but no task can be deleted yet.");
            // the only thing we can do is hope that the user tasks are going to finish
            return Ok(());
         }

-        log::info!(
+        tracing::info!(
            "The task queue is almost full. Deleting the oldest {} finished tasks.",
            to_delete.len()
        );
@@ -54,7 +54,6 @@ is-terminal = "0.4.10"
 itertools = "0.11.0"
 jsonwebtoken = "8.3.0"
 lazy_static = "1.4.0"
-log = "0.4.20"
 meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-types = { path = "../meilisearch-types" }
 mimalloc = { version = "0.1.39", default-features = false }
@@ -29,7 +29,6 @@ use error::PayloadError;
 use extractors::payload::PayloadConfig;
 use http::header::CONTENT_TYPE;
 use index_scheduler::{IndexScheduler, IndexSchedulerOptions};
-use log::error;
 use meilisearch_auth::AuthController;
 use meilisearch_types::milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader};
 use meilisearch_types::milli::update::{IndexDocumentsConfig, IndexDocumentsMethod};
@@ -39,6 +38,7 @@ use meilisearch_types::versioning::{check_version_file, create_version_file};
 use meilisearch_types::{compression, milli, VERSION_FILE_NAME};
 pub use option::Opt;
 use option::ScheduleSnapshot;
+use tracing::error;
 use tracing_subscriber::filter::Targets;

 use crate::error::MeilisearchHttpError;
@@ -293,13 +293,13 @@ fn import_dump(
     let mut dump_reader = dump::DumpReader::open(reader)?;

     if let Some(date) = dump_reader.date() {
-        log::info!(
+        tracing::info!(
             "Importing a dump of meilisearch `{:?}` from the {}",
             dump_reader.version(), // TODO: get the meilisearch version instead of the dump version
             date
         );
     } else {
-        log::info!(
+        tracing::info!(
             "Importing a dump of meilisearch `{:?}`",
             dump_reader.version(), // TODO: get the meilisearch version instead of the dump version
         );
@@ -335,7 +335,7 @@ fn import_dump(
     for index_reader in dump_reader.indexes()? {
         let mut index_reader = index_reader?;
         let metadata = index_reader.metadata();
-        log::info!("Importing index `{}`.", metadata.uid);
+        tracing::info!("Importing index `{}`.", metadata.uid);

         let date = Some((metadata.created_at, metadata.updated_at));
         let index = index_scheduler.create_raw_index(&metadata.uid, date)?;
@@ -349,14 +349,15 @@ fn import_dump(
         }

         // 4.2 Import the settings.
-        log::info!("Importing the settings.");
+        tracing::info!("Importing the settings.");
         let settings = index_reader.settings()?;
         apply_settings_to_builder(&settings, &mut builder);
-        builder.execute(|indexing_step| log::debug!("update: {:?}", indexing_step), || false)?;
+        builder
+            .execute(|indexing_step| tracing::debug!("update: {:?}", indexing_step), || false)?;

         // 4.3 Import the documents.
         // 4.3.1 We need to recreate the grenad+obkv format accepted by the index.
-        log::info!("Importing the documents.");
+        tracing::info!("Importing the documents.");
         let file = tempfile::tempfile()?;
         let mut builder = DocumentsBatchBuilder::new(BufWriter::new(file));
         for document in index_reader.documents()? {
@@ -378,15 +379,15 @@ fn import_dump(
                 update_method: IndexDocumentsMethod::ReplaceDocuments,
                 ..Default::default()
             },
-            |indexing_step| log::trace!("update: {:?}", indexing_step),
+            |indexing_step| tracing::trace!("update: {:?}", indexing_step),
             || false,
         )?;

         let (builder, user_result) = builder.add_documents(reader)?;
-        log::info!("{} documents found.", user_result?);
+        tracing::info!("{} documents found.", user_result?);
         builder.execute()?;
         wtxn.commit()?;
-        log::info!("All documents successfully imported.");
+        tracing::info!("All documents successfully imported.");
     }

     let mut index_scheduler_dump = index_scheduler.register_dumped_task()?;
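Worth noting: the commit keeps the existing format strings, e.g. `tracing::debug!("update: {:?}", indexing_step)`. tracing can additionally record values as structured fields, which subscribers can filter or serialize without parsing the message text. A hypothetical sketch of the same event in field form (not what this commit does; the struct is invented for illustration):

#[derive(Debug)]
struct IndexingStep {
    documents_seen: usize,
}

fn report(indexing_step: &IndexingStep) {
    // Format-string style, as used throughout this commit:
    tracing::debug!("update: {:?}", indexing_step);
    // Field style: the `?` sigil records the value as a structured
    // field using its Debug implementation.
    tracing::debug!(indexing_step = ?indexing_step, "update");
}

fn main() {
    report(&IndexingStep { documents_seen: 42 });
}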
@@ -1,7 +1,7 @@
 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
 use index_scheduler::IndexScheduler;
-use log::debug;
+use tracing::debug;
 use meilisearch_auth::AuthController;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::tasks::KindWithContent;
@@ -3,11 +3,11 @@ use actix_web::{HttpRequest, HttpResponse};
 use deserr::actix_web::AwebJson;
 use deserr::Deserr;
 use index_scheduler::IndexScheduler;
-use log::debug;
 use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::keys::actions;
 use serde_json::json;
+use tracing::debug;

 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::ActionPolicy;
@@ -8,7 +8,6 @@ use deserr::actix_web::{AwebJson, AwebQueryParameter};
 use deserr::Deserr;
 use futures::StreamExt;
 use index_scheduler::IndexScheduler;
-use log::debug;
 use meilisearch_types::deserr::query_params::Param;
 use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError};
 use meilisearch_types::document_formats::{read_csv, read_json, read_ndjson, PayloadType};
@@ -28,6 +27,7 @@ use serde_json::Value;
 use tempfile::tempfile;
 use tokio::fs::File;
 use tokio::io::{AsyncSeekExt, AsyncWriteExt, BufWriter};
+use tracing::debug;

 use crate::analytics::{Analytics, DocumentDeletionKind, DocumentFetchKind};
 use crate::error::MeilisearchHttpError;
@@ -427,7 +427,7 @@ async fn document_addition(
         Err(index_scheduler::Error::FileStore(file_store::Error::IoError(e)))
             if e.kind() == ErrorKind::NotFound => {}
         Err(e) => {
-            log::warn!("Unknown error happened while deleting a malformed update file with uuid {uuid}: {e}");
+            tracing::warn!("Unknown error happened while deleting a malformed update file with uuid {uuid}: {e}");
         }
     }
     // We still want to return the original error to the end user.
@@ -2,12 +2,12 @@ use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
 use deserr::actix_web::AwebJson;
 use index_scheduler::IndexScheduler;
-use log::debug;
 use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::deserr_codes::*;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::index_uid::IndexUid;
 use serde_json::Value;
+use tracing::debug;

 use crate::analytics::{Analytics, FacetSearchAggregator};
 use crate::extractors::authentication::policies::*;
@@ -5,7 +5,6 @@ use actix_web::{web, HttpRequest, HttpResponse};
 use deserr::actix_web::{AwebJson, AwebQueryParameter};
 use deserr::{DeserializeError, Deserr, ValuePointerRef};
 use index_scheduler::IndexScheduler;
-use log::debug;
 use meilisearch_types::deserr::query_params::Param;
 use meilisearch_types::deserr::{immutable_field_error, DeserrJsonError, DeserrQueryParamError};
 use meilisearch_types::error::deserr_codes::*;
@@ -16,6 +15,7 @@ use meilisearch_types::tasks::KindWithContent;
 use serde::Serialize;
 use serde_json::json;
 use time::OffsetDateTime;
+use tracing::debug;

 use super::{Pagination, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT};
 use crate::analytics::Analytics;
@@ -2,7 +2,6 @@ use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
 use deserr::actix_web::{AwebJson, AwebQueryParameter};
 use index_scheduler::IndexScheduler;
-use log::{debug, warn};
 use meilisearch_types::deserr::query_params::Param;
 use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError};
 use meilisearch_types::error::deserr_codes::*;
@@ -12,6 +11,7 @@ use meilisearch_types::milli;
 use meilisearch_types::milli::vector::DistributionShift;
 use meilisearch_types::serde_cs::vec::CS;
 use serde_json::Value;
+use tracing::{debug, warn};

 use crate::analytics::{Analytics, SearchAggregator};
 use crate::extractors::authentication::policies::*;
@@ -2,7 +2,6 @@ use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
 use deserr::actix_web::AwebJson;
 use index_scheduler::IndexScheduler;
-use log::debug;
 use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::facet_values_sort::FacetValuesSort;
@@ -11,6 +10,7 @@ use meilisearch_types::milli::update::Setting;
 use meilisearch_types::settings::{settings, RankingRuleView, Settings, Unchecked};
 use meilisearch_types::tasks::KindWithContent;
 use serde_json::json;
+use tracing::debug;

 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
@@ -24,12 +24,12 @@ macro_rules! make_setting_route {
         use actix_web::web::Data;
         use actix_web::{web, HttpRequest, HttpResponse, Resource};
         use index_scheduler::IndexScheduler;
-        use log::debug;
         use meilisearch_types::error::ResponseError;
         use meilisearch_types::index_uid::IndexUid;
        use meilisearch_types::milli::update::Setting;
         use meilisearch_types::settings::{settings, Settings};
         use meilisearch_types::tasks::KindWithContent;
+        use tracing::debug;
         use $crate::analytics::Analytics;
         use $crate::extractors::authentication::policies::*;
         use $crate::extractors::authentication::GuardedData;
@@ -3,7 +3,6 @@ use std::collections::BTreeMap;
 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
 use index_scheduler::IndexScheduler;
-use log::debug;
 use meilisearch_auth::AuthController;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::settings::{Settings, Unchecked};
@@ -11,6 +10,7 @@ use meilisearch_types::tasks::{Kind, Status, Task, TaskId};
 use serde::{Deserialize, Serialize};
 use serde_json::json;
 use time::OffsetDateTime;
+use tracing::debug;

 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
@@ -3,11 +3,11 @@ use actix_web::web::{self, Data};
 use actix_web::{HttpRequest, HttpResponse};
 use deserr::actix_web::AwebJson;
 use index_scheduler::IndexScheduler;
-use log::debug;
 use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::keys::actions;
 use serde::Serialize;
+use tracing::debug;

 use crate::analytics::{Analytics, MultiSearchAggregator};
 use crate::extractors::authentication::policies::ActionPolicy;
@@ -1,10 +1,10 @@
 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
 use index_scheduler::IndexScheduler;
-use log::debug;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::tasks::KindWithContent;
 use serde_json::json;
+use tracing::debug;

 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;