Structures a bunch of logs

Tamo 2024-02-07 17:55:40 +01:00 committed by Louis Dureuil
parent d71b77f18b
commit 08af0e690c
14 changed files with 77 additions and 67 deletions
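
In short: string-interpolated `debug!`/`info!` calls are replaced with `tracing`'s structured fields and spans, so values travel as named key-value pairs instead of being baked into the message text. Below is a minimal sketch of the before/after pattern, assuming the `tracing` 0.1 macros; `log_task` and `task` are illustrative names, not part of this diff:

use tracing::{debug, debug_span};

fn log_task(task: &impl std::fmt::Debug) {
    // Before: the value only exists inside the formatted message string,
    // so log processors must parse text to recover it.
    debug!("returns: {:?}", task);

    // After: `returns` is recorded as a named field on a span; the `?`
    // sigil captures the value with its Debug implementation.
    debug_span!("Create dump", returns = ?task);
}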

View File

@@ -294,14 +294,14 @@ fn import_dump(
     if let Some(date) = dump_reader.date() {
         tracing::info!(
-            "Importing a dump of meilisearch `{:?}` from the {}",
-            dump_reader.version(), // TODO: get the meilisearch version instead of the dump version
-            date
+            version = ?dump_reader.version(), // TODO: get the meilisearch version instead of the dump version
+            %date,
+            "Importing a dump of meilisearch"
         );
     } else {
         tracing::info!(
-            "Importing a dump of meilisearch `{:?}`",
-            dump_reader.version(), // TODO: get the meilisearch version instead of the dump version
+            version = ?dump_reader.version(), // TODO: get the meilisearch version instead of the dump version
+            "Importing a dump of meilisearch",
         );
     }
@@ -384,7 +384,8 @@ fn import_dump(
     )?;
     let (builder, user_result) = builder.add_documents(reader)?;
-    tracing::info!("{} documents found.", user_result?);
+    let user_result = user_result?;
+    tracing::info!(documents_found = user_result, "{} documents found.", user_result);
     builder.execute()?;
     wtxn.commit()?;
     tracing::info!("All documents successfully imported.");

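The hunk above uses both of tracing's field sigils. A small sketch of their semantics, assuming the tracing 0.1 macro grammar; the stand-in values are illustrative, not taken from the dump reader:

use tracing::info;

fn example() {
    let date = "2024-02-07";    // illustrative stand-in for dump_reader.date()
    let version = (1, 6, 0);    // illustrative stand-in for dump_reader.version()
    // `?version` records the field with Debug, `%date` with Display;
    // the message string comes after the fields in tracing's macros.
    info!(version = ?version, %date, "Importing a dump of meilisearch");
}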
View File

@@ -1,11 +1,11 @@
 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
 use index_scheduler::IndexScheduler;
-use tracing::debug;
 use meilisearch_auth::AuthController;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::tasks::KindWithContent;
 use serde_json::json;
+use tracing::debug_span;

 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
@@ -32,6 +32,6 @@ pub async fn create_dump(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
-    debug!("returns: {:?}", task);
+    debug_span!("Create dump", returns = ?task);
     Ok(HttpResponse::Accepted().json(task))
 }

View File

@@ -7,7 +7,7 @@ use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::keys::actions;
 use serde_json::json;
-use tracing::debug;
+use tracing::{debug_span};

 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::ActionPolicy;
@@ -33,8 +33,9 @@ async fn get_features(
     let features = index_scheduler.features();

     analytics.publish("Experimental features Seen".to_string(), json!(null), Some(&req));
-    debug!("returns: {:?}", features.runtime_features());
-    HttpResponse::Ok().json(features.runtime_features())
+    let features = features.runtime_features();
+    debug_span!("Get features", returns = ?features);
+    HttpResponse::Ok().json(features)
 }

 #[derive(Debug, Deserr)]
@@ -60,6 +61,7 @@ async fn patch_features(
     analytics: Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let features = index_scheduler.features();
+    debug_span!("Patch features", parameters = ?new_features);

     let old_features = features.runtime_features();
     let new_features = meilisearch_types::features::RuntimeTogglableFeatures {
@@ -93,5 +95,6 @@ async fn patch_features(
         Some(&req),
     );
     index_scheduler.put_runtime_features(new_features)?;
+    debug_span!("Patch features", returns = ?new_features);
     Ok(HttpResponse::Ok().json(new_features))
 }

View File

@@ -27,7 +27,7 @@ use serde_json::Value;
 use tempfile::tempfile;
 use tokio::fs::File;
 use tokio::io::{AsyncSeekExt, AsyncWriteExt, BufWriter};
-use tracing::debug;
+use tracing::{debug_span};

 use crate::analytics::{Analytics, DocumentDeletionKind, DocumentFetchKind};
 use crate::error::MeilisearchHttpError;
@@ -101,6 +101,7 @@ pub async fn get_document(
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let DocumentParam { index_uid, document_id } = document_param.into_inner();
+    debug_span!("Get document", parameters = ?params);
     let index_uid = IndexUid::try_from(index_uid)?;

     analytics.get_fetch_documents(&DocumentFetchKind::PerDocumentId, &req);
@@ -110,7 +111,7 @@ pub async fn get_document(
     let index = index_scheduler.index(&index_uid)?;
     let document = retrieve_document(&index, &document_id, attributes_to_retrieve)?;
-    debug!("returns: {:?}", document);
+    debug_span!("Get document", returns = ?document);
     Ok(HttpResponse::Ok().json(document))
 }
@@ -131,7 +132,7 @@ pub async fn delete_document(
     };
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
-    debug!("returns: {:?}", task);
+    debug_span!("Delete document", returns = ?task);
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -168,9 +169,8 @@ pub async fn documents_by_query_post(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug!("called with body: {:?}", body);
     let body = body.into_inner();
+    debug_span!("Get documents POST", parameters = ?body);

     analytics.post_fetch_documents(
         &DocumentFetchKind::Normal {
@@ -191,7 +191,7 @@ pub async fn get_documents(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug!("called with params: {:?}", params);
+    debug_span!("Get documents GET", parameters = ?params);

     let BrowseQueryGet { limit, offset, fields, filter } = params.into_inner();
@@ -235,7 +235,7 @@ fn documents_by_query(
     let ret = PaginationView::new(offset, limit, total as usize, documents);

-    debug!("returns: {:?}", ret);
+    debug_span!("Get documents", returns = ?ret);
     Ok(HttpResponse::Ok().json(ret))
 }
@@ -271,7 +271,7 @@ pub async fn replace_documents(
 ) -> Result<HttpResponse, ResponseError> {
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;

-    debug!("called with params: {:?}", params);
+    debug_span!("Replace documents", parameters = ?params);
     let params = params.into_inner();

     analytics.add_documents(&params, index_scheduler.index(&index_uid).is_err(), &req);
@@ -288,6 +288,7 @@ pub async fn replace_documents(
         allow_index_creation,
     )
     .await?;
+    debug_span!("Replace documents", returns = ?task);

     Ok(HttpResponse::Accepted().json(task))
 }
@@ -302,8 +303,8 @@ pub async fn update_documents(
 ) -> Result<HttpResponse, ResponseError> {
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;

-    debug!("called with params: {:?}", params);
     let params = params.into_inner();
+    debug_span!("Update documents", parameters = ?params);

     analytics.update_documents(&params, index_scheduler.index(&index_uid).is_err(), &req);
@@ -319,6 +320,7 @@ pub async fn update_documents(
         allow_index_creation,
     )
     .await?;
+    debug_span!("Update documents", returns = ?task);

     Ok(HttpResponse::Accepted().json(task))
 }
@@ -427,7 +429,10 @@ async fn document_addition(
             Err(index_scheduler::Error::FileStore(file_store::Error::IoError(e)))
                 if e.kind() == ErrorKind::NotFound => {}
             Err(e) => {
-                tracing::warn!("Unknown error happened while deleting a malformed update file with uuid {uuid}: {e}");
+                tracing::warn!(
+                    index_uuid = %uuid,
+                    "Unknown error happened while deleting a malformed update file: {e}"
+                );
             }
         }
         // We still want to return the original error to the end user.
@@ -453,7 +458,6 @@ async fn document_addition(
         }
     };

-    debug!("returns: {:?}", task);
     Ok(task.into())
 }
@@ -464,7 +468,7 @@ pub async fn delete_documents_batch(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug!("called with params: {:?}", body);
+    debug_span!("Delete documents by batch", parameters = ?body);
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;

     analytics.delete_documents(DocumentDeletionKind::PerBatch, &req);
@@ -479,7 +483,7 @@ pub async fn delete_documents_batch(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
-    debug!("returns: {:?}", task);
+    debug_span!("Delete documents by batch", returns = ?task);
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -497,7 +501,7 @@ pub async fn delete_documents_by_filter(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug!("called with params: {:?}", body);
+    debug_span!("Delete documents by filter", parameters = ?body);
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
     let index_uid = index_uid.into_inner();
     let filter = body.into_inner().filter;
@@ -515,7 +519,7 @@ pub async fn delete_documents_by_filter(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
-    debug!("returns: {:?}", task);
+    debug_span!("Delete documents by filter", returns = ?task);
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -532,7 +536,7 @@ pub async fn clear_all_documents(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
-    debug!("returns: {:?}", task);
+    debug_span!("Delete all documents", returns = ?task);
     Ok(HttpResponse::Accepted().json(task))
 }

View File

@@ -7,7 +7,7 @@ use meilisearch_types::error::deserr_codes::*;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::index_uid::IndexUid;
 use serde_json::Value;
-use tracing::debug;
+use tracing::debug_span;

 use crate::analytics::{Analytics, FacetSearchAggregator};
 use crate::extractors::authentication::policies::*;
@@ -56,7 +56,7 @@ pub async fn search(
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;

     let query = params.into_inner();
-    debug!("facet search called with params: {:?}", query);
+    debug_span!("Facet search", parameters = ?query);

     let mut aggregate = FacetSearchAggregator::from_query(&query, &req);
@@ -83,7 +83,7 @@ pub async fn search(
     let search_result = search_result?;

-    debug!("returns: {:?}", search_result);
+    debug_span!("Facet search", returns = ?search_result);
     Ok(HttpResponse::Ok().json(search_result))
 }

View File

@@ -15,7 +15,7 @@ use meilisearch_types::tasks::KindWithContent;
 use serde::Serialize;
 use serde_json::json;
 use time::OffsetDateTime;
-use tracing::debug;
+use tracing::debug_span;

 use super::{Pagination, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT};
 use crate::analytics::Analytics;
@@ -93,6 +93,7 @@ pub async fn list_indexes(
     index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, Data<IndexScheduler>>,
     paginate: AwebQueryParameter<ListIndexes, DeserrQueryParamError>,
 ) -> Result<HttpResponse, ResponseError> {
+    debug_span!("List indexes", parameters = ?paginate);
     let filters = index_scheduler.filters();
     let indexes: Vec<Option<IndexView>> =
         index_scheduler.try_for_each_index(|uid, index| -> Result<Option<IndexView>, _> {
@@ -105,7 +106,7 @@ pub async fn list_indexes(
     let indexes: Vec<IndexView> = indexes.into_iter().flatten().collect();

     let ret = paginate.as_pagination().auto_paginate_sized(indexes.into_iter());
-    debug!("returns: {:?}", ret);
+    debug_span!("List indexes", returns = ?ret);
     Ok(HttpResponse::Ok().json(ret))
 }
@@ -124,6 +125,7 @@ pub async fn create_index(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
+    debug_span!("Create index", parameters = ?body);
     let IndexCreateRequest { primary_key, uid } = body.into_inner();

     let allow_index_creation = index_scheduler.filters().allow_index_creation(&uid);
@@ -137,6 +139,7 @@ pub async fn create_index(
         let task = KindWithContent::IndexCreation { index_uid: uid.to_string(), primary_key };
         let task: SummarizedTaskView =
             tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
+        debug_span!("Create index", returns = ?task);

         Ok(HttpResponse::Accepted().json(task))
     } else {
@@ -177,7 +180,7 @@ pub async fn get_index(
     let index = index_scheduler.index(&index_uid)?;
     let index_view = IndexView::new(index_uid.into_inner(), &index)?;

-    debug!("returns: {:?}", index_view);
+    debug_span!("Get index", returns = ?index_view);

     Ok(HttpResponse::Ok().json(index_view))
 }
@@ -189,7 +192,7 @@ pub async fn update_index(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug!("called with params: {:?}", body);
+    debug_span!("Update index", parameters = ?body);
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
     let body = body.into_inner();
     analytics.publish(
@@ -206,7 +209,7 @@ pub async fn update_index(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();

-    debug!("returns: {:?}", task);
+    debug_span!("Update index", returns = ?task);
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -218,6 +221,7 @@ pub async fn delete_index(
     let task = KindWithContent::IndexDeletion { index_uid: index_uid.into_inner() };
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
+    debug_span!("Delete index", returns = ?task);

     Ok(HttpResponse::Accepted().json(task))
 }
@@ -255,6 +259,6 @@ pub async fn get_index_stats(
     let stats = IndexStats::from(index_scheduler.index_stats(&index_uid)?);

-    debug!("returns: {:?}", stats);
+    debug_span!("Get index stats", returns = ?stats);
     Ok(HttpResponse::Ok().json(stats))
 }

View File

@@ -11,7 +11,7 @@ use meilisearch_types::milli;
 use meilisearch_types::milli::vector::DistributionShift;
 use meilisearch_types::serde_cs::vec::CS;
 use serde_json::Value;
-use tracing::{debug, warn};
+use tracing::{debug_span, warn};

 use crate::analytics::{Analytics, SearchAggregator};
 use crate::extractors::authentication::policies::*;
@@ -186,7 +186,7 @@ pub async fn search_with_url_query(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug!("called with params: {:?}", params);
+    debug_span!("Search get", parameters = ?params);
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;

     let mut query: SearchQuery = params.into_inner().into();
@@ -213,7 +213,7 @@ pub async fn search_with_url_query(
     let search_result = search_result?;

-    debug!("returns: {:?}", search_result);
+    debug_span!("Search get", returns = ?search_result);
     Ok(HttpResponse::Ok().json(search_result))
 }
@@ -227,7 +227,7 @@ pub async fn search_with_post(
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;

     let mut query = params.into_inner();
-    debug!("search called with params: {:?}", query);
+    debug_span!("Search post", parameters = ?query);

     // Tenant token search_rules.
     if let Some(search_rules) = index_scheduler.filters().get_index_search_rules(&index_uid) {
@@ -252,7 +252,7 @@ pub async fn search_with_post(
     let search_result = search_result?;

-    debug!("returns: {:?}", search_result);
+    debug_span!("Search post", returns = ?search_result);
     Ok(HttpResponse::Ok().json(search_result))
 }

View File

@@ -10,7 +10,7 @@ use meilisearch_types::milli::update::Setting;
 use meilisearch_types::settings::{settings, RankingRuleView, Settings, Unchecked};
 use meilisearch_types::tasks::KindWithContent;
 use serde_json::json;
-use tracing::debug;
+use tracing::{debug_span};

 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
@@ -29,7 +29,7 @@ macro_rules! make_setting_route {
            use meilisearch_types::milli::update::Setting;
            use meilisearch_types::settings::{settings, Settings};
            use meilisearch_types::tasks::KindWithContent;
-           use tracing::debug;
+           use tracing::debug_span;
            use $crate::analytics::Analytics;
            use $crate::extractors::authentication::policies::*;
            use $crate::extractors::authentication::GuardedData;
@@ -61,7 +61,7 @@ macro_rules! make_setting_route {
                    .await??
                    .into();

-               debug!("returns: {:?}", task);
+               debug_span!("Delete settings", returns = ?task);
                Ok(HttpResponse::Accepted().json(task))
            }
@@ -78,6 +78,7 @@ macro_rules! make_setting_route {
                let index_uid = IndexUid::try_from(index_uid.into_inner())?;

                let body = body.into_inner();
+               debug_span!("Update settings", parameters = ?body);

                #[allow(clippy::redundant_closure_call)]
                $analytics(&body, &req);
@@ -109,7 +110,7 @@ macro_rules! make_setting_route {
                    .await??
                    .into();

-               debug!("returns: {:?}", task);
+               debug_span!("Update settings", returns = ?task);
                Ok(HttpResponse::Accepted().json(task))
            }
@@ -126,7 +127,7 @@ macro_rules! make_setting_route {
                let rtxn = index.read_txn()?;
                let settings = settings(&index, &rtxn)?;

-               debug!("returns: {:?}", settings);
+               debug_span!("Update settings", returns = ?settings);

                let mut json = serde_json::json!(&settings);
                let val = json[$camelcase_attr].take();
@@ -656,6 +657,7 @@ pub async fn update_all(
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;

     let new_settings = body.into_inner();
+    debug_span!("Update all settings", parameters = ?new_settings);
     let new_settings = validate_settings(new_settings, &index_scheduler)?;

     analytics.publish(
@@ -768,7 +770,7 @@ pub async fn update_all(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();

-    debug!("returns: {:?}", task);
+    debug_span!("Update all settings", returns = ?task);
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -781,7 +783,7 @@ pub async fn get_all(
     let index = index_scheduler.index(&index_uid)?;
     let rtxn = index.read_txn()?;
     let new_settings = settings(&index, &rtxn)?;
-    debug!("returns: {:?}", new_settings);
+    debug_span!("Get all settings", returns = ?new_settings);
     Ok(HttpResponse::Ok().json(new_settings))
 }
@@ -804,7 +806,7 @@ pub async fn delete_all(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();

-    debug!("returns: {:?}", task);
+    debug_span!("Delete all settings", returns = ?task);
     Ok(HttpResponse::Accepted().json(task))
 }

View File

@@ -10,7 +10,7 @@ use meilisearch_types::tasks::{Kind, Status, Task, TaskId};
 use serde::{Deserialize, Serialize};
 use serde_json::json;
 use time::OffsetDateTime;
-use tracing::debug;
+use tracing::{debug_span};

 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
@@ -252,7 +252,7 @@ async fn get_stats(
     let stats = create_all_stats((*index_scheduler).clone(), (*auth_controller).clone(), filters)?;

-    debug!("returns: {:?}", stats);
+    debug_span!("Get stats", returns = ?stats);
     Ok(HttpResponse::Ok().json(stats))
 }

View File

@@ -7,7 +7,7 @@ use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::keys::actions;
 use serde::Serialize;
-use tracing::debug;
+use tracing::{debug_span};

 use crate::analytics::{Analytics, MultiSearchAggregator};
 use crate::extractors::authentication::policies::ActionPolicy;
@@ -52,7 +52,7 @@ pub async fn multi_search_with_post(
     for (query_index, (index_uid, mut query)) in
         queries.into_iter().map(SearchQueryWithIndex::into_index_query).enumerate()
     {
-        debug!("multi-search #{query_index}: called with params: {:?}", query);
+        debug_span!("Multi-search", on_index = query_index, parameters = ?query);

         // Check index from API key
         if !index_scheduler.filters().is_index_authorized(&index_uid) {
@@ -107,7 +107,7 @@ pub async fn multi_search_with_post(
             err
         })?;

-    debug!("returns: {:?}", search_results);
+    debug_span!("Multi-search", returns = ?search_results);

     Ok(HttpResponse::Ok().json(SearchResults { results: search_results }))
 }

View File

@@ -4,7 +4,7 @@ use index_scheduler::IndexScheduler;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::tasks::KindWithContent;
 use serde_json::json;
-use tracing::debug;
+use tracing::debug_span;

 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
@@ -27,6 +27,6 @@ pub async fn create_snapshot(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();

-    debug!("returns: {:?}", task);
+    debug_span!("Create snapshot", returns = ?task);
     Ok(HttpResponse::Accepted().json(task))
 }

View File

@@ -263,7 +263,6 @@ async fn get_tasks(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    tracing::info!("You called tasks");
     let mut params = params.into_inner();
     analytics.get_tasks(&params, &req);

View File

@@ -15,7 +15,7 @@ use std::io::BufReader;

 use crossbeam_channel::Sender;
 use rayon::prelude::*;
-use tracing::debug;
+use tracing::{debug_span};

 use self::extract_docid_word_positions::extract_docid_word_positions;
 use self::extract_facet_number_docids::extract_facet_number_docids;
@@ -114,7 +114,7 @@ pub(crate) fn data_from_obkv_documents(
     {
         let lmdb_writer_sx = lmdb_writer_sx.clone();
         rayon::spawn(move || {
-            debug!("merge {} database", "facet-id-exists-docids");
+            debug_span!("merge", database = "facet-id-exists-docids");
             match facet_exists_docids_chunks.merge(merge_deladd_cbo_roaring_bitmaps, &indexer) {
                 Ok(reader) => {
                     let _ = lmdb_writer_sx.send(Ok(TypedChunk::FieldIdFacetExistsDocids(reader)));
@@ -130,7 +130,7 @@ pub(crate) fn data_from_obkv_documents(
     {
         let lmdb_writer_sx = lmdb_writer_sx.clone();
         rayon::spawn(move || {
-            debug!("merge {} database", "facet-id-is-null-docids");
+            debug_span!("merge", database = "facet-id-is-null-docids");
             match facet_is_null_docids_chunks.merge(merge_deladd_cbo_roaring_bitmaps, &indexer) {
                 Ok(reader) => {
                     let _ = lmdb_writer_sx.send(Ok(TypedChunk::FieldIdFacetIsNullDocids(reader)));
@@ -146,7 +146,7 @@ pub(crate) fn data_from_obkv_documents(
     {
         let lmdb_writer_sx = lmdb_writer_sx.clone();
         rayon::spawn(move || {
-            debug!("merge {} database", "facet-id-is-empty-docids");
+            debug_span!("merge", database = "facet-id-is-empty-docids");
             match facet_is_empty_docids_chunks.merge(merge_deladd_cbo_roaring_bitmaps, &indexer) {
                 Ok(reader) => {
                     let _ = lmdb_writer_sx.send(Ok(TypedChunk::FieldIdFacetIsEmptyDocids(reader)));
@@ -272,7 +272,7 @@ fn spawn_extraction_task<FE, FS, M>(
             Ok(chunks) => {
                 let child_span = tracing::trace_span!(target: "indexing::details", parent: &current_span, "merge_multiple_chunks");
                 let _entered = child_span.enter();
-                debug!("merge {} database", name);
+                debug_span!("merge", database = name);
                 puffin::profile_scope!("merge_multiple_chunks", name);
                 let reader = chunks.merge(merge_fn, &indexer);
                 let _ = lmdb_writer_sx.send(reader.map(serialize_fn));

View File

@@ -17,7 +17,7 @@ use rand::SeedableRng;
 use roaring::RoaringBitmap;
 use serde::{Deserialize, Serialize};
 use slice_group_by::GroupBy;
-use tracing::debug;
+use tracing::{debug_span};
 use typed_chunk::{write_typed_chunk_into_index, TypedChunk};

 use self::enrich::enrich_documents_batch;
@@ -506,10 +506,7 @@ where
                 documents_seen: documents_seen_count as usize,
                 total_documents: documents_count,
             });
-            debug!(
-                "We have seen {} documents on {} total document so far",
-                documents_seen_count, documents_count
-            );
+            debug_span!("Seen", documents = documents_seen_count, total = documents_count);
         }
         if is_merged_database {
             databases_seen += 1;