diff --git a/meilisearch/src/lib.rs b/meilisearch/src/lib.rs
index 2f7305365..711a11a0a 100644
--- a/meilisearch/src/lib.rs
+++ b/meilisearch/src/lib.rs
@@ -294,14 +294,14 @@ fn import_dump(
     if let Some(date) = dump_reader.date() {
         tracing::info!(
-            "Importing a dump of meilisearch `{:?}` from the {}",
-            dump_reader.version(), // TODO: get the meilisearch version instead of the dump version
-            date
+            version = ?dump_reader.version(), // TODO: get the meilisearch version instead of the dump version
+            %date,
+            "Importing a dump of meilisearch"
         );
     } else {
         tracing::info!(
-            "Importing a dump of meilisearch `{:?}`",
-            dump_reader.version(), // TODO: get the meilisearch version instead of the dump version
+            version = ?dump_reader.version(), // TODO: get the meilisearch version instead of the dump version
+            "Importing a dump of meilisearch",
         );
     }
@@ -384,7 +384,8 @@ fn import_dump(
         )?;
 
         let (builder, user_result) = builder.add_documents(reader)?;
-        tracing::info!("{} documents found.", user_result?);
+        let user_result = user_result?;
+        tracing::info!(documents_found = user_result, "{} documents found.", user_result);
         builder.execute()?;
         wtxn.commit()?;
         tracing::info!("All documents successfully imported.");
diff --git a/meilisearch/src/routes/dump.rs b/meilisearch/src/routes/dump.rs
index 9f55a3a9a..b604985fc 100644
--- a/meilisearch/src/routes/dump.rs
+++ b/meilisearch/src/routes/dump.rs
@@ -1,11 +1,11 @@
 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
 use index_scheduler::IndexScheduler;
-use tracing::debug;
 use meilisearch_auth::AuthController;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::tasks::KindWithContent;
 use serde_json::json;
+use tracing::debug_span;
 
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
@@ -32,6 +32,6 @@ pub async fn create_dump(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug!("returns: {:?}", task);
+    debug_span!("Create dump", returns = ?task);
     Ok(HttpResponse::Accepted().json(task))
 }
diff --git a/meilisearch/src/routes/features.rs b/meilisearch/src/routes/features.rs
index 132460159..cc29b8995 100644
--- a/meilisearch/src/routes/features.rs
+++ b/meilisearch/src/routes/features.rs
@@ -7,7 +7,7 @@ use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::keys::actions;
 use serde_json::json;
-use tracing::debug;
+use tracing::debug_span;
 
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::ActionPolicy;
@@ -33,8 +33,9 @@ async fn get_features(
     let features = index_scheduler.features();
 
     analytics.publish("Experimental features Seen".to_string(), json!(null), Some(&req));
-    debug!("returns: {:?}", features.runtime_features());
-    HttpResponse::Ok().json(features.runtime_features())
+    let features = features.runtime_features();
+    debug_span!("Get features", returns = ?features);
+    HttpResponse::Ok().json(features)
 }
 
 #[derive(Debug, Deserr)]
@@ -60,6 +61,7 @@ async fn patch_features(
     analytics: Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let features = index_scheduler.features();
+    debug_span!("Patch features", parameters = ?new_features);
 
     let old_features = features.runtime_features();
     let new_features = meilisearch_types::features::RuntimeTogglableFeatures {
@@ -93,5 +95,6 @@ async fn patch_features(
         Some(&req),
     );
     index_scheduler.put_runtime_features(new_features)?;
+    debug_span!("Patch features", returns = ?new_features);
     Ok(HttpResponse::Ok().json(new_features))
 }
diff --git a/meilisearch/src/routes/indexes/documents.rs b/meilisearch/src/routes/indexes/documents.rs
index f1124ac3e..19d617c27 100644
--- a/meilisearch/src/routes/indexes/documents.rs
+++ b/meilisearch/src/routes/indexes/documents.rs
@@ -27,7 +27,7 @@ use serde_json::Value;
 use tempfile::tempfile;
 use tokio::fs::File;
 use tokio::io::{AsyncSeekExt, AsyncWriteExt, BufWriter};
-use tracing::debug;
+use tracing::debug_span;
 
 use crate::analytics::{Analytics, DocumentDeletionKind, DocumentFetchKind};
 use crate::error::MeilisearchHttpError;
@@ -101,6 +101,7 @@ pub async fn get_document(
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let DocumentParam { index_uid, document_id } = document_param.into_inner();
+    debug_span!("Get document", parameters = ?params);
     let index_uid = IndexUid::try_from(index_uid)?;
 
     analytics.get_fetch_documents(&DocumentFetchKind::PerDocumentId, &req);
@@ -110,7 +111,7 @@ pub async fn get_document(
     let index = index_scheduler.index(&index_uid)?;
     let document = retrieve_document(&index, &document_id, attributes_to_retrieve)?;
-    debug!("returns: {:?}", document);
+    debug_span!("Get document", returns = ?document);
     Ok(HttpResponse::Ok().json(document))
 }
@@ -131,7 +132,7 @@ pub async fn delete_document(
     };
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
-    debug!("returns: {:?}", task);
+    debug_span!("Delete document", returns = ?task);
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -168,9 +169,8 @@ pub async fn documents_by_query_post(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug!("called with body: {:?}", body);
-
     let body = body.into_inner();
+    debug_span!("Get documents POST", parameters = ?body);
 
     analytics.post_fetch_documents(
         &DocumentFetchKind::Normal {
@@ -191,7 +191,7 @@ pub async fn get_documents(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug!("called with params: {:?}", params);
+    debug_span!("Get documents GET", parameters = ?params);
 
     let BrowseQueryGet { limit, offset, fields, filter } = params.into_inner();
@@ -235,7 +235,7 @@ fn documents_by_query(
     let ret = PaginationView::new(offset, limit, total as usize, documents);
 
-    debug!("returns: {:?}", ret);
+    debug_span!("Get documents", returns = ?ret);
     Ok(HttpResponse::Ok().json(ret))
 }
@@ -271,7 +271,7 @@ pub async fn replace_documents(
 ) -> Result<HttpResponse, ResponseError> {
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
-    debug!("called with params: {:?}", params);
+    debug_span!("Replace documents", parameters = ?params);
     let params = params.into_inner();
 
     analytics.add_documents(&params, index_scheduler.index(&index_uid).is_err(), &req);
@@ -288,6 +288,7 @@ pub async fn replace_documents(
         allow_index_creation,
     )
     .await?;
+    debug_span!("Replace documents", returns = ?task);
 
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -302,8 +303,8 @@ pub async fn update_documents(
 ) -> Result<HttpResponse, ResponseError> {
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
-    debug!("called with params: {:?}", params);
     let params = params.into_inner();
+    debug_span!("Update documents", parameters = ?params);
 
     analytics.update_documents(&params, index_scheduler.index(&index_uid).is_err(), &req);
@@ -319,6 +320,7 @@ pub async fn update_documents(
         allow_index_creation,
     )
     .await?;
+    debug_span!("Update documents", returns = ?task);
 
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -427,7 +429,10 @@ async fn document_addition(
             Err(index_scheduler::Error::FileStore(file_store::Error::IoError(e)))
                 if e.kind() == ErrorKind::NotFound => {}
             Err(e) => {
-                tracing::warn!("Unknown error happened while deleting a malformed update file with uuid {uuid}: {e}");
+                tracing::warn!(
+                    index_uuid = %uuid,
+                    "Unknown error happened while deleting a malformed update file: {e}"
+                );
             }
         }
         // We still want to return the original error to the end user.
@@ -453,7 +458,6 @@ async fn document_addition(
         }
     };
 
-    debug!("returns: {:?}", task);
     Ok(task.into())
 }
@@ -464,7 +468,7 @@ pub async fn delete_documents_batch(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug!("called with params: {:?}", body);
+    debug_span!("Delete documents by batch", parameters = ?body);
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
     analytics.delete_documents(DocumentDeletionKind::PerBatch, &req);
@@ -479,7 +483,7 @@ pub async fn delete_documents_batch(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug!("returns: {:?}", task);
+    debug_span!("Delete documents by batch", returns = ?task);
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -497,7 +501,7 @@ pub async fn delete_documents_by_filter(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug!("called with params: {:?}", body);
+    debug_span!("Delete documents by filter", parameters = ?body);
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
     let index_uid = index_uid.into_inner();
     let filter = body.into_inner().filter;
@@ -515,7 +519,7 @@ pub async fn delete_documents_by_filter(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug!("returns: {:?}", task);
+    debug_span!("Delete documents by filter", returns = ?task);
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -532,7 +536,7 @@ pub async fn clear_all_documents(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug!("returns: {:?}", task);
+    debug_span!("Delete all documents", returns = ?task);
     Ok(HttpResponse::Accepted().json(task))
 }
diff --git a/meilisearch/src/routes/indexes/facet_search.rs b/meilisearch/src/routes/indexes/facet_search.rs
index dbd84281f..21746177d 100644
--- a/meilisearch/src/routes/indexes/facet_search.rs
+++ b/meilisearch/src/routes/indexes/facet_search.rs
@@ -7,7 +7,7 @@ use meilisearch_types::error::deserr_codes::*;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::index_uid::IndexUid;
 use serde_json::Value;
-use tracing::debug;
+use tracing::debug_span;
 
 use crate::analytics::{Analytics, FacetSearchAggregator};
 use crate::extractors::authentication::policies::*;
@@ -56,7 +56,7 @@ pub async fn search(
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
     let query = params.into_inner();
-    debug!("facet search called with params: {:?}", query);
+    debug_span!("Facet search", parameters = ?query);
 
     let mut aggregate = FacetSearchAggregator::from_query(&query, &req);
@@ -83,7 +83,7 @@ pub async fn search(
     let search_result = search_result?;
 
-    debug!("returns: {:?}", search_result);
+    debug_span!("Facet search", returns = ?search_result);
     Ok(HttpResponse::Ok().json(search_result))
 }
diff --git a/meilisearch/src/routes/indexes/mod.rs b/meilisearch/src/routes/indexes/mod.rs
index a071ff536..69eb207b8 100644
--- a/meilisearch/src/routes/indexes/mod.rs
+++ b/meilisearch/src/routes/indexes/mod.rs
@@ -15,7 +15,7 @@ use meilisearch_types::tasks::KindWithContent;
 use serde::Serialize;
 use serde_json::json;
 use time::OffsetDateTime;
-use tracing::debug;
+use tracing::debug_span;
 
 use super::{Pagination, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT};
 use crate::analytics::Analytics;
@@ -93,6 +93,7 @@ pub async fn list_indexes(
     index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, Data<IndexScheduler>>,
     paginate: AwebQueryParameter<ListIndexes, DeserrQueryParamError>,
 ) -> Result<HttpResponse, ResponseError> {
+    debug_span!("List indexes", parameters = ?paginate);
     let filters = index_scheduler.filters();
     let indexes: Vec<Option<IndexView>> =
         index_scheduler.try_for_each_index(|uid, index| -> Result<Option<IndexView>, _> {
@@ -105,7 +106,7 @@ pub async fn list_indexes(
     let indexes: Vec<IndexView> = indexes.into_iter().flatten().collect();
     let ret = paginate.as_pagination().auto_paginate_sized(indexes.into_iter());
 
-    debug!("returns: {:?}", ret);
+    debug_span!("List indexes", returns = ?ret);
     Ok(HttpResponse::Ok().json(ret))
 }
@@ -124,6 +125,7 @@ pub async fn create_index(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
+    debug_span!("Create index", parameters = ?body);
     let IndexCreateRequest { primary_key, uid } = body.into_inner();
 
     let allow_index_creation = index_scheduler.filters().allow_index_creation(&uid);
@@ -137,6 +139,7 @@ pub async fn create_index(
         let task = KindWithContent::IndexCreation { index_uid: uid.to_string(), primary_key };
         let task: SummarizedTaskView =
             tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
+        debug_span!("Create index", returns = ?task);
 
         Ok(HttpResponse::Accepted().json(task))
     } else {
@@ -177,7 +180,7 @@ pub async fn get_index(
     let index = index_scheduler.index(&index_uid)?;
     let index_view = IndexView::new(index_uid.into_inner(), &index)?;
 
-    debug!("returns: {:?}", index_view);
+    debug_span!("Get index", returns = ?index_view);
 
     Ok(HttpResponse::Ok().json(index_view))
 }
@@ -189,7 +192,7 @@ pub async fn update_index(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug!("called with params: {:?}", body);
+    debug_span!("Update index", parameters = ?body);
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
     let body = body.into_inner();
     analytics.publish(
@@ -206,7 +209,7 @@ pub async fn update_index(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug!("returns: {:?}", task);
+    debug_span!("Update index", returns = ?task);
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -218,6 +221,7 @@ pub async fn delete_index(
     let task = KindWithContent::IndexDeletion { index_uid: index_uid.into_inner() };
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
+    debug_span!("Delete index", returns = ?task);
 
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -255,6 +259,6 @@ pub async fn get_index_stats(
     let stats = IndexStats::from(index_scheduler.index_stats(&index_uid)?);
 
-    debug!("returns: {:?}", stats);
+    debug_span!("Get index stats", returns = ?stats);
     Ok(HttpResponse::Ok().json(stats))
 }
diff --git a/meilisearch/src/routes/indexes/search.rs b/meilisearch/src/routes/indexes/search.rs
index 061475798..83b496d0a 100644
--- a/meilisearch/src/routes/indexes/search.rs
+++ b/meilisearch/src/routes/indexes/search.rs
@@ -11,7 +11,7 @@ use meilisearch_types::milli;
 use meilisearch_types::milli::vector::DistributionShift;
 use meilisearch_types::serde_cs::vec::CS;
 use serde_json::Value;
-use tracing::{debug, warn};
+use tracing::{debug_span, warn};
 
 use crate::analytics::{Analytics, SearchAggregator};
 use crate::extractors::authentication::policies::*;
@@ -186,7 +186,7 @@ pub async fn search_with_url_query(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug!("called with params: {:?}", params);
+    debug_span!("Search get", parameters = ?params);
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
     let mut query: SearchQuery = params.into_inner().into();
@@ -213,7 +213,7 @@ pub async fn search_with_url_query(
     let search_result = search_result?;
 
-    debug!("returns: {:?}", search_result);
+    debug_span!("Search get", returns = ?search_result);
     Ok(HttpResponse::Ok().json(search_result))
 }
@@ -227,7 +227,7 @@ pub async fn search_with_post(
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
     let mut query = params.into_inner();
-    debug!("search called with params: {:?}", query);
+    debug_span!("Search post", parameters = ?query);
 
     // Tenant token search_rules.
     if let Some(search_rules) = index_scheduler.filters().get_index_search_rules(&index_uid) {
@@ -252,7 +252,7 @@ pub async fn search_with_post(
     let search_result = search_result?;
 
-    debug!("returns: {:?}", search_result);
+    debug_span!("Search post", returns = ?search_result);
     Ok(HttpResponse::Ok().json(search_result))
 }
diff --git a/meilisearch/src/routes/indexes/settings.rs b/meilisearch/src/routes/indexes/settings.rs
index 35f58e13c..1613b8813 100644
--- a/meilisearch/src/routes/indexes/settings.rs
+++ b/meilisearch/src/routes/indexes/settings.rs
@@ -10,7 +10,7 @@ use meilisearch_types::milli::update::Setting;
 use meilisearch_types::settings::{settings, RankingRuleView, Settings, Unchecked};
 use meilisearch_types::tasks::KindWithContent;
 use serde_json::json;
-use tracing::debug;
+use tracing::debug_span;
 
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
@@ -29,7 +29,7 @@ macro_rules! make_setting_route {
             use meilisearch_types::milli::update::Setting;
             use meilisearch_types::settings::{settings, Settings};
            use meilisearch_types::tasks::KindWithContent;
-            use tracing::debug;
+            use tracing::debug_span;
             use $crate::analytics::Analytics;
             use $crate::extractors::authentication::policies::*;
             use $crate::extractors::authentication::GuardedData;
@@ -61,7 +61,7 @@ macro_rules! make_setting_route {
                 .await??
                 .into();
 
-                debug!("returns: {:?}", task);
+                debug_span!("Delete settings", returns = ?task);
                 Ok(HttpResponse::Accepted().json(task))
             }
@@ -78,6 +78,7 @@ macro_rules! make_setting_route {
                 let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
                 let body = body.into_inner();
+                debug_span!("Update settings", parameters = ?body);
 
                 #[allow(clippy::redundant_closure_call)]
                 $analytics(&body, &req);
@@ -109,7 +110,7 @@ macro_rules! make_setting_route {
                 .await??
                 .into();
 
-                debug!("returns: {:?}", task);
+                debug_span!("Update settings", returns = ?task);
                 Ok(HttpResponse::Accepted().json(task))
             }
@@ -126,7 +127,7 @@ macro_rules! make_setting_route {
                 let rtxn = index.read_txn()?;
                 let settings = settings(&index, &rtxn)?;
 
-                debug!("returns: {:?}", settings);
+                debug_span!("Get settings", returns = ?settings);
                 let mut json = serde_json::json!(&settings);
                 let val = json[$camelcase_attr].take();
@@ -656,6 +657,7 @@ pub async fn update_all(
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
     let new_settings = body.into_inner();
+    debug_span!("Update all settings", parameters = ?new_settings);
     let new_settings = validate_settings(new_settings, &index_scheduler)?;
 
     analytics.publish(
@@ -768,7 +770,7 @@ pub async fn update_all(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug!("returns: {:?}", task);
+    debug_span!("Update all settings", returns = ?task);
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -781,7 +783,7 @@ pub async fn get_all(
     let index = index_scheduler.index(&index_uid)?;
     let rtxn = index.read_txn()?;
     let new_settings = settings(&index, &rtxn)?;
-    debug!("returns: {:?}", new_settings);
+    debug_span!("Get all settings", returns = ?new_settings);
     Ok(HttpResponse::Ok().json(new_settings))
 }
@@ -804,7 +806,7 @@ pub async fn delete_all(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug!("returns: {:?}", task);
+    debug_span!("Delete all settings", returns = ?task);
     Ok(HttpResponse::Accepted().json(task))
 }
diff --git a/meilisearch/src/routes/mod.rs b/meilisearch/src/routes/mod.rs
index 19feac324..ed0ccd8db 100644
--- a/meilisearch/src/routes/mod.rs
+++ b/meilisearch/src/routes/mod.rs
@@ -10,7 +10,7 @@ use meilisearch_types::tasks::{Kind, Status, Task, TaskId};
 use serde::{Deserialize, Serialize};
 use serde_json::json;
 use time::OffsetDateTime;
-use tracing::debug;
+use tracing::debug_span;
 
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
@@ -252,7 +252,7 @@ async fn get_stats(
     let stats = create_all_stats((*index_scheduler).clone(), (*auth_controller).clone(), filters)?;
 
-    debug!("returns: {:?}", stats);
+    debug_span!("Get stats", returns = ?stats);
     Ok(HttpResponse::Ok().json(stats))
 }
diff --git a/meilisearch/src/routes/multi_search.rs b/meilisearch/src/routes/multi_search.rs
index b09841fab..2a369bed1 100644
--- a/meilisearch/src/routes/multi_search.rs
+++ b/meilisearch/src/routes/multi_search.rs
@@ -7,7 +7,7 @@ use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::keys::actions;
 use serde::Serialize;
-use tracing::debug;
+use tracing::debug_span;
 
 use crate::analytics::{Analytics, MultiSearchAggregator};
 use crate::extractors::authentication::policies::ActionPolicy;
@@ -52,7 +52,7 @@ pub async fn multi_search_with_post(
     for (query_index, (index_uid, mut query)) in
         queries.into_iter().map(SearchQueryWithIndex::into_index_query).enumerate()
     {
-        debug!("multi-search #{query_index}: called with params: {:?}", query);
+        debug_span!("Multi-search", on_index = query_index, parameters = ?query);
 
         // Check index from API key
         if !index_scheduler.filters().is_index_authorized(&index_uid) {
@@ -107,7 +107,7 @@ pub async fn multi_search_with_post(
         err
     })?;
 
-    debug!("returns: {:?}", search_results);
+    debug_span!("Multi-search", returns = ?search_results);
 
     Ok(HttpResponse::Ok().json(SearchResults { results: search_results }))
 }
diff --git a/meilisearch/src/routes/snapshot.rs b/meilisearch/src/routes/snapshot.rs
index 001df2150..4f329d251 100644
--- a/meilisearch/src/routes/snapshot.rs
+++ b/meilisearch/src/routes/snapshot.rs
@@ -4,7 +4,7 @@ use index_scheduler::IndexScheduler;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::tasks::KindWithContent;
 use serde_json::json;
-use tracing::debug;
+use tracing::debug_span;
 
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
@@ -27,6 +27,6 @@ pub async fn create_snapshot(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug!("returns: {:?}", task);
+    debug_span!("Create snapshot", returns = ?task);
     Ok(HttpResponse::Accepted().json(task))
 }
diff --git a/meilisearch/src/routes/tasks.rs b/meilisearch/src/routes/tasks.rs
index 9bb5892b5..03b63001d 100644
--- a/meilisearch/src/routes/tasks.rs
+++ b/meilisearch/src/routes/tasks.rs
@@ -263,7 +263,6 @@ async fn get_tasks(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    tracing::info!("You called tasks");
     let mut params = params.into_inner();
     analytics.get_tasks(&params, &req);
diff --git a/milli/src/update/index_documents/extract/mod.rs b/milli/src/update/index_documents/extract/mod.rs
index 357cdf8d7..64dc0b7db 100644
--- a/milli/src/update/index_documents/extract/mod.rs
+++ b/milli/src/update/index_documents/extract/mod.rs
@@ -15,7 +15,7 @@ use std::io::BufReader;
 
 use crossbeam_channel::Sender;
 use rayon::prelude::*;
-use tracing::debug;
+use tracing::debug_span;
 
 use self::extract_docid_word_positions::extract_docid_word_positions;
 use self::extract_facet_number_docids::extract_facet_number_docids;
@@ -114,7 +114,7 @@ pub(crate) fn data_from_obkv_documents(
     {
         let lmdb_writer_sx = lmdb_writer_sx.clone();
         rayon::spawn(move || {
-            debug!("merge {} database", "facet-id-exists-docids");
+            debug_span!("merge", database = "facet-id-exists-docids");
             match facet_exists_docids_chunks.merge(merge_deladd_cbo_roaring_bitmaps, &indexer) {
                 Ok(reader) => {
                     let _ = lmdb_writer_sx.send(Ok(TypedChunk::FieldIdFacetExistsDocids(reader)));
@@ -130,7 +130,7 @@ pub(crate) fn data_from_obkv_documents(
     {
         let lmdb_writer_sx = lmdb_writer_sx.clone();
         rayon::spawn(move || {
-            debug!("merge {} database", "facet-id-is-null-docids");
+            debug_span!("merge", database = "facet-id-is-null-docids");
             match facet_is_null_docids_chunks.merge(merge_deladd_cbo_roaring_bitmaps, &indexer) {
                 Ok(reader) => {
                     let _ = lmdb_writer_sx.send(Ok(TypedChunk::FieldIdFacetIsNullDocids(reader)));
@@ -146,7 +146,7 @@ pub(crate) fn data_from_obkv_documents(
     {
         let lmdb_writer_sx = lmdb_writer_sx.clone();
         rayon::spawn(move || {
-            debug!("merge {} database", "facet-id-is-empty-docids");
+            debug_span!("merge", database = "facet-id-is-empty-docids");
             match facet_is_empty_docids_chunks.merge(merge_deladd_cbo_roaring_bitmaps, &indexer) {
                 Ok(reader) => {
                     let _ = lmdb_writer_sx.send(Ok(TypedChunk::FieldIdFacetIsEmptyDocids(reader)));
@@ -272,7 +272,7 @@ fn spawn_extraction_task(
                     Ok(chunks) => {
                         let child_span = tracing::trace_span!(target: "indexing::details", parent: &current_span, "merge_multiple_chunks");
                         let _entered = child_span.enter();
-                        debug!("merge {} database", name);
+                        debug_span!("merge", database = name);
                         puffin::profile_scope!("merge_multiple_chunks", name);
                         let reader = chunks.merge(merge_fn, &indexer);
                         let _ = lmdb_writer_sx.send(reader.map(serialize_fn));
diff --git a/milli/src/update/index_documents/mod.rs b/milli/src/update/index_documents/mod.rs
index ca3d6bdd1..a4981823f 100644
--- a/milli/src/update/index_documents/mod.rs
+++ b/milli/src/update/index_documents/mod.rs
@@ -17,7 +17,7 @@ use rand::SeedableRng;
 use roaring::RoaringBitmap;
 use serde::{Deserialize, Serialize};
 use slice_group_by::GroupBy;
-use tracing::debug;
+use tracing::debug_span;
 use typed_chunk::{write_typed_chunk_into_index, TypedChunk};
 
 use self::enrich::enrich_documents_batch;
@@ -506,10 +506,7 @@ where
                 documents_seen: documents_seen_count as usize,
                 total_documents: documents_count,
             });
-            debug!(
-                "We have seen {} documents on {} total document so far",
-                documents_seen_count, documents_count
-            );
+            debug_span!("Seen", documents = documents_seen_count, total = documents_count);
         }
         if is_merged_database {
            databases_seen += 1;
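
Note: unlike `debug!`, which emits a log event at the call site, `debug_span!` only
constructs a `tracing` span; nothing is logged unless the span is entered and the
subscriber is configured to emit span events. A minimal sketch of such a setup,
assuming the `tracing-subscriber` crate with its `env-filter` feature enabled (the
filter string and dummy value below are illustrative, not part of this patch):

    use tracing::debug_span;
    use tracing_subscriber::fmt::format::FmtSpan;

    fn main() {
        // Emit an event when each span is created and closed; without
        // `with_span_events`, a span that is never entered produces no output.
        tracing_subscriber::fmt()
            .with_span_events(FmtSpan::NEW | FmtSpan::CLOSE)
            .with_env_filter("debug") // assumed filter; tune per deployment
            .init();

        // Same pattern as the handlers above: the value is captured as a
        // structured field through its `Debug` impl rather than being
        // formatted into the message string.
        let task = "dummy task";
        let span = debug_span!("Create dump", returns = ?task);
        let _entered = span.enter(); // entering the span makes it observable
    }

Recording values as structured fields (`returns = ?task`) instead of interpolating
them into the message keeps them machine-readable for layers such as JSON output.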