From 0b1bb42753f7ae68917430c9f25fec05b4f49d06 Mon Sep 17 00:00:00 2001
From: Tamo
Date: Thu, 8 Feb 2024 10:14:50 +0100
Subject: [PATCH] use debug instead of debug_span

These handlers used `debug_span!` to report their parameters and return
values, but the spans were never entered, so nothing was actually logged.
Emit a `debug!` event instead, which is logged immediately. Also repair a
few string literals in milli's extract module that an earlier
search-and-replace had mangled.
---
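Note for reviewers: a minimal standalone sketch of the difference (an
illustration, not code from this repository). A `debug_span!` is inert
until it is entered, and even an entered span only produces output when
the subscriber is configured to emit span events, whereas `debug!`
records an event right away:

    use tracing::{debug, debug_span};

    fn report_task() {
        // Creates a span but never enters it: with a default
        // tracing-subscriber fmt layer, this prints nothing.
        debug_span!("Create dump", returns = "task");

        // Entering the span only opens a scope; log lines still appear
        // only if the subscriber was built with span events enabled.
        let span = debug_span!("Create dump", returns = "task");
        let _guard = span.enter();

        // An event: printed immediately at DEBUG level.
        debug!(returns = "task", "Create dump");
    }
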
 meilisearch/src/routes/dump.rs                |  4 ++--
 meilisearch/src/routes/features.rs            |  8 ++++----
 meilisearch/src/routes/indexes/documents.rs   | 32 ++++++++++++++++----------------
 .../src/routes/indexes/facet_search.rs        |  6 +++---
 meilisearch/src/routes/indexes/mod.rs         | 20 ++++++++++----------
 meilisearch/src/routes/indexes/search.rs      | 10 +++++-----
 meilisearch/src/routes/indexes/settings.rs    | 20 ++++++++++----------
 meilisearch/src/routes/mod.rs                 |  4 ++--
 meilisearch/src/routes/multi_search.rs        |  6 +++---
 meilisearch/src/routes/snapshot.rs            |  4 ++--
 .../src/update/index_documents/extract/mod.rs | 19 ++++++++++---------
 11 files changed, 67 insertions(+), 66 deletions(-)

diff --git a/meilisearch/src/routes/dump.rs b/meilisearch/src/routes/dump.rs
index b604985fc..071ae60b8 100644
--- a/meilisearch/src/routes/dump.rs
+++ b/meilisearch/src/routes/dump.rs
@@ -5,7 +5,7 @@ use meilisearch_auth::AuthController;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::tasks::KindWithContent;
 use serde_json::json;
-use tracing::debug_span;
+use tracing::debug;
 
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
@@ -32,6 +32,6 @@ pub async fn create_dump(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug_span!("Create dump", returns = ?task);
+    debug!(returns = ?task, "Create dump");
     Ok(HttpResponse::Accepted().json(task))
 }
diff --git a/meilisearch/src/routes/features.rs b/meilisearch/src/routes/features.rs
index 7657ceef3..3c2c4d018 100644
--- a/meilisearch/src/routes/features.rs
+++ b/meilisearch/src/routes/features.rs
@@ -7,7 +7,7 @@ use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::keys::actions;
 use serde_json::json;
-use tracing::{debug_span};
+use tracing::debug;
 
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::ActionPolicy;
@@ -34,7 +34,7 @@ async fn get_features(
     analytics.publish("Experimental features Seen".to_string(), json!(null), Some(&req));
     let features = features.runtime_features();
-    debug_span!("Get features", returns = ?features);
+    debug!(returns = ?features, "Get features");
     HttpResponse::Ok().json(features)
 }
@@ -63,7 +63,7 @@ async fn patch_features(
     analytics: Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let features = index_scheduler.features();
-    debug_span!("Patch features", parameters = ?new_features);
+    debug!(parameters = ?new_features, "Patch features");
     let old_features = features.runtime_features();
 
     let new_features = meilisearch_types::features::RuntimeTogglableFeatures {
@@ -100,6 +100,6 @@ async fn patch_features(
         Some(&req),
     );
     index_scheduler.put_runtime_features(new_features)?;
-    debug_span!("Patch features", returns = ?new_features);
+    debug!(returns = ?new_features, "Patch features");
     Ok(HttpResponse::Ok().json(new_features))
 }
diff --git a/meilisearch/src/routes/indexes/documents.rs b/meilisearch/src/routes/indexes/documents.rs
index 19d617c27..1f41fa10c 100644
--- a/meilisearch/src/routes/indexes/documents.rs
+++ b/meilisearch/src/routes/indexes/documents.rs
@@ -27,7 +27,7 @@ use serde_json::Value;
 use tempfile::tempfile;
 use tokio::fs::File;
 use tokio::io::{AsyncSeekExt, AsyncWriteExt, BufWriter};
-use tracing::{debug_span};
+use tracing::debug;
 
 use crate::analytics::{Analytics, DocumentDeletionKind, DocumentFetchKind};
 use crate::error::MeilisearchHttpError;
@@ -101,7 +101,7 @@ pub async fn get_document(
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let DocumentParam { index_uid, document_id } = document_param.into_inner();
-    debug_span!("Get document", parameters = ?params);
+    debug!(parameters = ?params, "Get document");
     let index_uid = IndexUid::try_from(index_uid)?;
 
     analytics.get_fetch_documents(&DocumentFetchKind::PerDocumentId, &req);
@@ -111,7 +111,7 @@ pub async fn get_document(
     let index = index_scheduler.index(&index_uid)?;
     let document = retrieve_document(&index, &document_id, attributes_to_retrieve)?;
-    debug_span!("Get document", returns = ?document);
+    debug!(returns = ?document, "Get document");
     Ok(HttpResponse::Ok().json(document))
 }
@@ -132,7 +132,7 @@ pub async fn delete_document(
     };
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
-    debug_span!("Delete document", returns = ?task);
+    debug!(returns = ?task, "Delete document");
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -170,7 +170,7 @@ pub async fn documents_by_query_post(
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let body = body.into_inner();
-    debug_span!("Get documents POST", parameters = ?body);
+    debug!(parameters = ?body, "Get documents POST");
 
     analytics.post_fetch_documents(
         &DocumentFetchKind::Normal {
@@ -191,7 +191,7 @@ pub async fn get_documents(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug_span!("Get documents GET", parameters = ?params);
+    debug!(parameters = ?params, "Get documents GET");
 
     let BrowseQueryGet { limit, offset, fields, filter } = params.into_inner();
@@ -235,7 +235,7 @@ fn documents_by_query(
     let ret = PaginationView::new(offset, limit, total as usize, documents);
 
-    debug_span!("Get documents", returns = ?ret);
+    debug!(returns = ?ret, "Get documents");
     Ok(HttpResponse::Ok().json(ret))
 }
@@ -271,7 +271,7 @@ pub async fn replace_documents(
 ) -> Result<HttpResponse, ResponseError> {
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
-    debug_span!("Replace documents", parameters = ?params);
+    debug!(parameters = ?params, "Replace documents");
     let params = params.into_inner();
 
     analytics.add_documents(&params, index_scheduler.index(&index_uid).is_err(), &req);
@@ -288,7 +288,7 @@ pub async fn replace_documents(
         allow_index_creation,
     )
     .await?;
-    debug_span!("Replace documents", returns = ?task);
+    debug!(returns = ?task, "Replace documents");
 
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -304,7 +304,7 @@ pub async fn update_documents(
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
     let params = params.into_inner();
-    debug_span!("Update documents", parameters = ?params);
+    debug!(parameters = ?params, "Update documents");
 
     analytics.update_documents(&params, index_scheduler.index(&index_uid).is_err(), &req);
@@ -320,7 +320,7 @@ pub async fn update_documents(
         allow_index_creation,
     )
     .await?;
-    debug_span!("Update documents", returns = ?task);
+    debug!(returns = ?task, "Update documents");
 
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -468,7 +468,7 @@ pub async fn delete_documents_batch(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug_span!("Delete documents by batch", parameters = ?body);
+    debug!(parameters = ?body, "Delete documents by batch");
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
     analytics.delete_documents(DocumentDeletionKind::PerBatch, &req);
@@ -483,7 +483,7 @@ pub async fn delete_documents_batch(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
-    debug_span!("Delete documents by batch", returns = ?task);
+    debug!(returns = ?task, "Delete documents by batch");
 
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -501,7 +501,7 @@ pub async fn delete_documents_by_filter(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug_span!("Delete documents by filter", parameters = ?body);
+    debug!(parameters = ?body, "Delete documents by filter");
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
     let index_uid = index_uid.into_inner();
     let filter = body.into_inner().filter;
@@ -519,7 +519,7 @@ pub async fn delete_documents_by_filter(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug_span!("Delete documents by filter", returns = ?task);
+    debug!(returns = ?task, "Delete documents by filter");
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -536,7 +536,7 @@ pub async fn clear_all_documents(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug_span!("Delete all documents", returns = ?task);
+    debug!(returns = ?task, "Delete all documents");
     Ok(HttpResponse::Accepted().json(task))
 }
diff --git a/meilisearch/src/routes/indexes/facet_search.rs b/meilisearch/src/routes/indexes/facet_search.rs
index 21746177d..a980fb278 100644
--- a/meilisearch/src/routes/indexes/facet_search.rs
+++ b/meilisearch/src/routes/indexes/facet_search.rs
@@ -7,7 +7,7 @@ use meilisearch_types::error::deserr_codes::*;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::index_uid::IndexUid;
 use serde_json::Value;
-use tracing::debug_span;
+use tracing::debug;
 
 use crate::analytics::{Analytics, FacetSearchAggregator};
 use crate::extractors::authentication::policies::*;
@@ -56,7 +56,7 @@ pub async fn search(
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
     let query = params.into_inner();
-    debug_span!("Facet search", parameters = ?query);
+    debug!(parameters = ?query, "Facet search");
 
     let mut aggregate = FacetSearchAggregator::from_query(&query, &req);
@@ -83,7 +83,7 @@ pub async fn search(
 
     let search_result = search_result?;
 
-    debug_span!("Facet search", returns = ?search_result);
+    debug!(returns = ?search_result, "Facet search");
     Ok(HttpResponse::Ok().json(search_result))
 }
diff --git a/meilisearch/src/routes/indexes/mod.rs b/meilisearch/src/routes/indexes/mod.rs
index 69eb207b8..d80bd9c61 100644
--- a/meilisearch/src/routes/indexes/mod.rs
+++ b/meilisearch/src/routes/indexes/mod.rs
@@ -15,7 +15,7 @@ use meilisearch_types::tasks::KindWithContent;
 use serde::Serialize;
 use serde_json::json;
 use time::OffsetDateTime;
-use tracing::debug_span;
+use tracing::debug;
 
 use super::{Pagination, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT};
 use crate::analytics::Analytics;
@@ -93,7 +93,7 @@ pub async fn list_indexes(
     index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, Data<IndexScheduler>>,
     paginate: AwebQueryParameter<ListIndexes, DeserrQueryParamError>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug_span!("List indexes", parameters = ?paginate);
+    debug!(parameters = ?paginate, "List indexes");
     let filters = index_scheduler.filters();
     let indexes: Vec<Option<IndexView>> =
         index_scheduler.try_for_each_index(|uid, index| -> Result<Option<IndexView>, _> {
@@ -106,7 +106,7 @@ pub async fn list_indexes(
     let indexes: Vec<IndexView> = indexes.into_iter().flatten().collect();
 
     let ret = paginate.as_pagination().auto_paginate_sized(indexes.into_iter());
-    debug_span!("List indexes", returns = ?ret);
+    debug!(returns = ?ret, "List indexes");
     Ok(HttpResponse::Ok().json(ret))
 }
@@ -125,7 +125,7 @@ pub async fn create_index(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug_span!("Create index", parameters = ?body);
+    debug!(parameters = ?body, "Create index");
     let IndexCreateRequest { primary_key, uid } = body.into_inner();
 
     let allow_index_creation = index_scheduler.filters().allow_index_creation(&uid);
@@ -139,7 +139,7 @@ pub async fn create_index(
         let task = KindWithContent::IndexCreation { index_uid: uid.to_string(), primary_key };
         let task: SummarizedTaskView =
             tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
-        debug_span!("Create index", returns = ?task);
+        debug!(returns = ?task, "Create index");
 
         Ok(HttpResponse::Accepted().json(task))
     } else {
@@ -180,7 +180,7 @@ pub async fn get_index(
     let index = index_scheduler.index(&index_uid)?;
     let index_view = IndexView::new(index_uid.into_inner(), &index)?;
 
-    debug_span!("Get index", returns = ?index_view);
+    debug!(returns = ?index_view, "Get index");
 
     Ok(HttpResponse::Ok().json(index_view))
 }
@@ -192,7 +192,7 @@ pub async fn update_index(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug_span!("Update index", parameters = ?body);
+    debug!(parameters = ?body, "Update index");
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
     let body = body.into_inner();
     analytics.publish(
@@ -209,7 +209,7 @@ pub async fn update_index(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug_span!("Update index", returns = ?task);
+    debug!(returns = ?task, "Update index");
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -221,7 +221,7 @@ pub async fn delete_index(
     let task = KindWithContent::IndexDeletion { index_uid: index_uid.into_inner() };
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
-    debug_span!("Delete index", returns = ?task);
+    debug!(returns = ?task, "Delete index");
 
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -259,6 +259,6 @@ pub async fn get_index_stats(
 
     let stats = IndexStats::from(index_scheduler.index_stats(&index_uid)?);
 
-    debug_span!("Get index stats", returns = ?stats);
+    debug!(returns = ?stats, "Get index stats");
     Ok(HttpResponse::Ok().json(stats))
 }
diff --git a/meilisearch/src/routes/indexes/search.rs b/meilisearch/src/routes/indexes/search.rs
index 83b496d0a..3adfce970 100644
--- a/meilisearch/src/routes/indexes/search.rs
+++ b/meilisearch/src/routes/indexes/search.rs
@@ -11,7 +11,7 @@ use meilisearch_types::milli;
 use meilisearch_types::milli::vector::DistributionShift;
 use meilisearch_types::serde_cs::vec::CS;
 use serde_json::Value;
-use tracing::{debug_span, warn};
+use tracing::{debug, warn};
 
 use crate::analytics::{Analytics, SearchAggregator};
 use crate::extractors::authentication::policies::*;
@@ -186,7 +186,7 @@ pub async fn search_with_url_query(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug_span!("Search get", parameters = ?params);
+    debug!(parameters = ?params, "Search get");
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
     let mut query: SearchQuery = params.into_inner().into();
@@ -213,7 +213,7 @@ pub async fn search_with_url_query(
 
     let search_result = search_result?;
 
-    debug_span!("Search get", returns = ?search_result);
+    debug!(returns = ?search_result, "Search get");
     Ok(HttpResponse::Ok().json(search_result))
 }
@@ -227,7 +227,7 @@ pub async fn search_with_post(
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
     let mut query = params.into_inner();
-    debug_span!("Search post", parameters = ?query);
+    debug!(parameters = ?query, "Search post");
 
     // Tenant token search_rules.
     if let Some(search_rules) = index_scheduler.filters().get_index_search_rules(&index_uid) {
@@ -252,7 +252,7 @@ pub async fn search_with_post(
 
     let search_result = search_result?;
 
-    debug_span!("Search post", returns = ?search_result);
+    debug!(returns = ?search_result, "Search post");
     Ok(HttpResponse::Ok().json(search_result))
 }
diff --git a/meilisearch/src/routes/indexes/settings.rs b/meilisearch/src/routes/indexes/settings.rs
index 1613b8813..23e8925c7 100644
--- a/meilisearch/src/routes/indexes/settings.rs
+++ b/meilisearch/src/routes/indexes/settings.rs
@@ -10,7 +10,7 @@ use meilisearch_types::milli::update::Setting;
 use meilisearch_types::settings::{settings, RankingRuleView, Settings, Unchecked};
 use meilisearch_types::tasks::KindWithContent;
 use serde_json::json;
-use tracing::{debug_span};
+use tracing::debug;
 
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
@@ -29,7 +29,7 @@ macro_rules! make_setting_route {
             use meilisearch_types::milli::update::Setting;
             use meilisearch_types::settings::{settings, Settings};
             use meilisearch_types::tasks::KindWithContent;
-            use tracing::debug_span;
+            use tracing::debug;
             use $crate::analytics::Analytics;
             use $crate::extractors::authentication::policies::*;
             use $crate::extractors::authentication::GuardedData;
@@ -61,7 +61,7 @@ macro_rules! make_setting_route {
                     .await??
                     .into();
 
-                debug_span!("Delete settings", returns = ?task);
+                debug!(returns = ?task, "Delete settings");
                 Ok(HttpResponse::Accepted().json(task))
             }
@@ -78,7 +78,7 @@ macro_rules! make_setting_route {
                 let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
                 let body = body.into_inner();
-                debug_span!("Update settings", parameters = ?body);
+                debug!(parameters = ?body, "Update settings");
 
                 #[allow(clippy::redundant_closure_call)]
                 $analytics(&body, &req);
@@ -110,7 +110,7 @@ macro_rules! make_setting_route {
                    .await??
                    .into();
 
-                debug_span!("Update settings", returns = ?task);
+                debug!(returns = ?task, "Update settings");
                 Ok(HttpResponse::Accepted().json(task))
             }
@@ -127,7 +127,7 @@ macro_rules! make_setting_route {
                 let rtxn = index.read_txn()?;
                 let settings = settings(&index, &rtxn)?;
 
-                debug_span!("Update settings", returns = ?settings);
+                debug!(returns = ?settings, "Update settings");
 
                 let mut json = serde_json::json!(&settings);
                 let val = json[$camelcase_attr].take();
@@ -657,7 +657,7 @@ pub async fn update_all(
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
     let new_settings = body.into_inner();
-    debug_span!("Update all settings", parameters = ?new_settings);
+    debug!(parameters = ?new_settings, "Update all settings");
     let new_settings = validate_settings(new_settings, &index_scheduler)?;
 
     analytics.publish(
@@ -770,7 +770,7 @@ pub async fn update_all(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug_span!("Update all settings", returns = ?task);
+    debug!(returns = ?task, "Update all settings");
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -783,7 +783,7 @@ pub async fn get_all(
     let index = index_scheduler.index(&index_uid)?;
     let rtxn = index.read_txn()?;
     let new_settings = settings(&index, &rtxn)?;
-    debug_span!("Get all settings", returns = ?new_settings);
+    debug!(returns = ?new_settings, "Get all settings");
     Ok(HttpResponse::Ok().json(new_settings))
 }
@@ -806,7 +806,7 @@ pub async fn delete_all(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug_span!("Delete all settings", returns = ?task);
+    debug!(returns = ?task, "Delete all settings");
     Ok(HttpResponse::Accepted().json(task))
 }
diff --git a/meilisearch/src/routes/mod.rs b/meilisearch/src/routes/mod.rs
index ed0ccd8db..89cf63c50 100644
--- a/meilisearch/src/routes/mod.rs
+++ b/meilisearch/src/routes/mod.rs
@@ -10,7 +10,7 @@ use meilisearch_types::tasks::{Kind, Status, Task, TaskId};
 use serde::{Deserialize, Serialize};
 use serde_json::json;
 use time::OffsetDateTime;
-use tracing::{debug_span};
+use tracing::debug;
 
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
@@ -252,7 +252,7 @@ async fn get_stats(
     let stats = create_all_stats((*index_scheduler).clone(), (*auth_controller).clone(), filters)?;
 
-    debug_span!("Get stats", returns = ?stats);
+    debug!(returns = ?stats, "Get stats");
     Ok(HttpResponse::Ok().json(stats))
 }
diff --git a/meilisearch/src/routes/multi_search.rs b/meilisearch/src/routes/multi_search.rs
index 2a369bed1..86aa58e70 100644
--- a/meilisearch/src/routes/multi_search.rs
+++ b/meilisearch/src/routes/multi_search.rs
@@ -7,7 +7,7 @@ use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::keys::actions;
 use serde::Serialize;
-use tracing::{debug_span};
+use tracing::debug;
 
 use crate::analytics::{Analytics, MultiSearchAggregator};
 use crate::extractors::authentication::policies::ActionPolicy;
@@ -52,7 +52,7 @@ pub async fn multi_search_with_post(
         for (query_index, (index_uid, mut query)) in
             queries.into_iter().map(SearchQueryWithIndex::into_index_query).enumerate()
         {
-            debug_span!("Multi-search", on_index = query_index, parameters = ?query);
+            debug!(on_index = query_index, parameters = ?query, "Multi-search");
 
             // Check index from API key
             if !index_scheduler.filters().is_index_authorized(&index_uid) {
@@ -107,7 +107,7 @@ pub async fn multi_search_with_post(
         err
     })?;
 
-    debug_span!("Multi-search", returns = ?search_results);
+    debug!(returns = ?search_results, "Multi-search");
 
     Ok(HttpResponse::Ok().json(SearchResults { results: search_results }))
 }
diff --git a/meilisearch/src/routes/snapshot.rs b/meilisearch/src/routes/snapshot.rs
index 4f329d251..c94529932 100644
--- a/meilisearch/src/routes/snapshot.rs
+++ b/meilisearch/src/routes/snapshot.rs
@@ -4,7 +4,7 @@ use index_scheduler::IndexScheduler;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::tasks::KindWithContent;
 use serde_json::json;
-use tracing::debug_span;
+use tracing::debug;
 
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
@@ -27,6 +27,6 @@ pub async fn create_snapshot(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug_span!("Create snapshot", returns = ?task);
+    debug!(returns = ?task, "Create snapshot");
     Ok(HttpResponse::Accepted().json(task))
 }
diff --git a/milli/src/update/index_documents/extract/mod.rs b/milli/src/update/index_documents/extract/mod.rs
index 64dc0b7db..aac60e6e6 100644
--- a/milli/src/update/index_documents/extract/mod.rs
+++ b/milli/src/update/index_documents/extract/mod.rs
@@ -15,7 +15,7 @@ use std::io::BufReader;
 
 use crossbeam_channel::Sender;
 use rayon::prelude::*;
-use tracing::{debug_span};
+use tracing::debug;
 
 use self::extract_docid_word_positions::extract_docid_word_positions;
 use self::extract_facet_number_docids::extract_facet_number_docids;
@@ -114,7 +114,7 @@ pub(crate) fn data_from_obkv_documents(
     {
         let lmdb_writer_sx = lmdb_writer_sx.clone();
         rayon::spawn(move || {
-            debug_span!("merge", database = "facet-id-exists-docids");
+            debug!(database = "facet-id-exists-docids", "merge");
             match facet_exists_docids_chunks.merge(merge_deladd_cbo_roaring_bitmaps, &indexer) {
                 Ok(reader) => {
                     let _ = lmdb_writer_sx.send(Ok(TypedChunk::FieldIdFacetExistsDocids(reader)));
@@ -130,7 +130,7 @@ pub(crate) fn data_from_obkv_documents(
     {
         let lmdb_writer_sx = lmdb_writer_sx.clone();
         rayon::spawn(move || {
-            debug_span!("merge", database = "facet-id-is-null-docids");
+            debug!(database = "facet-id-is-null-docids", "merge");
             match facet_is_null_docids_chunks.merge(merge_deladd_cbo_roaring_bitmaps, &indexer) {
                 Ok(reader) => {
                     let _ = lmdb_writer_sx.send(Ok(TypedChunk::FieldIdFacetIsNullDocids(reader)));
@@ -146,7 +146,7 @@ pub(crate) fn data_from_obkv_documents(
     {
         let lmdb_writer_sx = lmdb_writer_sx.clone();
         rayon::spawn(move || {
-            debug_span!("merge", database = "facet-id-is-empty-docids");
+            debug!(database = "facet-id-is-empty-docids", "merge");
             match facet_is_empty_docids_chunks.merge(merge_deladd_cbo_roaring_bitmaps, &indexer) {
                 Ok(reader) => {
                     let _ = lmdb_writer_sx.send(Ok(TypedChunk::FieldIdFacetIsEmptyDocids(reader)));
@@ -231,7 +231,7 @@ pub(crate) fn data_from_obkv_documents(
         extract_facet_number_docids,
         merge_deladd_cbo_roaring_bitmaps,
         TypedChunk::FieldIdFacetNumberDocids,
-        "field-id-facet-number-docidsdexing::details, ",
+        "field-id-facet-number-docids",
     );
 
     Ok(())
@@ -261,18 +261,19 @@ fn spawn_extraction_task(
     let current_span = tracing::Span::current();
 
     rayon::spawn(move || {
-        let child_span = tracing::trace_span!(target: "", parent: &current_span, "extract_multiple_chunks");
+        let child_span =
+            tracing::trace_span!(target: "indexing::details", parent: &current_span, "extract_multiple_chunks");
         let _entered = child_span.enter();
-        puffin::profile_scope!("extract_multiple_chunksdexing::details, ", name);
+        puffin::profile_scope!("extract_multiple_chunks", name);
         let chunks: Result<M> =
             chunks.into_par_iter().map(|chunk| extract_fn(chunk, indexer)).collect();
         let current_span = tracing::Span::current();
 
         rayon::spawn(move || match chunks {
             Ok(chunks) => {
-                let child_span = tracing::trace_span!(target: "", parent: &current_span, "merge_multiple_chunks");
+                let child_span = tracing::trace_span!(target: "indexing::details", parent: &current_span, "merge_multiple_chunks");
                 let _entered = child_span.enter();
-                debug_span!("merge", database = name);
+                debug!(database = name, "merge");
                 puffin::profile_scope!("merge_multiple_chunks", name);
                 let reader = chunks.merge(merge_fn, &indexer);
                 let _ = lmdb_writer_sx.send(reader.map(serialize_fn));