mirror of https://github.com/meilisearch/meilisearch.git
synced 2024-11-23 02:27:40 +08:00
use debug instead of debug_span
This commit is contained in:
parent ef994d84d0
commit 1502382316
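For context, the difference this commit exploits: `debug_span!` only constructs a span, and a span that is never entered (or attached to a future) wraps no work, so calling it as a fire-and-forget statement records nothing useful at most subscribers; `debug!` emits a log event immediately. A minimal sketch of the two macros, using a hypothetical placeholder value rather than the commit's own types:

// Sketch only, assuming the standard `tracing` crate macros; `task` here is
// an illustrative placeholder, not the SummarizedTaskView from the diff below.
use tracing::{debug, debug_span};

fn example() {
    let task = "dumpUid: 1234";

    // Before: constructs a Span and immediately drops it. Because the span
    // is never entered, this call site effectively logs nothing.
    debug_span!("Create dump", returns = ?task);

    // After: emits a debug-level event right away, with the same fields.
    debug!(returns = ?task, "Create dump");
}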
@@ -5,7 +5,7 @@ use meilisearch_auth::AuthController;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::tasks::KindWithContent;
 use serde_json::json;
-use tracing::debug_span;
+use tracing::debug;
 
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
@@ -32,6 +32,6 @@ pub async fn create_dump(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug_span!("Create dump", returns = ?task);
+    debug!(returns = ?task, "Create dump");
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -7,7 +7,7 @@ use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::keys::actions;
 use serde_json::json;
-use tracing::{debug_span};
+use tracing::debug;
 
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::ActionPolicy;
@@ -34,7 +34,7 @@ async fn get_features(
 
     analytics.publish("Experimental features Seen".to_string(), json!(null), Some(&req));
     let features = features.runtime_features();
-    debug_span!("Get features", returns = ?features);
+    debug!(returns = ?features, "Get features");
     HttpResponse::Ok().json(features)
 }
 
@@ -61,7 +61,7 @@ async fn patch_features(
     analytics: Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let features = index_scheduler.features();
-    debug_span!("Patch features", parameters = ?new_features);
+    debug!(parameters = ?new_features, "Patch features");
 
     let old_features = features.runtime_features();
     let new_features = meilisearch_types::features::RuntimeTogglableFeatures {
@@ -95,6 +95,6 @@ async fn patch_features(
         Some(&req),
     );
     index_scheduler.put_runtime_features(new_features)?;
-    debug_span!("Patch features", returns = ?new_features);
+    debug!(returns = ?new_features, "Patch features");
     Ok(HttpResponse::Ok().json(new_features))
 }
@@ -27,7 +27,7 @@ use serde_json::Value;
 use tempfile::tempfile;
 use tokio::fs::File;
 use tokio::io::{AsyncSeekExt, AsyncWriteExt, BufWriter};
-use tracing::{debug_span};
+use tracing::debug;
 
 use crate::analytics::{Analytics, DocumentDeletionKind, DocumentFetchKind};
 use crate::error::MeilisearchHttpError;
@@ -101,7 +101,7 @@ pub async fn get_document(
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let DocumentParam { index_uid, document_id } = document_param.into_inner();
-    debug_span!("Get document", parameters = ?params);
+    debug!(parameters = ?params, "Get document");
     let index_uid = IndexUid::try_from(index_uid)?;
 
     analytics.get_fetch_documents(&DocumentFetchKind::PerDocumentId, &req);
@@ -111,7 +111,7 @@ pub async fn get_document(
 
     let index = index_scheduler.index(&index_uid)?;
     let document = retrieve_document(&index, &document_id, attributes_to_retrieve)?;
-    debug_span!("Get document", returns = ?document);
+    debug!(returns = ?document, "Get document");
     Ok(HttpResponse::Ok().json(document))
 }
 
@@ -132,7 +132,7 @@ pub async fn delete_document(
     };
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
-    debug_span!("Delete document", returns = ?task);
+    debug!(returns = ?task, "Delete document");
     Ok(HttpResponse::Accepted().json(task))
 }
 
@@ -170,7 +170,7 @@ pub async fn documents_by_query_post(
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let body = body.into_inner();
-    debug_span!("Get documents POST", parameters = ?body);
+    debug!(parameters = ?body, "Get documents POST");
 
     analytics.post_fetch_documents(
         &DocumentFetchKind::Normal {
@@ -191,7 +191,7 @@ pub async fn get_documents(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug_span!("Get documents GET", parameters = ?params);
+    debug!(parameters = ?params, "Get documents GET");
 
     let BrowseQueryGet { limit, offset, fields, filter } = params.into_inner();
 
@@ -235,7 +235,7 @@ fn documents_by_query(
 
     let ret = PaginationView::new(offset, limit, total as usize, documents);
 
-    debug_span!("Get documents", returns = ?ret);
+    debug!(returns = ?ret, "Get documents");
     Ok(HttpResponse::Ok().json(ret))
 }
 
@@ -271,7 +271,7 @@ pub async fn replace_documents(
 ) -> Result<HttpResponse, ResponseError> {
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
-    debug_span!("Replace documents", parameters = ?params);
+    debug!(parameters = ?params, "Replace documents");
     let params = params.into_inner();
 
     analytics.add_documents(&params, index_scheduler.index(&index_uid).is_err(), &req);
@@ -288,7 +288,7 @@ pub async fn replace_documents(
         allow_index_creation,
     )
     .await?;
-    debug_span!("Replace documents", returns = ?task);
+    debug!(returns = ?task, "Replace documents");
 
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -304,7 +304,7 @@ pub async fn update_documents(
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
     let params = params.into_inner();
-    debug_span!("Update documents", parameters = ?params);
+    debug!(parameters = ?params, "Update documents");
 
     analytics.update_documents(&params, index_scheduler.index(&index_uid).is_err(), &req);
 
@@ -320,7 +320,7 @@ pub async fn update_documents(
         allow_index_creation,
     )
     .await?;
-    debug_span!("Update documents", returns = ?task);
+    debug!(returns = ?task, "Update documents");
 
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -468,7 +468,7 @@ pub async fn delete_documents_batch(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug_span!("Delete documents by batch", parameters = ?body);
+    debug!(parameters = ?body, "Delete documents by batch");
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
     analytics.delete_documents(DocumentDeletionKind::PerBatch, &req);
@@ -483,7 +483,7 @@ pub async fn delete_documents_batch(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug_span!("Delete documents by batch", returns = ?task);
+    debug!(returns = ?task, "Delete documents by batch");
     Ok(HttpResponse::Accepted().json(task))
 }
 
@@ -501,7 +501,7 @@ pub async fn delete_documents_by_filter(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug_span!("Delete documents by filter", parameters = ?body);
+    debug!(parameters = ?body, "Delete documents by filter");
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
     let index_uid = index_uid.into_inner();
     let filter = body.into_inner().filter;
@@ -519,7 +519,7 @@ pub async fn delete_documents_by_filter(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug_span!("Delete documents by filter", returns = ?task);
+    debug!(returns = ?task, "Delete documents by filter");
     Ok(HttpResponse::Accepted().json(task))
 }
 
@@ -536,7 +536,7 @@ pub async fn clear_all_documents(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug_span!("Delete all documents", returns = ?task);
+    debug!(returns = ?task, "Delete all documents");
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -7,7 +7,7 @@ use meilisearch_types::error::deserr_codes::*;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::index_uid::IndexUid;
 use serde_json::Value;
-use tracing::debug_span;
+use tracing::debug;
 
 use crate::analytics::{Analytics, FacetSearchAggregator};
 use crate::extractors::authentication::policies::*;
@@ -56,7 +56,7 @@ pub async fn search(
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
     let query = params.into_inner();
-    debug_span!("Facet search", parameters = ?query);
+    debug!(parameters = ?query, "Facet search");
 
     let mut aggregate = FacetSearchAggregator::from_query(&query, &req);
 
@@ -83,7 +83,7 @@ pub async fn search(
 
     let search_result = search_result?;
 
-    debug_span!("Facet search", returns = ?search_result);
+    debug!(returns = ?search_result, "Facet search");
     Ok(HttpResponse::Ok().json(search_result))
 }
@@ -15,7 +15,7 @@ use meilisearch_types::tasks::KindWithContent;
 use serde::Serialize;
 use serde_json::json;
 use time::OffsetDateTime;
-use tracing::debug_span;
+use tracing::debug;
 
 use super::{Pagination, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT};
 use crate::analytics::Analytics;
@@ -93,7 +93,7 @@ pub async fn list_indexes(
     index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, Data<IndexScheduler>>,
     paginate: AwebQueryParameter<ListIndexes, DeserrQueryParamError>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug_span!("List indexes", parameters = ?paginate);
+    debug!(parameters = ?paginate, "List indexes");
     let filters = index_scheduler.filters();
     let indexes: Vec<Option<IndexView>> =
         index_scheduler.try_for_each_index(|uid, index| -> Result<Option<IndexView>, _> {
@@ -106,7 +106,7 @@ pub async fn list_indexes(
     let indexes: Vec<IndexView> = indexes.into_iter().flatten().collect();
     let ret = paginate.as_pagination().auto_paginate_sized(indexes.into_iter());
 
-    debug_span!("List indexes", returns = ?ret);
+    debug!(returns = ?ret, "List indexes");
     Ok(HttpResponse::Ok().json(ret))
 }
 
@@ -125,7 +125,7 @@ pub async fn create_index(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug_span!("Create index", parameters = ?body);
+    debug!(parameters = ?body, "Create index");
     let IndexCreateRequest { primary_key, uid } = body.into_inner();
 
     let allow_index_creation = index_scheduler.filters().allow_index_creation(&uid);
@@ -139,7 +139,7 @@ pub async fn create_index(
         let task = KindWithContent::IndexCreation { index_uid: uid.to_string(), primary_key };
         let task: SummarizedTaskView =
             tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
-        debug_span!("Create index", returns = ?task);
+        debug!(returns = ?task, "Create index");
 
         Ok(HttpResponse::Accepted().json(task))
     } else {
@@ -180,7 +180,7 @@ pub async fn get_index(
     let index = index_scheduler.index(&index_uid)?;
     let index_view = IndexView::new(index_uid.into_inner(), &index)?;
 
-    debug_span!("Get index", returns = ?index_view);
+    debug!(returns = ?index_view, "Get index");
 
     Ok(HttpResponse::Ok().json(index_view))
 }
@@ -192,7 +192,7 @@ pub async fn update_index(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug_span!("Update index", parameters = ?body);
+    debug!(parameters = ?body, "Update index");
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
     let body = body.into_inner();
     analytics.publish(
@@ -209,7 +209,7 @@ pub async fn update_index(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug_span!("Update index", returns = ?task);
+    debug!(returns = ?task, "Update index");
     Ok(HttpResponse::Accepted().json(task))
 }
 
@@ -221,7 +221,7 @@ pub async fn delete_index(
     let task = KindWithContent::IndexDeletion { index_uid: index_uid.into_inner() };
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
-    debug_span!("Delete index", returns = ?task);
+    debug!(returns = ?task, "Delete index");
 
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -259,6 +259,6 @@ pub async fn get_index_stats(
 
     let stats = IndexStats::from(index_scheduler.index_stats(&index_uid)?);
 
-    debug_span!("Get index stats", returns = ?stats);
+    debug!(returns = ?stats, "Get index stats");
     Ok(HttpResponse::Ok().json(stats))
 }
@@ -11,7 +11,7 @@ use meilisearch_types::milli;
 use meilisearch_types::milli::vector::DistributionShift;
 use meilisearch_types::serde_cs::vec::CS;
 use serde_json::Value;
-use tracing::{debug_span, warn};
+use tracing::{debug, warn};
 
 use crate::analytics::{Analytics, SearchAggregator};
 use crate::extractors::authentication::policies::*;
@@ -186,7 +186,7 @@ pub async fn search_with_url_query(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug_span!("Search get", parameters = ?params);
+    debug!(parameters = ?params, "Search get");
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
     let mut query: SearchQuery = params.into_inner().into();
@@ -213,7 +213,7 @@ pub async fn search_with_url_query(
 
     let search_result = search_result?;
 
-    debug_span!("Search get", returns = ?search_result);
+    debug!(returns = ?search_result, "Search get");
     Ok(HttpResponse::Ok().json(search_result))
 }
 
@@ -227,7 +227,7 @@ pub async fn search_with_post(
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
     let mut query = params.into_inner();
-    debug_span!("Search post", parameters = ?query);
+    debug!(parameters = ?query, "Search post");
 
     // Tenant token search_rules.
     if let Some(search_rules) = index_scheduler.filters().get_index_search_rules(&index_uid) {
@@ -252,7 +252,7 @@ pub async fn search_with_post(
 
     let search_result = search_result?;
 
-    debug_span!("Search post", returns = ?search_result);
+    debug!(returns = ?search_result, "Search post");
     Ok(HttpResponse::Ok().json(search_result))
 }
@@ -10,7 +10,7 @@ use meilisearch_types::milli::update::Setting;
 use meilisearch_types::settings::{settings, RankingRuleView, Settings, Unchecked};
 use meilisearch_types::tasks::KindWithContent;
 use serde_json::json;
-use tracing::{debug_span};
+use tracing::debug;
 
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
@@ -29,7 +29,7 @@ macro_rules! make_setting_route {
         use meilisearch_types::milli::update::Setting;
         use meilisearch_types::settings::{settings, Settings};
         use meilisearch_types::tasks::KindWithContent;
-        use tracing::debug_span;
+        use tracing::debug;
         use $crate::analytics::Analytics;
        use $crate::extractors::authentication::policies::*;
         use $crate::extractors::authentication::GuardedData;
@@ -61,7 +61,7 @@ macro_rules! make_setting_route {
             .await??
             .into();
 
-            debug_span!("Delete settings", returns = ?task);
+            debug!(returns = ?task, "Delete settings");
             Ok(HttpResponse::Accepted().json(task))
         }
 
@@ -78,7 +78,7 @@ macro_rules! make_setting_route {
             let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
             let body = body.into_inner();
-            debug_span!("Update settings", parameters = ?body);
+            debug!(parameters = ?body, "Update settings");
 
             #[allow(clippy::redundant_closure_call)]
             $analytics(&body, &req);
@@ -110,7 +110,7 @@ macro_rules! make_setting_route {
             .await??
             .into();
 
-            debug_span!("Update settings", returns = ?task);
+            debug!(returns = ?task, "Update settings");
             Ok(HttpResponse::Accepted().json(task))
         }
 
@@ -127,7 +127,7 @@ macro_rules! make_setting_route {
             let rtxn = index.read_txn()?;
             let settings = settings(&index, &rtxn)?;
 
-            debug_span!("Update settings", returns = ?settings);
+            debug!(returns = ?settings, "Update settings");
             let mut json = serde_json::json!(&settings);
             let val = json[$camelcase_attr].take();
 
@@ -657,7 +657,7 @@ pub async fn update_all(
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
     let new_settings = body.into_inner();
-    debug_span!("Update all settings", parameters = ?new_settings);
+    debug!(parameters = ?new_settings, "Update all settings");
     let new_settings = validate_settings(new_settings, &index_scheduler)?;
 
     analytics.publish(
@@ -770,7 +770,7 @@ pub async fn update_all(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug_span!("Update all settings", returns = ?task);
+    debug!(returns = ?task, "Update all settings");
     Ok(HttpResponse::Accepted().json(task))
 }
 
@@ -783,7 +783,7 @@ pub async fn get_all(
     let index = index_scheduler.index(&index_uid)?;
     let rtxn = index.read_txn()?;
     let new_settings = settings(&index, &rtxn)?;
-    debug_span!("Get all settings", returns = ?new_settings);
+    debug!(returns = ?new_settings, "Get all settings");
     Ok(HttpResponse::Ok().json(new_settings))
 }
 
@@ -806,7 +806,7 @@ pub async fn delete_all(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug_span!("Delete all settings", returns = ?task);
+    debug!(returns = ?task, "Delete all settings");
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -10,7 +10,7 @@ use meilisearch_types::tasks::{Kind, Status, Task, TaskId};
 use serde::{Deserialize, Serialize};
 use serde_json::json;
 use time::OffsetDateTime;
-use tracing::{debug_span};
+use tracing::debug;
 
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
@@ -252,7 +252,7 @@ async fn get_stats(
 
     let stats = create_all_stats((*index_scheduler).clone(), (*auth_controller).clone(), filters)?;
 
-    debug_span!("Get stats", returns = ?stats);
+    debug!(returns = ?stats, "Get stats");
     Ok(HttpResponse::Ok().json(stats))
 }
@@ -7,7 +7,7 @@ use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::keys::actions;
 use serde::Serialize;
-use tracing::{debug_span};
+use tracing::debug;
 
 use crate::analytics::{Analytics, MultiSearchAggregator};
 use crate::extractors::authentication::policies::ActionPolicy;
@@ -52,7 +52,7 @@ pub async fn multi_search_with_post(
     for (query_index, (index_uid, mut query)) in
         queries.into_iter().map(SearchQueryWithIndex::into_index_query).enumerate()
     {
-        debug_span!("Multi-search", on_index = query_index, parameters = ?query);
+        debug!(on_index = query_index, parameters = ?query, "Multi-search");
 
         // Check index from API key
         if !index_scheduler.filters().is_index_authorized(&index_uid) {
@@ -107,7 +107,7 @@ pub async fn multi_search_with_post(
         err
     })?;
 
-    debug_span!("Multi-search", returns = ?search_results);
+    debug!(returns = ?search_results, "Multi-search");
 
     Ok(HttpResponse::Ok().json(SearchResults { results: search_results }))
 }
@@ -4,7 +4,7 @@ use index_scheduler::IndexScheduler;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::tasks::KindWithContent;
 use serde_json::json;
-use tracing::debug_span;
+use tracing::debug;
 
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
@@ -27,6 +27,6 @@ pub async fn create_snapshot(
     let task: SummarizedTaskView =
         tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug_span!("Create snapshot", returns = ?task);
+    debug!(returns = ?task, "Create snapshot");
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -15,7 +15,7 @@ use std::io::BufReader;
 
 use crossbeam_channel::Sender;
 use rayon::prelude::*;
-use tracing::{debug_span};
+use tracing::debug;
 
 use self::extract_docid_word_positions::extract_docid_word_positions;
 use self::extract_facet_number_docids::extract_facet_number_docids;
@@ -114,7 +114,7 @@ pub(crate) fn data_from_obkv_documents(
     {
         let lmdb_writer_sx = lmdb_writer_sx.clone();
         rayon::spawn(move || {
-            debug_span!("merge", database = "facet-id-exists-docids");
+            debug!(database = "facet-id-exists-docids", "merge");
             match facet_exists_docids_chunks.merge(merge_deladd_cbo_roaring_bitmaps, &indexer) {
                 Ok(reader) => {
                     let _ = lmdb_writer_sx.send(Ok(TypedChunk::FieldIdFacetExistsDocids(reader)));
@@ -130,7 +130,7 @@ pub(crate) fn data_from_obkv_documents(
     {
         let lmdb_writer_sx = lmdb_writer_sx.clone();
         rayon::spawn(move || {
-            debug_span!("merge", database = "facet-id-is-null-docids");
+            debug!(database = "facet-id-is-null-docids", "merge");
             match facet_is_null_docids_chunks.merge(merge_deladd_cbo_roaring_bitmaps, &indexer) {
                 Ok(reader) => {
                     let _ = lmdb_writer_sx.send(Ok(TypedChunk::FieldIdFacetIsNullDocids(reader)));
@@ -146,7 +146,7 @@ pub(crate) fn data_from_obkv_documents(
     {
         let lmdb_writer_sx = lmdb_writer_sx.clone();
         rayon::spawn(move || {
-            debug_span!("merge", database = "facet-id-is-empty-docids");
+            debug!(database = "facet-id-is-empty-docids", "merge");
             match facet_is_empty_docids_chunks.merge(merge_deladd_cbo_roaring_bitmaps, &indexer) {
                 Ok(reader) => {
                     let _ = lmdb_writer_sx.send(Ok(TypedChunk::FieldIdFacetIsEmptyDocids(reader)));
@@ -231,7 +231,7 @@ pub(crate) fn data_from_obkv_documents(
         extract_facet_number_docids,
         merge_deladd_cbo_roaring_bitmaps,
         TypedChunk::FieldIdFacetNumberDocids,
-        "field-id-facet-number-docids",
+        "field-id-facet-number-docidsdexing::details, ",
    );
 
     Ok(())
@@ -261,18 +261,19 @@ fn spawn_extraction_task<FE, FS, M>(
     let current_span = tracing::Span::current();
 
     rayon::spawn(move || {
-        let child_span = tracing::trace_span!(target: "indexing::details", parent: &current_span, "extract_multiple_chunks");
+        let child_span =
+            tracing::trace_span!(target: "", parent: &current_span, "extract_multiple_chunks");
         let _entered = child_span.enter();
-        puffin::profile_scope!("extract_multiple_chunks", name);
+        puffin::profile_scope!("extract_multiple_chunksdexing::details, ", name);
         let chunks: Result<M> =
             chunks.into_par_iter().map(|chunk| extract_fn(chunk, indexer)).collect();
         let current_span = tracing::Span::current();
 
         rayon::spawn(move || match chunks {
             Ok(chunks) => {
-                let child_span = tracing::trace_span!(target: "indexing::details", parent: &current_span, "merge_multiple_chunks");
+                let child_span = tracing::trace_span!(target: "", parent: &current_span, "merge_multiple_chunks");
                 let _entered = child_span.enter();
-                debug_span!("merge", database = name);
+                debug!(database = name, "merge");
                 puffin::profile_scope!("merge_multiple_chunks", name);
                 let reader = chunks.merge(merge_fn, &indexer);
                 let _ = lmdb_writer_sx.send(reader.map(serialize_fn));