Mirror of https://github.com/meilisearch/meilisearch.git
Implements the new analytics for the get documents routes
commit 63dded3961
parent 2cdcb703d9
@@ -74,8 +74,8 @@ pub enum DocumentDeletionKind {
 
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
 pub enum DocumentFetchKind {
-    PerDocumentId,
-    Normal { with_filter: bool, limit: usize, offset: usize },
+    PerDocumentId { retrieve_vectors: bool },
+    Normal { with_filter: bool, limit: usize, offset: usize, retrieve_vectors: bool },
 }
 
 pub trait Analytics: Sync + Send {
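This enum change is the pivot of the whole diff: both variants of `DocumentFetchKind` now record whether the caller asked for embedding vectors. A minimal sketch of the consequence for call sites (the enum mirrors the hunk above; the `main` function and its values are purely illustrative):

```rust
// Mirrors the reshaped enum from the hunk above; `main` is illustrative only.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum DocumentFetchKind {
    PerDocumentId { retrieve_vectors: bool },
    Normal { with_filter: bool, limit: usize, offset: usize, retrieve_vectors: bool },
}

fn main() {
    let by_id = DocumentFetchKind::PerDocumentId { retrieve_vectors: true };
    let listing = DocumentFetchKind::Normal {
        with_filter: false,
        limit: 20,
        offset: 0,
        retrieve_vectors: false,
    };
    // Since `PerDocumentId` now carries a field, patterns need `{ .. }` —
    // which is exactly why the `matches!` call later in the diff gains it.
    assert!(matches!(by_id, DocumentFetchKind::PerDocumentId { .. }));
    assert!(matches!(listing, DocumentFetchKind::Normal { retrieve_vectors: false, .. }));
}
```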
@@ -1542,6 +1542,9 @@ pub struct DocumentsFetchAggregator {
     // if a filter was used
     per_filter: bool,
 
+    #[serde(rename = "vector.retrieve_vectors")]
+    retrieve_vectors: bool,
+
     // pagination
     #[serde(rename = "pagination.max_limit")]
     max_limit: usize,
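The `#[serde(rename = "vector.retrieve_vectors")]` attribute keeps the analytics payload's dotted key convention (compare `pagination.max_limit` just below it). A hedged sketch of the resulting JSON, assuming serialization through serde_json; the struct here is a cut-down stand-in, not the real aggregator:

```rust
// Cut-down stand-in for DocumentsFetchAggregator, showing only how the
// rename attributes shape the serialized keys.
use serde::Serialize;

#[derive(Serialize)]
struct Sketch {
    per_filter: bool,
    #[serde(rename = "vector.retrieve_vectors")]
    retrieve_vectors: bool,
    #[serde(rename = "pagination.max_limit")]
    max_limit: usize,
}

fn main() {
    let s = Sketch { per_filter: false, retrieve_vectors: true, max_limit: 20 };
    // Prints: {"per_filter":false,"vector.retrieve_vectors":true,"pagination.max_limit":20}
    println!("{}", serde_json::to_string(&s).unwrap());
}
```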
@@ -1551,18 +1554,21 @@ pub struct DocumentsFetchAggregator {
 
 impl DocumentsFetchAggregator {
     pub fn from_query(query: &DocumentFetchKind, request: &HttpRequest) -> Self {
-        let (limit, offset) = match query {
-            DocumentFetchKind::PerDocumentId => (1, 0),
-            DocumentFetchKind::Normal { limit, offset, .. } => (*limit, *offset),
+        let (limit, offset, retrieve_vectors) = match query {
+            DocumentFetchKind::PerDocumentId { retrieve_vectors } => (1, 0, *retrieve_vectors),
+            DocumentFetchKind::Normal { limit, offset, retrieve_vectors, .. } => {
+                (*limit, *offset, *retrieve_vectors)
+            }
         };
         Self {
             timestamp: Some(OffsetDateTime::now_utc()),
             user_agents: extract_user_agents(request).into_iter().collect(),
             total_received: 1,
-            per_document_id: matches!(query, DocumentFetchKind::PerDocumentId),
+            per_document_id: matches!(query, DocumentFetchKind::PerDocumentId { .. }),
             per_filter: matches!(query, DocumentFetchKind::Normal { with_filter, .. } if *with_filter),
             max_limit: limit,
             max_offset: offset,
+            retrieve_vectors,
         }
     }
 
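`from_query` now destructures a third value out of the match. Restated as a standalone function (`limits_for` is a hypothetical name for illustration; the enum is the one from the first hunk), the mapping is:

```rust
// Restates the match from `from_query` above as a free function; the
// function name is hypothetical, chosen only for this sketch.
enum DocumentFetchKind {
    PerDocumentId { retrieve_vectors: bool },
    Normal { with_filter: bool, limit: usize, offset: usize, retrieve_vectors: bool },
}

fn limits_for(query: &DocumentFetchKind) -> (usize, usize, bool) {
    match query {
        // A fetch by document id is counted as limit 1, offset 0.
        DocumentFetchKind::PerDocumentId { retrieve_vectors } => (1, 0, *retrieve_vectors),
        DocumentFetchKind::Normal { limit, offset, retrieve_vectors, .. } => {
            (*limit, *offset, *retrieve_vectors)
        }
    }
}

fn main() {
    let q = DocumentFetchKind::PerDocumentId { retrieve_vectors: true };
    assert_eq!(limits_for(&q), (1, 0, true));
}
```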
@@ -1576,6 +1582,7 @@ impl DocumentsFetchAggregator {
             per_filter,
             max_limit,
             max_offset,
+            retrieve_vectors,
         } = other;
 
         if self.timestamp.is_none() {
@@ -1591,6 +1598,8 @@ impl DocumentsFetchAggregator {
 
         self.max_limit = self.max_limit.max(max_limit);
         self.max_offset = self.max_offset.max(max_offset);
+
+        self.retrieve_vectors |= retrieve_vectors;
     }
 
     pub fn into_event(self, user: &User, event_name: &str) -> Option<Track> {
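The two aggregation hunks above define how per-request values fold into a batch: limits and offsets keep the running maximum, while the new boolean is OR-ed in, so a single request with `retrieveVectors=true` flags the whole aggregate. A self-contained sketch of that folding rule (struct name and the `Default` derive are illustrative, not the real aggregator):

```rust
// Illustrates the folding rules visible in the diff: `max` for the numeric
// fields, `|=` for the boolean flag.
#[derive(Default)]
struct FetchStats {
    max_limit: usize,
    max_offset: usize,
    retrieve_vectors: bool,
}

impl FetchStats {
    fn aggregate(&mut self, other: FetchStats) {
        self.max_limit = self.max_limit.max(other.max_limit);
        self.max_offset = self.max_offset.max(other.max_offset);
        self.retrieve_vectors |= other.retrieve_vectors;
    }
}

fn main() {
    let mut stats = FetchStats::default();
    stats.aggregate(FetchStats { max_limit: 20, max_offset: 0, retrieve_vectors: false });
    stats.aggregate(FetchStats { max_limit: 5, max_offset: 100, retrieve_vectors: true });
    // One true is enough to mark the whole batch.
    assert_eq!((stats.max_limit, stats.max_offset, stats.retrieve_vectors), (20, 100, true));
}
```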
@@ -110,7 +110,10 @@ pub async fn get_document(
     debug!(parameters = ?params, "Get document");
     let index_uid = IndexUid::try_from(index_uid)?;
 
-    analytics.get_fetch_documents(&DocumentFetchKind::PerDocumentId, &req);
+    analytics.get_fetch_documents(
+        &DocumentFetchKind::PerDocumentId { retrieve_vectors: params.retrieve_vectors.0 },
+        &req,
+    );
 
     let GetDocument { fields, retrieve_vectors } = params.into_inner();
     let attributes_to_retrieve = fields.merge_star_and_none();
@@ -193,6 +196,7 @@ pub async fn documents_by_query_post(
             with_filter: body.filter.is_some(),
             limit: body.limit,
             offset: body.offset,
+            retrieve_vectors: body.retrieve_vectors,
         },
         &req,
     );
@@ -232,6 +236,7 @@ pub async fn get_documents(
             with_filter: query.filter.is_some(),
             limit: query.limit,
             offset: query.offset,
+            retrieve_vectors: query.retrieve_vectors,
         },
         &req,
    );
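The last two route hunks are symmetric: the POST body and the GET query both feed their `retrieve_vectors` flag into `DocumentFetchKind::Normal`. A sketch of that construction, with `BrowseQuery` as a hypothetical stand-in for the route's real query type:

```rust
// `BrowseQuery` and `fetch_kind` are stand-ins for illustration; only the
// `DocumentFetchKind::Normal` construction mirrors the diff.
enum DocumentFetchKind {
    PerDocumentId { retrieve_vectors: bool },
    Normal { with_filter: bool, limit: usize, offset: usize, retrieve_vectors: bool },
}

struct BrowseQuery {
    filter: Option<String>,
    limit: usize,
    offset: usize,
    retrieve_vectors: bool,
}

fn fetch_kind(query: &BrowseQuery) -> DocumentFetchKind {
    DocumentFetchKind::Normal {
        with_filter: query.filter.is_some(),
        limit: query.limit,
        offset: query.offset,
        retrieve_vectors: query.retrieve_vectors,
    }
}

fn main() {
    let q = BrowseQuery {
        filter: Some("genre = horror".into()),
        limit: 10,
        offset: 0,
        retrieve_vectors: true,
    };
    assert!(matches!(fetch_kind(&q), DocumentFetchKind::Normal { with_filter: true, .. }));
    // Fetch-by-id routes report the other variant instead.
    let _ = DocumentFetchKind::PerDocumentId { retrieve_vectors: false };
}
```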