Add a requestUid field to search responses and tag debug logs with requestUid

ManyTheFish
2025-08-26 18:05:06 +02:00
parent 4fe073cc1a
commit 1d78637872
6 changed files with 67 additions and 9 deletions
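
The change applies one pattern across every search route: mint a UUID v7 when the request arrives, tag each debug! event with it, and echo it back as an optional requestUid field in the response so a log line can be matched to the payload a client received. Below is a minimal, self-contained sketch of that pattern, not the Meilisearch code itself; the names are illustrative and it assumes the uuid crate (with its v7 and serde features), serde, and tracing.

// Sketch only: one request-scoped UUID, logged and returned.
use serde::Serialize;
use tracing::debug;
use uuid::Uuid;

#[derive(Debug, Serialize)]
struct SketchSearchResult {
    hits: Vec<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    request_uid: Option<Uuid>,
}

fn handle_search(query: &str) -> SketchSearchResult {
    // UUID v7 is time-ordered, so the identifier also encodes roughly when the request arrived.
    let request_uid = Uuid::now_v7();
    debug!(request_uid = ?request_uid, parameters = ?query, "Search get");

    let result = SketchSearchResult { hits: Vec::new(), request_uid: Some(request_uid) };
    debug!(request_uid = ?request_uid, returns = ?result, "Search get");
    result
}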

View File

@@ -13,6 +13,7 @@ use meilisearch_types::serde_cs::vec::CS;
 use serde_json::Value;
 use tracing::debug;
 use utoipa::{IntoParams, OpenApi};
+use uuid::Uuid;

 use crate::analytics::Analytics;
 use crate::error::MeilisearchHttpError;
@@ -325,7 +326,8 @@ pub async fn search_with_url_query(
     req: HttpRequest,
     analytics: web::Data<Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug!(parameters = ?params, "Search get");
+    let request_uid = Uuid::now_v7();
+    debug!(request_uid = ?request_uid, parameters = ?params, "Search get");
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;

     let mut query: SearchQuery = params.into_inner().try_into()?;
@@ -351,6 +353,7 @@ pub async fn search_with_url_query(
             search_kind,
             retrieve_vector,
             index_scheduler.features(),
+            request_uid,
         )
     })
     .await;
@@ -363,7 +366,7 @@ pub async fn search_with_url_query(
     let search_result = search_result?;

-    debug!(returns = ?search_result, "Search get");
+    debug!(request_uid = ?request_uid, returns = ?search_result, "Search get");
     Ok(HttpResponse::Ok().json(search_result))
 }
@@ -432,9 +435,10 @@ pub async fn search_with_post(
     analytics: web::Data<Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
+    let request_uid = Uuid::now_v7();

     let mut query = params.into_inner();
-    debug!(parameters = ?query, "Search post");
+    debug!(request_uid = ?request_uid, parameters = ?query, "Search post");

     // Tenant token search_rules.
     if let Some(search_rules) = index_scheduler.filters().get_index_search_rules(&index_uid) {
@@ -458,6 +462,7 @@ pub async fn search_with_post(
             search_kind,
             retrieve_vectors,
             index_scheduler.features(),
+            request_uid,
         )
     })
     .await;
@@ -473,7 +478,7 @@ pub async fn search_with_post(
     let search_result = search_result?;

-    debug!(returns = ?search_result, "Search post");
+    debug!(request_uid = ?request_uid, returns = ?search_result, "Search post");
     Ok(HttpResponse::Ok().json(search_result))
 }

View File

@@ -234,6 +234,7 @@ impl<Method: AggregateMethod> SearchAggregator<Method> {
             facet_stats: _,
             degraded,
             used_negative_operator,
+            request_uid: _,
         } = result;

         self.total_succeeded = self.total_succeeded.saturating_add(1);

View File

@@ -9,6 +9,7 @@ use meilisearch_types::keys::actions;
 use serde::Serialize;
 use tracing::debug;
 use utoipa::{OpenApi, ToSchema};
+use uuid::Uuid;

 use super::multi_search_analytics::MultiSearchAggregator;
 use crate::analytics::Analytics;
@@ -151,6 +152,7 @@ pub async fn multi_search_with_post(
     // Since we don't want to process half of the search requests and then get a permit refused
     // we're going to get one permit for the whole duration of the multi-search request.
     let permit = search_queue.try_get_search_permit().await?;
+    let request_uid = Uuid::now_v7();

     let federated_search = params.into_inner();
@@ -188,13 +190,26 @@ pub async fn multi_search_with_post(
     let response = match federation {
         Some(federation) => {
+            debug!(
+                request_uid = ?request_uid,
+                federation = ?federation,
+                parameters = ?queries,
+                "Federated-search"
+            );
+
             // check remote header
             let is_proxy = req
                 .headers()
                 .get(PROXY_SEARCH_HEADER)
                 .is_some_and(|value| value.as_bytes() == PROXY_SEARCH_HEADER_VALUE.as_bytes());
-            let search_result =
-                perform_federated_search(&index_scheduler, queries, federation, features, is_proxy)
+            let search_result = perform_federated_search(
+                &index_scheduler,
+                queries,
+                federation,
+                features,
+                is_proxy,
+                request_uid,
+            )
             .await;

             permit.drop().await;
@@ -203,6 +218,13 @@ pub async fn multi_search_with_post(
             }
             analytics.publish(multi_aggregate, &req);

+            debug!(
+                request_uid = ?request_uid,
+                returns = ?search_result,
+                "Federated-search"
+            );
+
             HttpResponse::Ok().json(search_result?)
         }
         None => {
@@ -216,7 +238,12 @@ pub async fn multi_search_with_post(
                 .map(SearchQueryWithIndex::into_index_query_federation)
                 .enumerate()
             {
-                debug!(on_index = query_index, parameters = ?query, "Multi-search");
+                debug!(
+                    request_uid = ?request_uid,
+                    on_index = query_index,
+                    parameters = ?query,
+                    "Multi-search"
+                );

                 if federation_options.is_some() {
                     return Err((
@@ -258,6 +285,7 @@ pub async fn multi_search_with_post(
                         search_kind,
                         retrieve_vector,
                         features,
+                        request_uid,
                     )
                 })
                 .await
@@ -286,7 +314,11 @@ pub async fn multi_search_with_post(
                 err
             })?;

-            debug!(returns = ?search_results, "Multi-search");
+            debug!(
+                request_uid = ?request_uid,
+                returns = ?search_results,
+                "Multi-search"
+            );
             HttpResponse::Ok().json(SearchResults { results: search_results })
         }

View File

@@ -17,6 +17,7 @@ use meilisearch_types::milli::vector::Embedding;
 use meilisearch_types::milli::{self, DocumentId, OrderBy, TimeBudget, DEFAULT_VALUES_PER_FACET};
 use roaring::RoaringBitmap;
 use tokio::task::JoinHandle;
+use uuid::Uuid;

 use super::super::ranking_rules::{self, RankingRules};
 use super::super::{
@@ -39,6 +40,7 @@ pub async fn perform_federated_search(
     federation: Federation,
     features: RoFeatures,
     is_proxy: bool,
+    request_uid: Uuid,
 ) -> Result<FederatedSearchResult, ResponseError> {
     if is_proxy {
         features.check_network("Performing a remote federated search")?;
@@ -170,6 +172,7 @@ pub async fn perform_federated_search(
         facet_stats,
         facets_by_index,
         remote_errors: partitioned_queries.has_remote.then_some(remote_errors),
+        request_uid: Some(request_uid),
     })
 }
@@ -439,6 +442,7 @@ fn merge_metadata(
         degraded: degraded_for_host,
         used_negative_operator: host_used_negative_operator,
         remote_errors: _,
+        request_uid: _,
     } in remote_results
     {
         let this_remote_duration = Duration::from_millis(*processing_time_ms as u64);

View File

@@ -16,6 +16,7 @@ use meilisearch_types::milli::order_by_map::OrderByMap;
 use meilisearch_types::milli::OrderBy;
 use serde::{Deserialize, Serialize};
 use utoipa::ToSchema;
+use uuid::Uuid;

 use super::super::{ComputedFacets, FacetStats, HitsInfo, SearchHit, SearchQueryWithIndex};
 use crate::milli::vector::Embedding;
@@ -131,6 +132,8 @@ pub struct FederatedSearchResult {
     pub facet_stats: Option<BTreeMap<String, FacetStats>>,
     #[serde(default, skip_serializing_if = "FederatedFacets::is_empty")]
     pub facets_by_index: FederatedFacets,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub request_uid: Option<Uuid>,

     #[serde(default, skip_serializing_if = "Option::is_none")]
     pub remote_errors: Option<BTreeMap<String, ResponseError>>,
@@ -156,6 +159,7 @@ impl fmt::Debug for FederatedSearchResult {
             facet_stats,
             facets_by_index,
             remote_errors,
+            request_uid,
         } = self;

         let mut debug = f.debug_struct("SearchResult");
@@ -188,6 +192,9 @@ impl fmt::Debug for FederatedSearchResult {
         if let Some(remote_errors) = remote_errors {
             debug.field("remote_errors", &remote_errors);
         }
+        if let Some(request_uid) = request_uid {
+            debug.field("request_uid", &request_uid);
+        }

         debug.finish()
     }

View File

@@ -36,6 +36,7 @@ use serde_json::{json, Value};
 #[cfg(test)]
 mod mod_test;
 use utoipa::ToSchema;
+use uuid::Uuid;

 use crate::error::MeilisearchHttpError;
@@ -851,6 +852,8 @@ pub struct SearchResult {
     pub facet_distribution: Option<BTreeMap<String, IndexMap<String, u64>>>,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub facet_stats: Option<BTreeMap<String, FacetStats>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub request_uid: Option<Uuid>,

     #[serde(skip_serializing_if = "Option::is_none")]
     pub semantic_hit_count: Option<u32>,
@@ -872,6 +875,7 @@ impl fmt::Debug for SearchResult {
             hits_info,
             facet_distribution,
             facet_stats,
+            request_uid,
             semantic_hit_count,
             degraded,
             used_negative_operator,
@@ -901,6 +905,9 @@ impl fmt::Debug for SearchResult {
         if let Some(semantic_hit_count) = semantic_hit_count {
             debug.field("semantic_hit_count", &semantic_hit_count);
         }
+        if let Some(request_uid) = request_uid {
+            debug.field("request_uid", &request_uid);
+        }

         debug.finish()
     }
@@ -1120,6 +1127,7 @@ pub fn perform_search(
     search_kind: SearchKind,
     retrieve_vectors: RetrieveVectors,
     features: RoFeatures,
+    request_uid: Uuid,
 ) -> Result<SearchResult, ResponseError> {
     let before_search = Instant::now();
     let rtxn = index.read_txn()?;
@@ -1237,6 +1245,7 @@ pub fn perform_search(
         degraded,
         used_negative_operator,
         semantic_hit_count,
+        request_uid: Some(request_uid),
     };

     Ok(result)
 }
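
Because the new field is an Option<Uuid> guarded by skip_serializing_if, code paths that never set it keep their previous response shape, while the normal search paths expose the identifier. A small sketch of that serialized behaviour follows; it assumes the surrounding derive renames fields to camelCase (which the requestUid name in the commit title suggests but this diff does not show), plus serde_json and the uuid crate with its v7 and serde features.

// Sketch only: shows how an optional request_uid field serializes.
use serde::Serialize;
use uuid::Uuid;

#[derive(Serialize)]
#[serde(rename_all = "camelCase")] // assumption: mirrors the real SearchResult derive
struct ResponseSketch {
    #[serde(skip_serializing_if = "Option::is_none")]
    request_uid: Option<Uuid>,
}

fn main() {
    let with_uid = ResponseSketch { request_uid: Some(Uuid::now_v7()) };
    // Prints something like {"requestUid":"0190..."}
    println!("{}", serde_json::to_string(&with_uid).unwrap());

    let without_uid = ResponseSketch { request_uid: None };
    // The field is omitted entirely: {}
    println!("{}", serde_json::to_string(&without_uid).unwrap());
}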