refactor: group perform_search parameters into SearchParams struct
- Create SearchParams struct to group related parameters
- Update perform_search function to use SearchParams instead of 8 individual parameters
- Fix clippy warning about too many arguments
- Update all callers to use new SearchParams struct
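The change follows the common Rust fix for `clippy::too_many_arguments` (which by default warns once a function takes more than seven parameters): gather the values into a plain struct and destructure it at the top of the function. Below is a minimal, self-contained sketch of that pattern with placeholder types and a reduced field set, not the actual Meilisearch signatures; the real change follows in the diff.

// Sketch of the refactor pattern with placeholder types; the real
// SearchParams fields are the ones shown in the diff below.
struct Query(String);

struct SearchParams {
    index_uid: String,
    query: Query,
    include_metadata: bool,
}

// One struct argument instead of a long positional list keeps the
// signature under clippy's default seven-argument limit.
fn perform_search(params: SearchParams) -> Result<String, String> {
    // Destructure once, then use plain bindings in the function body.
    let SearchParams { index_uid, query, include_metadata } = params;
    Ok(format!("index={index_uid}, q={}, metadata={include_metadata}", query.0))
}

fn main() {
    // Call sites name every value, so argument order can no longer be mixed up.
    let result = perform_search(SearchParams {
        index_uid: "movies".to_string(),
        query: Query("dune".to_string()),
        include_metadata: false,
    });
    println!("{result:?}");
}

Compared with adding `#[allow(clippy::too_many_arguments)]`, the struct also makes every call site name its arguments, which is visible in the updated callers in the diff.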
@@ -24,9 +24,10 @@ use crate::metrics::MEILISEARCH_DEGRADED_SEARCH_REQUESTS;
 use crate::routes::indexes::search_analytics::{SearchAggregator, SearchGET, SearchPOST};
 use crate::search::{
 add_search_rules, perform_search, HybridQuery, MatchingStrategy, RankingScoreThreshold,
-RetrieveVectors, SearchKind, SearchQuery, SearchResult, SemanticRatio, DEFAULT_CROP_LENGTH,
-DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG,
-DEFAULT_SEARCH_LIMIT, DEFAULT_SEARCH_OFFSET, DEFAULT_SEMANTIC_RATIO, INCLUDE_METADATA_HEADER,
+RetrieveVectors, SearchKind, SearchParams, SearchQuery, SearchResult, SemanticRatio,
+DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG,
+DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT, DEFAULT_SEARCH_OFFSET, DEFAULT_SEMANTIC_RATIO,
+INCLUDE_METADATA_HEADER,
 };
 use crate::search_queue::SearchQueue;
 
@@ -345,21 +346,20 @@ pub async fn search_with_url_query(
 search_kind(&query, index_scheduler.get_ref(), index_uid.to_string(), &index)?;
 let retrieve_vector = RetrieveVectors::new(query.retrieve_vectors);
 let permit = search_queue.try_get_search_permit().await?;
-let include_metadata = req
-.headers()
-.get(INCLUDE_METADATA_HEADER)
-.is_some();
+let include_metadata = req.headers().get(INCLUDE_METADATA_HEADER).is_some();
 
 let search_result = tokio::task::spawn_blocking(move || {
 perform_search(
-index_uid.to_string(),
-&index,
+SearchParams {
+index_uid: index_uid.to_string(),
 query,
 search_kind,
-retrieve_vector,
-index_scheduler.features(),
+retrieve_vectors: retrieve_vector,
+features: index_scheduler.features(),
 request_uid,
 include_metadata,
+},
+&index,
 )
 })
 .await;
@@ -459,22 +459,21 @@ pub async fn search_with_post(
 search_kind(&query, index_scheduler.get_ref(), index_uid.to_string(), &index)?;
 let retrieve_vectors = RetrieveVectors::new(query.retrieve_vectors);
 
-let include_metadata = req
-.headers()
-.get(INCLUDE_METADATA_HEADER)
-.is_some();
+let include_metadata = req.headers().get(INCLUDE_METADATA_HEADER).is_some();
 
 let permit = search_queue.try_get_search_permit().await?;
 let search_result = tokio::task::spawn_blocking(move || {
 perform_search(
-index_uid.to_string(),
-&index,
+SearchParams {
+index_uid: index_uid.to_string(),
 query,
 search_kind,
 retrieve_vectors,
-index_scheduler.features(),
+features: index_scheduler.features(),
 request_uid,
 include_metadata,
+},
+&index,
 )
 })
 .await;
@@ -20,8 +20,8 @@ use crate::extractors::sequential_extractor::SeqHandler;
 use crate::routes::indexes::search::search_kind;
 use crate::search::{
 add_search_rules, perform_federated_search, perform_search, FederatedSearch,
-FederatedSearchResult, RetrieveVectors, SearchQueryWithIndex, SearchResultWithIndex,
-PROXY_SEARCH_HEADER, PROXY_SEARCH_HEADER_VALUE, INCLUDE_METADATA_HEADER,
+FederatedSearchResult, RetrieveVectors, SearchParams, SearchQueryWithIndex,
+SearchResultWithIndex, INCLUDE_METADATA_HEADER, PROXY_SEARCH_HEADER, PROXY_SEARCH_HEADER_VALUE,
 };
 use crate::search_queue::SearchQueue;
 
@@ -202,10 +202,7 @@ pub async fn multi_search_with_post(
 .headers()
 .get(PROXY_SEARCH_HEADER)
 .is_some_and(|value| value.as_bytes() == PROXY_SEARCH_HEADER_VALUE.as_bytes());
-let include_metadata = req
-.headers()
-.get(INCLUDE_METADATA_HEADER)
-.is_some();
+let include_metadata = req.headers().get(INCLUDE_METADATA_HEADER).is_some();
 let search_result = perform_federated_search(
 &index_scheduler,
 queries,
@@ -233,10 +230,7 @@ pub async fn multi_search_with_post(
 HttpResponse::Ok().json(search_result?)
 }
 None => {
-let include_metadata = req
-.headers()
-.get(INCLUDE_METADATA_HEADER)
-.is_some();
+let include_metadata = req.headers().get(INCLUDE_METADATA_HEADER).is_some();
 
 // Explicitly expect a `(ResponseError, usize)` for the error type rather than `ResponseError` only,
 // so that `?` doesn't work if it doesn't use `with_index`, ensuring that it is not forgotten in case of code
@@ -289,14 +283,16 @@ pub async fn multi_search_with_post(
 
 let search_result = tokio::task::spawn_blocking(move || {
 perform_search(
-index_uid_str.clone(),
-&index,
+SearchParams {
+index_uid: index_uid_str.clone(),
 query,
 search_kind,
-retrieve_vector,
+retrieve_vectors: retrieve_vector,
 features,
 request_uid,
 include_metadata,
+},
+&index,
 )
 })
 .await
@@ -4,7 +4,7 @@ mod types;
 mod weighted_scores;
 
 pub use perform::perform_federated_search;
-pub use proxy::{PROXY_SEARCH_HEADER, PROXY_SEARCH_HEADER_VALUE, INCLUDE_METADATA_HEADER};
+pub use proxy::{INCLUDE_METADATA_HEADER, PROXY_SEARCH_HEADER, PROXY_SEARCH_HEADER_VALUE};
 pub use types::{
 FederatedSearch, FederatedSearchResult, Federation, FederationOptions, MergeFacets,
 };
@@ -118,11 +118,11 @@ pub async fn perform_federated_search(
 let after_waiting_remote_results = std::time::Instant::now();
 
 // 3. merge hits and metadata across indexes and hosts
-// 3.1. merge metadata
+// 3.1. merge federation metadata
 let (estimated_total_hits, degraded, used_negative_operator, facets, max_remote_duration) =
 merge_metadata(&mut results_by_index, &remote_results);
 
-// 3.1.1. Build metadata in the same order as the original queries
+// 3.2. Build metadata in the same order as the original queries
 let query_metadata = if include_metadata {
 let mut query_metadata = Vec::new();
 
@@ -132,11 +132,15 @@ pub async fn perform_federated_search(
 for remote_result in &remote_results {
 if let Some(remote_metadata) = &remote_result.metadata {
 for remote_meta in remote_metadata {
-if let Some(remote_name) = &remote_meta.remote {
-let key = (remote_name.clone(), remote_meta.index_uid.clone());
-if let Some(primary_key) = &remote_meta.primary_key {
-remote_primary_keys.insert(key, primary_key.clone());
-}
+if let SearchMetadata {
+remote: Some(remote_name),
+primary_key: Some(primary_key),
+index_uid,
+..
+} = &remote_meta
+{
+let key = (remote_name, index_uid);
+remote_primary_keys.insert(key, primary_key);
 }
 }
 }
@@ -156,8 +160,8 @@ pub async fn perform_federated_search(
 Some(remote_name) => {
 // For remote queries, try to get primary key from remote results
 // Use composite key (remote, index_uid) to avoid collisions
-let lookup_key = (remote_name.clone(), index_uid.clone());
-remote_primary_keys.get(&lookup_key).cloned()
+let lookup_key = (remote_name, &index_uid);
+remote_primary_keys.get(&lookup_key).map(|pk| pk.to_string())
 }
 None => {
 // For local queries, get primary key from local index
@@ -43,7 +43,8 @@ use crate::error::MeilisearchHttpError;
 mod federated;
 pub use federated::{
 perform_federated_search, FederatedSearch, FederatedSearchResult, Federation,
-FederationOptions, MergeFacets, PROXY_SEARCH_HEADER, PROXY_SEARCH_HEADER_VALUE, INCLUDE_METADATA_HEADER,
+FederationOptions, MergeFacets, INCLUDE_METADATA_HEADER, PROXY_SEARCH_HEADER,
+PROXY_SEARCH_HEADER_VALUE,
 };
 
 mod ranking_rules;
@@ -1138,16 +1139,26 @@ pub fn prepare_search<'t>(
 Ok((search, is_finite_pagination, max_total_hits, offset))
 }
 
-pub fn perform_search(
-index_uid: String,
-index: &Index,
-query: SearchQuery,
-search_kind: SearchKind,
-retrieve_vectors: RetrieveVectors,
-features: RoFeatures,
-request_uid: Uuid,
-include_metadata: bool,
-) -> Result<SearchResult, ResponseError> {
+pub struct SearchParams {
+pub index_uid: String,
+pub query: SearchQuery,
+pub search_kind: SearchKind,
+pub retrieve_vectors: RetrieveVectors,
+pub features: RoFeatures,
+pub request_uid: Uuid,
+pub include_metadata: bool,
+}
+
+pub fn perform_search(params: SearchParams, index: &Index) -> Result<SearchResult, ResponseError> {
+let SearchParams {
+index_uid,
+query,
+search_kind,
+retrieve_vectors,
+features,
+request_uid,
+include_metadata,
+} = params;
 let before_search = Instant::now();
 let index_uid_for_metadata = index_uid.clone();
 let rtxn = index.read_txn()?;
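One detail of the new `perform_search(params: SearchParams, index: &Index)` signature: the borrowed `&Index` stays a separate argument rather than becoming a field of `SearchParams`. A plausible reading, not stated in the commit, is that an owned, reference-free struct needs no lifetime parameter and can be moved straight into the `spawn_blocking` closures shown in the callers above. A rough sketch with placeholder types (a plain thread stands in for the blocking task):

// Placeholder stand-ins; the real types live in meilisearch/milli.
struct Index;

// Owned parameter struct: no references, so no lifetime parameter.
struct SearchParams {
    index_uid: String,
    include_metadata: bool,
}

fn perform_search(params: SearchParams, index: &Index) -> String {
    let SearchParams { index_uid, include_metadata } = params;
    let _ = index; // the index would be read here
    format!("index_uid={index_uid}, include_metadata={include_metadata}")
}

fn main() {
    let params = SearchParams { index_uid: "movies".into(), include_metadata: true };
    let index = Index;
    // Owned params and the index handle both move into the background task,
    // mirroring the tokio::task::spawn_blocking call sites in the diff.
    let out = std::thread::spawn(move || perform_search(params, &index)).join().unwrap();
    println!("{out}");
}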