Mirror of https://github.com/meilisearch/meilisearch.git (synced 2025-07-18 04:11:07 +00:00)
Compare commits
6 Commits
prototype-
...
prototype-
| SHA1 |
|---|
| 56fd9637cd |
| 1bb8ace177 |
| e343f9a337 |
| 0725e51335 |
| 0bf35c7cc6 |
| 541d38d9d4 |
@@ -1,7 +1,13 @@
-use vergen::{vergen, Config};
+use vergen::{vergen, Config, SemverKind};
 
 fn main() {
-    if let Err(e) = vergen(Config::default()) {
+    let mut config = Config::default();
+    // allow using non-annotated tags
+    *config.git_mut().semver_kind_mut() = SemverKind::Lightweight;
+    // add -dirty suffix when we're not right on the tag
+    *config.git_mut().semver_dirty_mut() = Some("-dirty");
+
+    if let Err(e) = vergen(config) {
         println!("cargo:warning=vergen: {}", e);
     }
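As a minimal sketch (not part of the changeset, and the tag names are hypothetical), the variable emitted by this build script can be read back at compile time in the application with `option_env!`:

fn main() {
    // e.g. Some("prototype-multi-search-0") on a clean checkout of that tag,
    // Some("prototype-multi-search-0-dirty") when the working tree has local changes,
    // or None if the build script did not emit the variable at all.
    let prototype: Option<&'static str> = option_env!("VERGEN_GIT_SEMVER_LIGHTWEIGHT");
    println!("{prototype:?}");
}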
@@ -26,6 +26,18 @@ impl SearchAggregator {
     pub fn succeed(&mut self, _: &dyn Any) {}
 }
 
+#[derive(Default)]
+pub struct MultiSearchAggregator;
+
+#[allow(dead_code)]
+impl MultiSearchAggregator {
+    pub fn from_queries(_: &dyn Any, _: &dyn Any) -> Self {
+        Self::default()
+    }
+
+    pub fn succeed(&mut self) {}
+}
+
 impl MockAnalytics {
     #[allow(clippy::new_ret_no_self)]
     pub fn new(opt: &Opt) -> Arc<dyn Analytics> {
@@ -43,6 +55,7 @@ impl Analytics for MockAnalytics {
     fn publish(&self, _event_name: String, _send: Value, _request: Option<&HttpRequest>) {}
     fn get_search(&self, _aggregate: super::SearchAggregator) {}
     fn post_search(&self, _aggregate: super::SearchAggregator) {}
+    fn post_multi_search(&self, _aggregate: super::MultiSearchAggregator) {}
     fn add_documents(
         &self,
         _documents_query: &UpdateDocumentsQuery,
@@ -23,6 +23,8 @@ use crate::routes::tasks::TasksFilterQuery;
 pub type SegmentAnalytics = mock_analytics::MockAnalytics;
 #[cfg(any(debug_assertions, not(feature = "analytics")))]
 pub type SearchAggregator = mock_analytics::SearchAggregator;
+#[cfg(any(debug_assertions, not(feature = "analytics")))]
+pub type MultiSearchAggregator = mock_analytics::MultiSearchAggregator;
 
 // if we are in release mode and the feature analytics was enabled
 // we use the real analytics
@@ -30,6 +32,8 @@ pub type SearchAggregator = mock_analytics::SearchAggregator;
 pub type SegmentAnalytics = segment_analytics::SegmentAnalytics;
 #[cfg(all(not(debug_assertions), feature = "analytics"))]
 pub type SearchAggregator = segment_analytics::SearchAggregator;
+#[cfg(all(not(debug_assertions), feature = "analytics"))]
+pub type MultiSearchAggregator = segment_analytics::MultiSearchAggregator;
 
 /// The Meilisearch config dir:
 /// `~/.config/Meilisearch` on *NIX or *BSD.
@@ -74,6 +78,9 @@ pub trait Analytics: Sync + Send {
     /// This method should be called to aggregate a post search
     fn post_search(&self, aggregate: SearchAggregator);
 
+    /// This method should be called to aggregate a post array of searches
+    fn post_multi_search(&self, aggregate: MultiSearchAggregator);
+
     // this method should be called to aggregate a add documents request
     fn add_documents(
         &self,
@@ -30,7 +30,7 @@ use crate::routes::indexes::documents::UpdateDocumentsQuery;
 use crate::routes::tasks::TasksFilterQuery;
 use crate::routes::{create_all_stats, Stats};
 use crate::search::{
-    SearchQuery, SearchResult, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
+    SearchQuery, SearchQueryWithIndex, SearchResult, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
     DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT,
 };
 use crate::Opt;
@@ -68,6 +68,7 @@ pub enum AnalyticsMsg {
     BatchMessage(Track),
     AggregateGetSearch(SearchAggregator),
     AggregatePostSearch(SearchAggregator),
+    AggregatePostMultiSearch(MultiSearchAggregator),
     AggregateAddDocuments(DocumentsAggregator),
     AggregateDeleteDocuments(DocumentsDeletionAggregator),
     AggregateUpdateDocuments(DocumentsAggregator),
@@ -133,6 +134,7 @@ impl SegmentAnalytics {
             opt: opt.clone(),
             batcher,
             post_search_aggregator: SearchAggregator::default(),
+            post_multi_search_aggregator: MultiSearchAggregator::default(),
             get_search_aggregator: SearchAggregator::default(),
             add_documents_aggregator: DocumentsAggregator::default(),
             delete_documents_aggregator: DocumentsDeletionAggregator::default(),
@@ -174,6 +176,10 @@ impl super::Analytics for SegmentAnalytics {
         let _ = self.sender.try_send(AnalyticsMsg::AggregatePostSearch(aggregate));
     }
 
+    fn post_multi_search(&self, aggregate: MultiSearchAggregator) {
+        let _ = self.sender.try_send(AnalyticsMsg::AggregatePostMultiSearch(aggregate));
+    }
+
     fn add_documents(
         &self,
         documents_query: &UpdateDocumentsQuery,
@@ -324,6 +330,7 @@ pub struct Segment {
     batcher: AutoBatcher,
     get_search_aggregator: SearchAggregator,
     post_search_aggregator: SearchAggregator,
+    post_multi_search_aggregator: MultiSearchAggregator,
     add_documents_aggregator: DocumentsAggregator,
     delete_documents_aggregator: DocumentsDeletionAggregator,
     update_documents_aggregator: DocumentsAggregator,
@@ -381,6 +388,7 @@ impl Segment {
                 Some(AnalyticsMsg::BatchMessage(msg)) => drop(self.batcher.push(msg).await),
                 Some(AnalyticsMsg::AggregateGetSearch(agreg)) => self.get_search_aggregator.aggregate(agreg),
                 Some(AnalyticsMsg::AggregatePostSearch(agreg)) => self.post_search_aggregator.aggregate(agreg),
+                Some(AnalyticsMsg::AggregatePostMultiSearch(agreg)) => self.post_multi_search_aggregator.aggregate(agreg),
                 Some(AnalyticsMsg::AggregateAddDocuments(agreg)) => self.add_documents_aggregator.aggregate(agreg),
                 Some(AnalyticsMsg::AggregateDeleteDocuments(agreg)) => self.delete_documents_aggregator.aggregate(agreg),
                 Some(AnalyticsMsg::AggregateUpdateDocuments(agreg)) => self.update_documents_aggregator.aggregate(agreg),
@@ -401,12 +409,19 @@ impl Segment {
         if let Ok(stats) =
             create_all_stats(index_scheduler.into(), auth_controller, &SearchRules::default())
         {
+            // Replace the version number with the prototype name if any.
+            let version = if let Some(prototype) = crate::prototype_name() {
+                prototype
+            } else {
+                env!("CARGO_PKG_VERSION")
+            };
+
             let _ = self
                 .batcher
                 .push(Identify {
                     context: Some(json!({
                         "app": {
-                            "version": env!("CARGO_PKG_VERSION").to_string(),
+                            "version": version.to_string(),
                         },
                     })),
                     user: self.user.clone(),
@@ -419,6 +434,8 @@ impl Segment {
             .into_event(&self.user, "Documents Searched GET");
         let post_search = std::mem::take(&mut self.post_search_aggregator)
             .into_event(&self.user, "Documents Searched POST");
+        let post_multi_search = std::mem::take(&mut self.post_multi_search_aggregator)
+            .into_event(&self.user, "Documents Searched By Array of Queries POST");
         let add_documents = std::mem::take(&mut self.add_documents_aggregator)
             .into_event(&self.user, "Documents Added");
         let delete_documents = std::mem::take(&mut self.delete_documents_aggregator)
@@ -436,6 +453,9 @@ impl Segment {
         if let Some(post_search) = post_search {
             let _ = self.batcher.push(post_search).await;
         }
+        if let Some(post_multi_search) = post_multi_search {
+            let _ = self.batcher.push(post_multi_search).await;
+        }
         if let Some(add_documents) = add_documents {
             let _ = self.batcher.push(add_documents).await;
         }
@@ -709,6 +729,117 @@ impl SearchAggregator {
     }
 }
 
+#[derive(Default)]
+pub struct MultiSearchAggregator {
+    timestamp: Option<OffsetDateTime>,
+
+    // requests
+    total_received: usize,
+    total_succeeded: usize,
+
+    // sum of the number of distinct indexes in each single request, use with total_received to compute an avg
+    total_distinct_index_count: usize,
+    // number of queries with a single index, use with total_received to compute a proportion
+    total_single_index: usize,
+
+    // sum of the number of search queries in the requests, use with total_received to compute an average
+    total_search_count: usize,
+
+    // context
+    user_agents: HashSet<String>,
+}
+
+impl MultiSearchAggregator {
+    pub fn from_queries(query: &[SearchQueryWithIndex], request: &HttpRequest) -> Self {
+        let timestamp = Some(OffsetDateTime::now_utc());
+
+        let user_agents = extract_user_agents(request).into_iter().collect();
+
+        let distinct_indexes: HashSet<_> = query.iter().map(|query| &query.index_uid).collect();
+
+        Self {
+            timestamp,
+            total_received: 1,
+            total_succeeded: 0,
+            total_distinct_index_count: distinct_indexes.len(),
+            total_single_index: if distinct_indexes.len() == 1 { 1 } else { 0 },
+            total_search_count: query.len(),
+            user_agents,
+        }
+    }
+
+    pub fn succeed(&mut self) {
+        self.total_succeeded = self.total_succeeded.saturating_add(1);
+    }
+
+    pub fn aggregate(&mut self, other: Self) {
+        // write the aggregate in a way that will cause a compilation error if a field is added.
+
+        // get ownership of self, replacing it by a default value.
+        let this = std::mem::take(self);
+
+        let timestamp = this.timestamp.or(other.timestamp);
+        let total_received = this.total_received.saturating_add(other.total_received);
+        let total_succeeded = this.total_succeeded.saturating_add(other.total_succeeded);
+        let total_distinct_index_count =
+            this.total_distinct_index_count.saturating_add(other.total_distinct_index_count);
+        let total_single_index = this.total_single_index.saturating_add(other.total_single_index);
+        let total_search_count = this.total_search_count.saturating_add(other.total_search_count);
+        let mut user_agents = this.user_agents;
+
+        for user_agent in other.user_agents.into_iter() {
+            user_agents.insert(user_agent);
+        }
+
+        // need all fields or compile error
+        let mut aggregated = Self {
+            timestamp,
+            total_received,
+            total_succeeded,
+            total_distinct_index_count,
+            total_single_index,
+            total_search_count,
+            user_agents,
+            // do not add _ or ..Default::default() here
+        };
+
+        // replace the default self with the aggregated value
+        std::mem::swap(self, &mut aggregated);
+    }
+
+    pub fn into_event(self, user: &User, event_name: &str) -> Option<Track> {
+        if self.total_received == 0 {
+            None
+        } else {
+            let properties = json!({
+                "user-agent": self.user_agents,
+                "requests": {
+                    "total_succeeded": self.total_succeeded,
+                    "total_failed": self.total_received.saturating_sub(self.total_succeeded), // just to be sure we never panics
+                    "total_received": self.total_received,
+                },
+                "indexes": {
+                    "total_single_index": self.total_single_index,
+                    "total_distinct_index_count": self.total_distinct_index_count,
+                    "avg_distinct_index_count": (self.total_distinct_index_count as f64) / (self.total_received as f64), // not 0 else returned early
+                },
+                "searches": {
+                    "total_search_count": self.total_search_count,
+                    "avg_search_count": (self.total_search_count as f64) / (self.total_received as f64),
+                }
+            });
+
+            Some(Track {
+                timestamp: self.timestamp,
+                user: user.clone(),
+                event: event_name.to_string(),
+                properties,
+                ..Default::default()
+            })
+        }
+    }
+}
+
 #[derive(Default)]
 pub struct DocumentsAggregator {
     timestamp: Option<OffsetDateTime>,
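A toy sketch of the pattern used in `aggregate` above: rebuilding the struct field by field, with no `..Default::default()`, turns a forgotten field into a compile error rather than silently dropped data. The struct and field names here are illustrative only, not from the changeset.

#[derive(Default)]
struct Counters {
    received: usize,
    succeeded: usize,
}

impl Counters {
    fn aggregate(&mut self, other: Self) {
        // take ownership of the current value, leaving a default in its place
        let this = std::mem::take(self);
        *self = Counters {
            received: this.received.saturating_add(other.received),
            succeeded: this.succeeded.saturating_add(other.succeeded),
            // intentionally no `..Default::default()` here: adding a field to
            // `Counters` without updating this constructor fails to compile
        };
    }
}

fn main() {
    let mut a = Counters { received: 2, succeeded: 1 };
    a.aggregate(Counters { received: 3, succeeded: 3 });
    assert_eq!((a.received, a.succeeded), (5, 4));
}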
@@ -427,3 +427,35 @@ pub fn configure_metrics_route(config: &mut web::ServiceConfig, enable_metrics_r
         );
     }
 }
+
+/// Parses the output of
+/// [`VERGEN_GIT_SEMVER_LIGHTWEIGHT`](https://docs.rs/vergen/latest/vergen/struct.Git.html#instructions)
+/// as a prototype name.
+///
+/// Returns `Some(prototype_name)` if the following conditions are met on this value:
+///
+/// 1. starts with `prototype-`,
+/// 2. does not end with `dirty-`,
+/// 3. ends with `-<some_number>`,
+/// 4. does not end with `<some_number>-<some_number>`.
+///
+/// Otherwise, returns `None`.
+pub fn prototype_name() -> Option<&'static str> {
+    let prototype: &'static str = option_env!("VERGEN_GIT_SEMVER_LIGHTWEIGHT")?;
+
+    if prototype.ends_with("-dirty") {
+        return None;
+    }
+
+    if !prototype.starts_with("prototype-") {
+        return None;
+    }
+
+    let mut rsplit_prototype = prototype.rsplit('-');
+    // last component MUST be a number
+    rsplit_prototype.next()?.parse::<u64>().ok()?;
+    // before than last component SHALL NOT be a number
+    rsplit_prototype.next()?.parse::<u64>().err()?;
+
+    Some(prototype)
+}
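A minimal test sketch of the naming rules above, using a hypothetical `is_prototype_name` helper that mirrors the same checks on an arbitrary string (the tag names are made up and not from the changeset):

/// Hypothetical standalone version of the checks in `prototype_name`, taking any
/// string instead of the compile-time VERGEN_GIT_SEMVER_LIGHTWEIGHT value.
fn is_prototype_name(name: &str) -> bool {
    if name.ends_with("-dirty") || !name.starts_with("prototype-") {
        return false;
    }
    let mut rsplit = name.rsplit('-');
    // last component must be a number...
    let last_is_number = rsplit.next().map_or(false, |s| s.parse::<u64>().is_ok());
    // ...and the component before it must not be a number
    let before_is_number = rsplit.next().map_or(false, |s| s.parse::<u64>().is_ok());
    last_is_number && !before_is_number
}

#[test]
fn prototype_name_rules() {
    assert!(is_prototype_name("prototype-multi-search-0"));
    assert!(!is_prototype_name("prototype-multi-search-0-1")); // <number>-<number>
    assert!(!is_prototype_name("prototype-multi-search-0-dirty")); // dirty working tree
    assert!(!is_prototype_name("v1.1.0")); // not a prototype tag
}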
@@ -8,7 +8,7 @@ use actix_web::web::Data;
 use actix_web::HttpServer;
 use index_scheduler::IndexScheduler;
 use meilisearch::analytics::Analytics;
-use meilisearch::{analytics, create_app, setup_meilisearch, Opt};
+use meilisearch::{analytics, create_app, prototype_name, setup_meilisearch, Opt};
 use meilisearch_auth::{generate_master_key, AuthController, MASTER_KEY_MIN_SIZE};
 use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
@@ -137,6 +137,9 @@ pub fn print_launch_resume(
     eprintln!("Commit SHA:\t\t{:?}", commit_sha.to_string());
     eprintln!("Commit date:\t\t{:?}", commit_date.to_string());
     eprintln!("Package version:\t{:?}", env!("CARGO_PKG_VERSION").to_string());
+    if let Some(prototype) = prototype_name() {
+        eprintln!("Prototype:\t\t{:?}", prototype);
+    }
 
     #[cfg(all(not(debug_assertions), feature = "analytics"))]
     {
@@ -2,7 +2,6 @@ use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
 use index_scheduler::IndexScheduler;
 use log::debug;
-use meilisearch_auth::IndexSearchRules;
 use meilisearch_types::deserr::query_params::Param;
 use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError};
 use meilisearch_types::error::deserr_codes::*;
@@ -18,9 +17,9 @@ use crate::extractors::json::ValidatedJson;
 use crate::extractors::query_parameters::QueryParameter;
 use crate::extractors::sequential_extractor::SeqHandler;
 use crate::search::{
-    perform_search, MatchingStrategy, SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
-    DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT,
-    DEFAULT_SEARCH_OFFSET,
+    add_search_rules, perform_search, MatchingStrategy, SearchQuery, DEFAULT_CROP_LENGTH,
+    DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG,
+    DEFAULT_SEARCH_LIMIT, DEFAULT_SEARCH_OFFSET,
 };
 
 pub fn configure(cfg: &mut web::ServiceConfig) {
@@ -102,26 +101,6 @@ impl From<SearchQueryGet> for SearchQuery {
     }
 }
 
-/// Incorporate search rules in search query
-fn add_search_rules(query: &mut SearchQuery, rules: IndexSearchRules) {
-    query.filter = match (query.filter.take(), rules.filter) {
-        (None, rules_filter) => rules_filter,
-        (filter, None) => filter,
-        (Some(filter), Some(rules_filter)) => {
-            let filter = match filter {
-                Value::Array(filter) => filter,
-                filter => vec![filter],
-            };
-            let rules_filter = match rules_filter {
-                Value::Array(rules_filter) => rules_filter,
-                rules_filter => vec![rules_filter],
-            };
-
-            Some(Value::Array([filter, rules_filter].concat()))
-        }
-    }
-}
-
 // TODO: TAMO: split on :asc, and :desc, instead of doing some weird things
 
 /// Transform the sort query parameter into something that matches the post expected format.
@@ -31,6 +31,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
         .service(web::resource("/stats").route(web::get().to(get_stats)))
         .service(web::resource("/version").route(web::get().to(get_version)))
         .service(web::scope("/indexes").configure(indexes::configure))
+        .service(web::scope("/search").configure(multi_search::configure))
         .service(web::scope("/swap-indexes").configure(swap_indexes::configure));
 }
@@ -326,3 +327,5 @@ pub async fn get_health(
 
     Ok(HttpResponse::Ok().json(serde_json::json!({ "status": "available" })))
 }
+
+mod multi_search;
meilisearch/src/routes/multi_search.rs (new file, 68 lines)
@@ -0,0 +1,68 @@
use actix_web::web::{self, Data};
use actix_web::{HttpRequest, HttpResponse};
use index_scheduler::IndexScheduler;
use log::debug;
use meilisearch_types::deserr::DeserrJsonError;
use meilisearch_types::error::ResponseError;
use meilisearch_types::keys::actions;

use crate::analytics::{Analytics, MultiSearchAggregator};
use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::GuardedData;
use crate::extractors::json::ValidatedJson;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::search::{
    add_search_rules, perform_search, SearchQueryWithIndex, SearchResultWithIndex,
};

pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(web::resource("").route(web::post().to(SeqHandler(search_with_post))));
}

pub async fn search_with_post(
    index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
    params: ValidatedJson<Vec<SearchQueryWithIndex>, DeserrJsonError>,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
    let queries = params.into_inner();

    let mut multi_aggregate = MultiSearchAggregator::from_queries(&queries, &req);

    let search_results: Result<_, ResponseError> = (|| {
        async {
            let mut search_results = Vec::with_capacity(queries.len());
            for (index_uid, mut query) in
                queries.into_iter().map(SearchQueryWithIndex::into_index_query)
            {
                debug!("search called with params: {:?}", query);

                // Tenant token search_rules.
                if let Some(search_rules) =
                    index_scheduler.filters().search_rules.get_index_search_rules(&index_uid)
                {
                    add_search_rules(&mut query, search_rules);
                }

                let index = index_scheduler.index(&index_uid)?;
                let search_result =
                    tokio::task::spawn_blocking(move || perform_search(&index, query)).await?;

                search_results.push(SearchResultWithIndex { index_uid, result: search_result? });
            }
            Ok(search_results)
        }
    })()
    .await;

    if search_results.is_ok() {
        multi_aggregate.succeed();
    }
    analytics.post_multi_search(multi_aggregate);

    let search_results = search_results?;

    debug!("returns: {:?}", search_results);

    Ok(HttpResponse::Ok().json(search_results))
}
@@ -5,6 +5,7 @@ use std::time::Instant;
 
 use deserr::DeserializeFromValue;
 use either::Either;
+use meilisearch_auth::IndexSearchRules;
 use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::deserr_codes::*;
 use meilisearch_types::settings::DEFAULT_PAGINATION_MAX_TOTAL_HITS;
@@ -74,6 +75,99 @@ impl SearchQuery {
     }
 }
 
+/// A `SearchQuery` + an index UID.
+// This struct contains the fields of `SearchQuery` inline.
+// This is because neither deserr nor serde support `flatten` when using `deny_unknown_fields.
+// The `From<SearchQueryWithIndex>` implementation ensures both structs remain up to date.
+#[derive(Debug, deserr::DeserializeFromValue)]
+#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
+pub struct SearchQueryWithIndex {
+    pub index_uid: String,
+    #[deserr(default, error = DeserrJsonError<InvalidSearchQ>)]
+    pub q: Option<String>,
+    #[deserr(default = DEFAULT_SEARCH_OFFSET(), error = DeserrJsonError<InvalidSearchOffset>)]
+    pub offset: usize,
+    #[deserr(default = DEFAULT_SEARCH_LIMIT(), error = DeserrJsonError<InvalidSearchLimit>)]
+    pub limit: usize,
+    #[deserr(default, error = DeserrJsonError<InvalidSearchPage>)]
+    pub page: Option<usize>,
+    #[deserr(default, error = DeserrJsonError<InvalidSearchHitsPerPage>)]
+    pub hits_per_page: Option<usize>,
+    #[deserr(default, error = DeserrJsonError<InvalidSearchAttributesToRetrieve>)]
+    pub attributes_to_retrieve: Option<BTreeSet<String>>,
+    #[deserr(default, error = DeserrJsonError<InvalidSearchAttributesToCrop>)]
+    pub attributes_to_crop: Option<Vec<String>>,
+    #[deserr(default, error = DeserrJsonError<InvalidSearchCropLength>, default = DEFAULT_CROP_LENGTH())]
+    pub crop_length: usize,
+    #[deserr(default, error = DeserrJsonError<InvalidSearchAttributesToHighlight>)]
+    pub attributes_to_highlight: Option<HashSet<String>>,
+    #[deserr(default, error = DeserrJsonError<InvalidSearchShowMatchesPosition>, default)]
+    pub show_matches_position: bool,
+    #[deserr(default, error = DeserrJsonError<InvalidSearchFilter>)]
+    pub filter: Option<Value>,
+    #[deserr(default, error = DeserrJsonError<InvalidSearchSort>)]
+    pub sort: Option<Vec<String>>,
+    #[deserr(default, error = DeserrJsonError<InvalidSearchFacets>)]
+    pub facets: Option<Vec<String>>,
+    #[deserr(default, error = DeserrJsonError<InvalidSearchHighlightPreTag>, default = DEFAULT_HIGHLIGHT_PRE_TAG())]
+    pub highlight_pre_tag: String,
+    #[deserr(default, error = DeserrJsonError<InvalidSearchHighlightPostTag>, default = DEFAULT_HIGHLIGHT_POST_TAG())]
+    pub highlight_post_tag: String,
+    #[deserr(default, error = DeserrJsonError<InvalidSearchCropMarker>, default = DEFAULT_CROP_MARKER())]
+    pub crop_marker: String,
+    #[deserr(default, error = DeserrJsonError<InvalidSearchMatchingStrategy>, default)]
+    pub matching_strategy: MatchingStrategy,
+}
+
+impl SearchQueryWithIndex {
+    pub fn into_index_query(self) -> (String, SearchQuery) {
+        let SearchQueryWithIndex {
+            index_uid,
+            q,
+            offset,
+            limit,
+            page,
+            hits_per_page,
+            attributes_to_retrieve,
+            attributes_to_crop,
+            crop_length,
+            attributes_to_highlight,
+            show_matches_position,
+            filter,
+            sort,
+            facets,
+            highlight_pre_tag,
+            highlight_post_tag,
+            crop_marker,
+            matching_strategy,
+        } = self;
+        (
+            index_uid,
+            SearchQuery {
+                q,
+                offset,
+                limit,
+                page,
+                hits_per_page,
+                attributes_to_retrieve,
+                attributes_to_crop,
+                crop_length,
+                attributes_to_highlight,
+                show_matches_position,
+                filter,
+                sort,
+                facets,
+                highlight_pre_tag,
+                highlight_post_tag,
+                crop_marker,
+                matching_strategy,
+                // do not use ..Default::default() here,
+                // rather add any missing field from `SearchQuery` to `SearchQueryWithIndex`
+            },
+        )
+    }
+}
+
 #[derive(Debug, Clone, PartialEq, Eq, DeserializeFromValue)]
 #[deserr(rename_all = camelCase)]
 pub enum MatchingStrategy {
@@ -120,6 +214,14 @@ pub struct SearchResult {
     pub facet_distribution: Option<BTreeMap<String, BTreeMap<String, u64>>>,
 }
 
+#[derive(Serialize, Debug, Clone, PartialEq, Eq)]
+#[serde(rename_all = "camelCase")]
+pub struct SearchResultWithIndex {
+    pub index_uid: String,
+    #[serde(flatten)]
+    pub result: SearchResult,
+}
+
 #[derive(Serialize, Debug, Clone, PartialEq, Eq)]
 #[serde(untagged)]
 pub enum HitsInfo {
@@ -129,6 +231,26 @@ pub enum HitsInfo {
     OffsetLimit { limit: usize, offset: usize, estimated_total_hits: usize },
 }
 
+/// Incorporate search rules in search query
+pub fn add_search_rules(query: &mut SearchQuery, rules: IndexSearchRules) {
+    query.filter = match (query.filter.take(), rules.filter) {
+        (None, rules_filter) => rules_filter,
+        (filter, None) => filter,
+        (Some(filter), Some(rules_filter)) => {
+            let filter = match filter {
+                Value::Array(filter) => filter,
+                filter => vec![filter],
+            };
+            let rules_filter = match rules_filter {
+                Value::Array(rules_filter) => rules_filter,
+                rules_filter => vec![rules_filter],
+            };
+
+            Some(Value::Array([filter, rules_filter].concat()))
+        }
+    }
+}
+
 pub fn perform_search(
     index: &Index,
     query: SearchQuery,
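For intuition, a small self-contained sketch of what this merge produces when both the query and the tenant-token rules carry a filter; the filter strings are made up and not from the changeset.

use serde_json::{json, Value};

fn main() {
    // Hypothetical inputs: a filter sent by the client and a filter enforced by a tenant token.
    let query_filter = json!("genres = horror");
    let rules_filter = json!(["user_id = 12"]);

    // Both values are normalized to arrays and concatenated, so the two constraints apply together.
    let as_vec = |v: Value| match v {
        Value::Array(a) => a,
        v => vec![v],
    };
    let merged = Value::Array([as_vec(query_filter), as_vec(rules_filter)].concat());

    assert_eq!(merged, json!(["genres = horror", "user_id = 12"]));
    println!("{merged}");
}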
@@ -103,6 +103,10 @@ impl Server {
         Index { uid: uid.as_ref().to_string(), service: &self.service, encoder }
     }
 
+    pub async fn search(&self, queries: Value) -> (Value, StatusCode) {
+        self.service.post("/search", queries).await
+    }
+
     pub async fn list_indexes_raw(&self, parameters: &str) -> (Value, StatusCode) {
         self.service.get(format!("/indexes{parameters}")).await
     }
@@ -3,6 +3,7 @@
 
 mod errors;
 mod formatted;
+mod multi;
 mod pagination;
 
 use once_cell::sync::Lazy;
meilisearch/tests/search/multi.rs (new file, 273 lines)
@@ -0,0 +1,273 @@
use meili_snap::{json_string, snapshot};
use serde_json::json;

use super::{DOCUMENTS, NESTED_DOCUMENTS};
use crate::common::Server;

#[actix_rt::test]
async fn search_empty_list() {
    let server = Server::new().await;

    let (response, code) = server.search(json!([])).await;
    snapshot!(code, @"200 OK");
    snapshot!(json_string!(response), @"[]");
}

#[actix_rt::test]
async fn search_json_object() {
    let server = Server::new().await;

    let (response, code) = server.search(json!({})).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "Invalid value type: expected an array, but found an object: `{}`",
      "code": "bad_request",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#bad_request"
    }
    "###);
}

#[actix_rt::test]
async fn simple_search_single_index() {
    let server = Server::new().await;
    let index = server.index("test");

    let documents = DOCUMENTS.clone();
    index.add_documents(documents, None).await;
    index.wait_task(0).await;

    let (response, code) = server
        .search(json!([
            {"indexUid" : "test", "q": "glass"},
            {"indexUid": "test", "q": "captain"},
        ]))
        .await;
    snapshot!(code, @"200 OK");
    insta::assert_json_snapshot!(response, { "[].processingTimeMs" => "[time]" }, @r###"
    [
      {
        "indexUid": "test",
        "hits": [
          {
            "title": "Glass",
            "id": "450465"
          }
        ],
        "query": "glass",
        "processingTimeMs": "[time]",
        "limit": 20,
        "offset": 0,
        "estimatedTotalHits": 1
      },
      {
        "indexUid": "test",
        "hits": [
          {
            "title": "Captain Marvel",
            "id": "299537"
          }
        ],
        "query": "captain",
        "processingTimeMs": "[time]",
        "limit": 20,
        "offset": 0,
        "estimatedTotalHits": 1
      }
    ]
    "###);
}

#[actix_rt::test]
async fn simple_search_two_indexes() {
    let server = Server::new().await;
    let index = server.index("test");

    let documents = DOCUMENTS.clone();
    index.add_documents(documents, None).await;
    index.wait_task(0).await;

    let index = server.index("nested");
    let documents = NESTED_DOCUMENTS.clone();
    index.add_documents(documents, None).await;
    index.wait_task(1).await;

    let (response, code) = server
        .search(json!([
            {"indexUid" : "test", "q": "glass"},
            {"indexUid": "nested", "q": "pesti"},
        ]))
        .await;
    snapshot!(code, @"200 OK");
    insta::assert_json_snapshot!(response, { "[].processingTimeMs" => "[time]" }, @r###"
    [
      {
        "indexUid": "test",
        "hits": [
          {
            "title": "Glass",
            "id": "450465"
          }
        ],
        "query": "glass",
        "processingTimeMs": "[time]",
        "limit": 20,
        "offset": 0,
        "estimatedTotalHits": 1
      },
      {
        "indexUid": "nested",
        "hits": [
          {
            "id": 852,
            "father": "jean",
            "mother": "michelle",
            "doggos": [
              {
                "name": "bobby",
                "age": 2
              },
              {
                "name": "buddy",
                "age": 4
              }
            ],
            "cattos": "pesti"
          },
          {
            "id": 654,
            "father": "pierre",
            "mother": "sabine",
            "doggos": [
              {
                "name": "gros bill",
                "age": 8
              }
            ],
            "cattos": [
              "simba",
              "pestiféré"
            ]
          }
        ],
        "query": "pesti",
        "processingTimeMs": "[time]",
        "limit": 20,
        "offset": 0,
        "estimatedTotalHits": 2
      }
    ]
    "###);
}

#[actix_rt::test]
async fn search_one_index_doesnt_exist() {
    let server = Server::new().await;
    let index = server.index("test");

    let documents = DOCUMENTS.clone();
    index.add_documents(documents, None).await;
    index.wait_task(0).await;

    let (response, code) = server
        .search(json!([
            {"indexUid" : "test", "q": "glass"},
            {"indexUid": "nested", "q": "pesti"},
        ]))
        .await;
    snapshot!(code, @"404 Not Found");
    snapshot!(json_string!(response), @r###"
    {
      "message": "Index `nested` not found.",
      "code": "index_not_found",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#index_not_found"
    }
    "###);
}

#[actix_rt::test]
async fn search_multiple_indexes_dont_exist() {
    let server = Server::new().await;

    let (response, code) = server
        .search(json!([
            {"indexUid" : "test", "q": "glass"},
            {"indexUid": "nested", "q": "pesti"},
        ]))
        .await;
    snapshot!(code, @"404 Not Found");
    snapshot!(json_string!(response), @r###"
    {
      "message": "Index `test` not found.",
      "code": "index_not_found",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#index_not_found"
    }
    "###);
}

#[actix_rt::test]
async fn search_one_query_error() {
    let server = Server::new().await;

    let index = server.index("test");

    let documents = DOCUMENTS.clone();
    index.add_documents(documents, None).await;
    index.wait_task(0).await;

    let index = server.index("nested");
    let documents = NESTED_DOCUMENTS.clone();
    index.add_documents(documents, None).await;
    index.wait_task(1).await;

    let (response, code) = server
        .search(json!([
            {"indexUid" : "test", "q": "glass", "facets": ["title"]},
            {"indexUid": "nested", "q": "pesti"},
        ]))
        .await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "Invalid facet distribution, this index does not have configured filterable attributes.",
      "code": "invalid_search_facets",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid_search_facets"
    }
    "###);
}

#[actix_rt::test]
async fn search_multiple_query_errors() {
    let server = Server::new().await;

    let index = server.index("test");

    let documents = DOCUMENTS.clone();
    index.add_documents(documents, None).await;
    index.wait_task(0).await;

    let index = server.index("nested");
    let documents = NESTED_DOCUMENTS.clone();
    index.add_documents(documents, None).await;
    index.wait_task(1).await;

    let (response, code) = server
        .search(json!([
            {"indexUid" : "test", "q": "glass", "facets": ["title"]},
            {"indexUid": "nested", "q": "pesti", "facets": ["doggos"]},
        ]))
        .await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "Invalid facet distribution, this index does not have configured filterable attributes.",
      "code": "invalid_search_facets",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid_search_facets"
    }
    "###);
}