Mirror of https://github.com/meilisearch/meilisearch.git, synced 2025-09-06 04:36:32 +00:00
Merge branch 'main' into fragment-filters
@@ -223,8 +223,8 @@ pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<(Arc<IndexScheduler>, Arc<
indexes_path: opt.db_path.join("indexes"),
snapshots_path: opt.snapshot_dir.clone(),
dumps_path: opt.dump_dir.clone(),
webhook_url: opt.task_webhook_url.as_ref().map(|url| url.to_string()),
webhook_authorization_header: opt.task_webhook_authorization_header.clone(),
cli_webhook_url: opt.task_webhook_url.as_ref().map(|url| url.to_string()),
cli_webhook_authorization: opt.task_webhook_authorization_header.clone(),
task_db_size: opt.max_task_db_size.as_u64() as usize,
index_base_map_size: opt.max_index_size.as_u64() as usize,
enable_mdb_writemap: opt.experimental_reduce_indexing_memory_usage,
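
In this hunk the scheduler options for the command-line webhook gain a `cli_` prefix so they can coexist with webhooks registered at runtime through the new `/webhooks` API. A minimal, hypothetical stand-in for just the two renamed fields (the real IndexSchedulerOptions type has many more):

// Hypothetical sketch; field names taken from the hunk above, everything else assumed.
struct CliWebhook {
    cli_webhook_url: Option<String>,
    cli_webhook_authorization: Option<String>,
}

fn cli_webhook(url: Option<&str>, auth: Option<&str>) -> CliWebhook {
    CliWebhook {
        // Mirrors `opt.task_webhook_url.as_ref().map(|url| url.to_string())` above.
        cli_webhook_url: url.map(str::to_string),
        cli_webhook_authorization: auth.map(str::to_string),
    }
}

fn main() {
    let hook = cli_webhook(Some("https://your.site/on-tasks-completed"), None);
    assert_eq!(hook.cli_webhook_url.as_deref(), Some("https://your.site/on-tasks-completed"));
    assert!(hook.cli_webhook_authorization.is_none());
}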
@@ -491,7 +491,12 @@ fn import_dump(
let _ = std::fs::write(db_path.join("instance-uid"), instance_uid.to_string().as_bytes());
};

// 2. Import the `Key`s.
// 2. Import the webhooks
if let Some(webhooks) = dump_reader.webhooks() {
index_scheduler.update_runtime_webhooks(webhooks.webhooks.clone())?;
}

// 3. Import the `Key`s.
let mut keys = Vec::new();
auth.raw_delete_all_keys()?;
for key in dump_reader.keys()? {
@@ -500,20 +505,20 @@ fn import_dump(
keys.push(key);
}

// 3. Import the `ChatCompletionSettings`s.
// 4. Import the `ChatCompletionSettings`s.
for result in dump_reader.chat_completions_settings()? {
let (name, settings) = result?;
index_scheduler.put_chat_settings(&name, &settings)?;
}

// 4. Import the runtime features and network
// 5. Import the runtime features and network
let features = dump_reader.features()?.unwrap_or_default();
index_scheduler.put_runtime_features(features)?;

let network = dump_reader.network()?.cloned().unwrap_or_default();
index_scheduler.put_network(network)?;

// 4.1 Use all cpus to process dump if `max_indexing_threads` not configured
// 5.1 Use all cpus to process dump if `max_indexing_threads` not configured
let backup_config;
let base_config = index_scheduler.indexer_config();

@@ -530,7 +535,7 @@ fn import_dump(
// /!\ The tasks must be imported AFTER importing the indexes or else the scheduler might
// try to process tasks while we're trying to import the indexes.

// 5. Import the indexes.
// 6. Import the indexes.
for index_reader in dump_reader.indexes()? {
let mut index_reader = index_reader?;
let metadata = index_reader.metadata();
@@ -543,12 +548,12 @@ fn import_dump(
let mut wtxn = index.write_txn()?;

let mut builder = milli::update::Settings::new(&mut wtxn, &index, indexer_config);
// 5.1 Import the primary key if there is one.
// 6.1 Import the primary key if there is one.
if let Some(ref primary_key) = metadata.primary_key {
builder.set_primary_key(primary_key.to_string());
}

// 5.2 Import the settings.
// 6.2 Import the settings.
tracing::info!("Importing the settings.");
let settings = index_reader.settings()?;
apply_settings_to_builder(&settings, &mut builder);
@@ -560,8 +565,8 @@ fn import_dump(
let rtxn = index.read_txn()?;

if index_scheduler.no_edition_2024_for_dumps() {
// 5.3 Import the documents.
// 5.3.1 We need to recreate the grenad+obkv format accepted by the index.
// 6.3 Import the documents.
// 6.3.1 We need to recreate the grenad+obkv format accepted by the index.
tracing::info!("Importing the documents.");
let file = tempfile::tempfile()?;
let mut builder = DocumentsBatchBuilder::new(BufWriter::new(file));
@@ -572,7 +577,7 @@ fn import_dump(
// This flush the content of the batch builder.
let file = builder.into_inner()?.into_inner()?;

// 5.3.2 We feed it to the milli index.
// 6.3.2 We feed it to the milli index.
let reader = BufReader::new(file);
let reader = DocumentsBatchReader::from_reader(reader)?;

@@ -651,15 +656,15 @@ fn import_dump(
index_scheduler.refresh_index_stats(&uid)?;
}

// 6. Import the queue
// 7. Import the queue
let mut index_scheduler_dump = index_scheduler.register_dumped_task()?;
// 6.1. Import the batches
// 7.1. Import the batches
for ret in dump_reader.batches()? {
let batch = ret?;
index_scheduler_dump.register_dumped_batch(batch)?;
}

// 6.2. Import the tasks
// 7.2. Import the tasks
for ret in dump_reader.tasks()? {
let (task, file) = ret?;
index_scheduler_dump.register_dumped_task(task, file)?;

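After this change, webhooks stored in a dump are restored right after the instance UID and before the API keys, and every later step comment is renumbered. A hypothetical, comment-level summary of the resulting order (step 1 is not visible in these hunks):

// Assumed summary of the renumbered import_dump steps shown above.
const DUMP_IMPORT_STEPS: &[&str] = &[
    "2. webhooks (index_scheduler.update_runtime_webhooks)",
    "3. API keys",
    "4. chat completion settings",
    "5. runtime features and network",
    "6. indexes: primary key, settings, documents",
    "7. queue: batches, then tasks",
];

fn main() {
    for step in DUMP_IMPORT_STEPS {
        println!("{step}");
    }
}
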
@@ -206,11 +206,13 @@ pub struct Opt {
pub env: String,

/// Called whenever a task finishes so a third party can be notified.
/// See also the dedicated API `/webhooks`.
#[clap(long, env = MEILI_TASK_WEBHOOK_URL)]
pub task_webhook_url: Option<Url>,

/// The Authorization header to send on the webhook URL whenever
/// a task finishes so a third party can be notified.
/// See also the dedicated API `/webhooks`.
#[clap(long, env = MEILI_TASK_WEBHOOK_AUTHORIZATION_HEADER)]
pub task_webhook_authorization_header: Option<String>,

@@ -226,6 +226,7 @@ impl<Method: AggregateMethod> SearchAggregator<Method> {
let SearchResult {
hits: _,
query: _,
query_vector: _,
processing_time_ms,
hits_info: _,
semantic_hit_count: _,

@@ -511,7 +511,7 @@ make_setting_routes!(
},
{
route: "/chat",
update_verb: put,
update_verb: patch,
value_type: ChatSettings,
err_type: meilisearch_types::deserr::DeserrJsonError<
meilisearch_types::error::deserr_codes::InvalidSettingsIndexChat,

@@ -41,6 +41,7 @@ use crate::routes::indexes::IndexView;
use crate::routes::multi_search::SearchResults;
use crate::routes::network::{Network, Remote};
use crate::routes::swap_indexes::SwapIndexesPayload;
use crate::routes::webhooks::{WebhookResults, WebhookSettings, WebhookWithMetadata};
use crate::search::{
FederatedSearch, FederatedSearchResult, Federation, FederationOptions, MergeFacets,
SearchQueryWithIndex, SearchResultWithIndex, SimilarQuery, SimilarResult,
@@ -70,6 +71,7 @@ mod swap_indexes;
pub mod tasks;
#[cfg(test)]
mod tasks_test;
mod webhooks;

#[derive(OpenApi)]
#[openapi(
@@ -89,6 +91,7 @@ mod tasks_test;
(path = "/experimental-features", api = features::ExperimentalFeaturesApi),
(path = "/export", api = export::ExportApi),
(path = "/network", api = network::NetworkApi),
(path = "/webhooks", api = webhooks::WebhooksApi),
),
paths(get_health, get_version, get_stats),
tags(
@@ -99,7 +102,7 @@ mod tasks_test;
url = "/",
description = "Local server",
)),
components(schemas(PaginationView<KeyView>, PaginationView<IndexView>, IndexView, DocumentDeletionByFilter, AllBatches, BatchStats, ProgressStepView, ProgressView, BatchView, RuntimeTogglableFeatures, SwapIndexesPayload, DocumentEditionByFunction, MergeFacets, FederationOptions, SearchQueryWithIndex, Federation, FederatedSearch, FederatedSearchResult, SearchResults, SearchResultWithIndex, SimilarQuery, SimilarResult, PaginationView<serde_json::Value>, BrowseQuery, UpdateIndexRequest, IndexUid, IndexCreateRequest, KeyView, Action, CreateApiKey, UpdateStderrLogs, LogMode, GetLogs, IndexStats, Stats, HealthStatus, HealthResponse, VersionResponse, Code, ErrorType, AllTasks, TaskView, Status, DetailsView, ResponseError, Settings<Unchecked>, Settings<Checked>, TypoSettings, MinWordSizeTyposSetting, FacetingSettings, PaginationSettings, SummarizedTaskView, Kind, Network, Remote, FilterableAttributesRule, FilterableAttributesPatterns, AttributePatterns, FilterableAttributesFeatures, FilterFeatures, Export))
components(schemas(PaginationView<KeyView>, PaginationView<IndexView>, IndexView, DocumentDeletionByFilter, AllBatches, BatchStats, ProgressStepView, ProgressView, BatchView, RuntimeTogglableFeatures, SwapIndexesPayload, DocumentEditionByFunction, MergeFacets, FederationOptions, SearchQueryWithIndex, Federation, FederatedSearch, FederatedSearchResult, SearchResults, SearchResultWithIndex, SimilarQuery, SimilarResult, PaginationView<serde_json::Value>, BrowseQuery, UpdateIndexRequest, IndexUid, IndexCreateRequest, KeyView, Action, CreateApiKey, UpdateStderrLogs, LogMode, GetLogs, IndexStats, Stats, HealthStatus, HealthResponse, VersionResponse, Code, ErrorType, AllTasks, TaskView, Status, DetailsView, ResponseError, Settings<Unchecked>, Settings<Checked>, TypoSettings, MinWordSizeTyposSetting, FacetingSettings, PaginationSettings, SummarizedTaskView, Kind, Network, Remote, FilterableAttributesRule, FilterableAttributesPatterns, AttributePatterns, FilterableAttributesFeatures, FilterFeatures, Export, WebhookSettings, WebhookResults, WebhookWithMetadata))
)]
pub struct MeilisearchApi;

@@ -120,7 +123,8 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
.service(web::scope("/experimental-features").configure(features::configure))
.service(web::scope("/network").configure(network::configure))
.service(web::scope("/export").configure(export::configure))
.service(web::scope("/chats").configure(chats::configure));
.service(web::scope("/chats").configure(chats::configure))
.service(web::scope("/webhooks").configure(webhooks::configure));

#[cfg(feature = "swagger")]
{

@@ -51,7 +51,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
get,
path = "",
tag = "Network",
security(("Bearer" = ["network.get", "network.*", "*"])),
security(("Bearer" = ["network.get", "*"])),
responses(
(status = OK, description = "Known nodes are returned", body = Network, content_type = "application/json", example = json!(
{
@@ -168,7 +168,7 @@ impl Aggregate for PatchNetworkAnalytics {
path = "",
tag = "Network",
request_body = Network,
security(("Bearer" = ["network.update", "network.*", "*"])),
security(("Bearer" = ["network.update", "*"])),
responses(
(status = OK, description = "New network state is returned", body = Network, content_type = "application/json", example = json!(
{

crates/meilisearch/src/routes/webhooks.rs (new file, 474 lines)
@@ -0,0 +1,474 @@
use std::collections::BTreeMap;
use std::str::FromStr;

use actix_http::header::{
HeaderName, HeaderValue, InvalidHeaderName as ActixInvalidHeaderName,
InvalidHeaderValue as ActixInvalidHeaderValue,
};
use actix_web::web::{self, Data, Path};
use actix_web::{HttpRequest, HttpResponse};
use core::convert::Infallible;
use deserr::actix_web::AwebJson;
use deserr::{DeserializeError, Deserr, ValuePointerRef};
use index_scheduler::IndexScheduler;
use meilisearch_types::deserr::{immutable_field_error, DeserrJsonError};
use meilisearch_types::error::deserr_codes::{
BadRequest, InvalidWebhookHeaders, InvalidWebhookUrl,
};
use meilisearch_types::error::{Code, ErrorCode, ResponseError};
use meilisearch_types::keys::actions;
use meilisearch_types::milli::update::Setting;
use meilisearch_types::webhooks::Webhook;
use serde::Serialize;
use tracing::debug;
use url::Url;
use utoipa::{OpenApi, ToSchema};
use uuid::Uuid;

use crate::analytics::{Aggregate, Analytics};
use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
use WebhooksError::*;

#[derive(OpenApi)]
#[openapi(
paths(get_webhooks, get_webhook, post_webhook, patch_webhook, delete_webhook),
tags((
name = "Webhooks",
description = "The `/webhooks` route allows you to register endpoints to be called once tasks are processed.",
external_docs(url = "https://www.meilisearch.com/docs/reference/api/webhooks"),
)),
)]
pub struct WebhooksApi;

pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(
web::resource("")
.route(web::get().to(get_webhooks))
.route(web::post().to(SeqHandler(post_webhook))),
)
.service(
web::resource("/{uuid}")
.route(web::get().to(get_webhook))
.route(web::patch().to(SeqHandler(patch_webhook)))
.route(web::delete().to(SeqHandler(delete_webhook))),
);
}
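
For quick reference, a hypothetical summary of the scope wired up in the `configure` function above (only the write handlers go through `SeqHandler`):

// Assumed method/path/handler mapping for the scope configured above.
const WEBHOOK_ROUTES: &[(&str, &str, &str)] = &[
    ("GET", "/webhooks", "get_webhooks"),
    ("POST", "/webhooks", "post_webhook (SeqHandler)"),
    ("GET", "/webhooks/{uuid}", "get_webhook"),
    ("PATCH", "/webhooks/{uuid}", "patch_webhook (SeqHandler)"),
    ("DELETE", "/webhooks/{uuid}", "delete_webhook (SeqHandler)"),
];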

#[derive(Debug, Deserr, ToSchema)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_webhook)]
#[serde(rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
pub(super) struct WebhookSettings {
#[schema(value_type = Option<String>, example = "https://your.site/on-tasks-completed")]
#[deserr(default, error = DeserrJsonError<InvalidWebhookUrl>)]
#[serde(default)]
url: Setting<String>,
#[schema(value_type = Option<BTreeMap<String, String>>, example = json!({"Authorization":"Bearer a-secret-token"}))]
#[deserr(default, error = DeserrJsonError<InvalidWebhookHeaders>)]
#[serde(default)]
headers: Setting<BTreeMap<String, Setting<String>>>,
}

fn deny_immutable_fields_webhook(
field: &str,
accepted: &[&str],
location: ValuePointerRef,
) -> DeserrJsonError {
match field {
"uuid" => immutable_field_error(field, accepted, Code::ImmutableWebhookUuid),
"isEditable" => immutable_field_error(field, accepted, Code::ImmutableWebhookIsEditable),
_ => deserr::take_cf_content(DeserrJsonError::<BadRequest>::error::<Infallible>(
None,
deserr::ErrorKind::UnknownKey { key: field, accepted },
location,
)),
}
}

#[derive(Debug, Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
pub(super) struct WebhookWithMetadata {
uuid: Uuid,
is_editable: bool,
#[schema(value_type = WebhookSettings)]
#[serde(flatten)]
webhook: Webhook,
}

impl WebhookWithMetadata {
pub fn from(uuid: Uuid, webhook: Webhook) -> Self {
Self { uuid, is_editable: uuid != Uuid::nil(), webhook }
}
}

#[derive(Debug, Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub(super) struct WebhookResults {
results: Vec<WebhookWithMetadata>,
}

#[utoipa::path(
get,
path = "",
tag = "Webhooks",
security(("Bearer" = ["webhooks.get", "webhooks.*", "*.get", "*"])),
responses(
(status = OK, description = "Webhooks are returned", body = WebhookResults, content_type = "application/json", example = json!({
"results": [
{
"uuid": "550e8400-e29b-41d4-a716-446655440000",
"url": "https://your.site/on-tasks-completed",
"headers": {
"Authorization": "Bearer a-secret-token"
},
"isEditable": true
},
{
"uuid": "550e8400-e29b-41d4-a716-446655440001",
"url": "https://another.site/on-tasks-completed",
"isEditable": true
}
]
})),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
async fn get_webhooks(
index_scheduler: GuardedData<ActionPolicy<{ actions::WEBHOOKS_GET }>, Data<IndexScheduler>>,
) -> Result<HttpResponse, ResponseError> {
let webhooks = index_scheduler.webhooks_view();
let results = webhooks
.webhooks
.into_iter()
.map(|(uuid, webhook)| WebhookWithMetadata::from(uuid, webhook))
.collect::<Vec<_>>();
let results = WebhookResults { results };

debug!(returns = ?results, "Get webhooks");
Ok(HttpResponse::Ok().json(results))
}

#[derive(Serialize, Default)]
pub struct PatchWebhooksAnalytics;

impl Aggregate for PatchWebhooksAnalytics {
fn event_name(&self) -> &'static str {
"Webhooks Updated"
}

fn aggregate(self: Box<Self>, _new: Box<Self>) -> Box<Self> {
self
}

fn into_event(self: Box<Self>) -> serde_json::Value {
serde_json::to_value(*self).unwrap_or_default()
}
}

#[derive(Serialize, Default)]
pub struct PostWebhooksAnalytics;

impl Aggregate for PostWebhooksAnalytics {
fn event_name(&self) -> &'static str {
"Webhooks Created"
}

fn aggregate(self: Box<Self>, _new: Box<Self>) -> Box<Self> {
self
}

fn into_event(self: Box<Self>) -> serde_json::Value {
serde_json::to_value(*self).unwrap_or_default()
}
}

#[derive(Debug, thiserror::Error)]
enum WebhooksError {
#[error("The URL for the webhook `{0}` is missing.")]
MissingUrl(Uuid),
#[error("Defining too many webhooks would crush the server. Please limit the number of webhooks to 20. You may use a third-party proxy server to dispatch events to more than 20 endpoints.")]
TooManyWebhooks,
#[error("Too many headers for the webhook `{0}`. Please limit the number of headers to 200. Hint: To remove an already defined header set its value to `null`")]
TooManyHeaders(Uuid),
#[error("Webhook `{0}` is immutable. The webhook defined from the command line cannot be modified using the API.")]
ImmutableWebhook(Uuid),
#[error("Webhook `{0}` not found.")]
WebhookNotFound(Uuid),
#[error("Invalid header name `{0}`: {1}")]
InvalidHeaderName(String, ActixInvalidHeaderName),
#[error("Invalid header value `{0}`: {1}")]
InvalidHeaderValue(String, ActixInvalidHeaderValue),
#[error("Invalid URL `{0}`: {1}")]
InvalidUrl(String, url::ParseError),
#[error("Invalid UUID: {0}")]
InvalidUuid(uuid::Error),
}

impl ErrorCode for WebhooksError {
fn error_code(&self) -> meilisearch_types::error::Code {
match self {
MissingUrl(_) => meilisearch_types::error::Code::InvalidWebhookUrl,
TooManyWebhooks => meilisearch_types::error::Code::InvalidWebhooks,
TooManyHeaders(_) => meilisearch_types::error::Code::InvalidWebhookHeaders,
ImmutableWebhook(_) => meilisearch_types::error::Code::ImmutableWebhook,
WebhookNotFound(_) => meilisearch_types::error::Code::WebhookNotFound,
InvalidHeaderName(_, _) => meilisearch_types::error::Code::InvalidWebhookHeaders,
InvalidHeaderValue(_, _) => meilisearch_types::error::Code::InvalidWebhookHeaders,
InvalidUrl(_, _) => meilisearch_types::error::Code::InvalidWebhookUrl,
InvalidUuid(_) => meilisearch_types::error::Code::InvalidWebhookUuid,
}
}
}

fn patch_webhook_inner(
uuid: &Uuid,
old_webhook: Webhook,
new_webhook: WebhookSettings,
) -> Result<Webhook, WebhooksError> {
let Webhook { url: old_url, mut headers } = old_webhook;

let url = match new_webhook.url {
Setting::Set(url) => url,
Setting::NotSet => old_url,
Setting::Reset => return Err(MissingUrl(uuid.to_owned())),
};

match new_webhook.headers {
Setting::Set(new_headers) => {
for (name, value) in new_headers {
match value {
Setting::Set(value) => {
headers.insert(name, value);
}
Setting::NotSet => continue,
Setting::Reset => {
headers.remove(&name);
continue;
}
}
}
}
Setting::Reset => headers.clear(),
Setting::NotSet => (),
};

if headers.len() > 200 {
return Err(TooManyHeaders(uuid.to_owned()));
}

Ok(Webhook { url, headers })
}
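
The merge rules of `patch_webhook_inner` (Set overrides, NotSet keeps, Reset deletes a header or clears them all, and a Reset URL is rejected) can be illustrated with a self-contained sketch that uses a simplified stand-in for milli's `Setting` type:

use std::collections::BTreeMap;

// Simplified stand-in for meilisearch_types::milli::update::Setting.
enum Setting<T> {
    Set(T),
    Reset,
    NotSet,
}

// Mirrors the header-merging rules of patch_webhook_inner above.
fn merge_headers(
    mut headers: BTreeMap<String, String>,
    patch: Setting<BTreeMap<String, Setting<String>>>,
) -> BTreeMap<String, String> {
    match patch {
        Setting::Set(new_headers) => {
            for (name, value) in new_headers {
                match value {
                    Setting::Set(value) => {
                        headers.insert(name, value);
                    }
                    Setting::NotSet => continue,
                    Setting::Reset => {
                        headers.remove(&name);
                    }
                }
            }
        }
        Setting::Reset => headers.clear(),
        Setting::NotSet => (),
    }
    headers
}

fn main() {
    let current = BTreeMap::from([
        ("Authorization".to_string(), "Bearer old".to_string()),
        ("X-Extra".to_string(), "1".to_string()),
    ]);
    // Override one header and delete another; untouched ones are kept.
    let patch = Setting::Set(BTreeMap::from([
        ("Authorization".to_string(), Setting::Set("Bearer new".to_string())),
        ("X-Extra".to_string(), Setting::Reset),
    ]));
    let merged = merge_headers(current, patch);
    assert_eq!(merged.get("Authorization").map(String::as_str), Some("Bearer new"));
    assert!(!merged.contains_key("X-Extra"));
}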

fn check_changed(uuid: Uuid, webhook: &Webhook) -> Result<(), WebhooksError> {
if uuid.is_nil() {
return Err(ImmutableWebhook(uuid));
}

if webhook.url.is_empty() {
return Err(MissingUrl(uuid));
}

if webhook.headers.len() > 200 {
return Err(TooManyHeaders(uuid));
}

for (header, value) in &webhook.headers {
HeaderName::from_bytes(header.as_bytes())
.map_err(|e| InvalidHeaderName(header.to_owned(), e))?;
HeaderValue::from_str(value).map_err(|e| InvalidHeaderValue(header.to_owned(), e))?;
}

if let Err(e) = Url::parse(&webhook.url) {
return Err(InvalidUrl(webhook.url.to_owned(), e));
}

Ok(())
}

#[utoipa::path(
get,
path = "/{uuid}",
tag = "Webhooks",
security(("Bearer" = ["webhooks.get", "webhooks.*", "*.get", "*"])),
responses(
(status = 200, description = "Webhook found", body = WebhookWithMetadata, content_type = "application/json", example = json!({
"uuid": "550e8400-e29b-41d4-a716-446655440000",
"url": "https://your.site/on-tasks-completed",
"headers": {
"Authorization": "Bearer a-secret"
},
"isEditable": true
})),
(status = 404, description = "Webhook not found", body = ResponseError, content_type = "application/json"),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json"),
),
params(
("uuid" = Uuid, Path, description = "The universally unique identifier of the webhook")
)
)]
async fn get_webhook(
index_scheduler: GuardedData<ActionPolicy<{ actions::WEBHOOKS_GET }>, Data<IndexScheduler>>,
uuid: Path<String>,
) -> Result<HttpResponse, ResponseError> {
let uuid = Uuid::from_str(&uuid.into_inner()).map_err(InvalidUuid)?;
let mut webhooks = index_scheduler.webhooks_view();

let webhook = webhooks.webhooks.remove(&uuid).ok_or(WebhookNotFound(uuid))?;
let webhook = WebhookWithMetadata::from(uuid, webhook);

debug!(returns = ?webhook, "Get webhook");
Ok(HttpResponse::Ok().json(webhook))
}

#[utoipa::path(
post,
path = "",
tag = "Webhooks",
request_body = WebhookSettings,
security(("Bearer" = ["webhooks.create", "webhooks.*", "*"])),
responses(
(status = 201, description = "Webhook created successfully", body = WebhookWithMetadata, content_type = "application/json", example = json!({
"uuid": "550e8400-e29b-41d4-a716-446655440000",
"url": "https://your.site/on-tasks-completed",
"headers": {
"Authorization": "Bearer a-secret-token"
},
"isEditable": true
})),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json"),
(status = 400, description = "Bad request", body = ResponseError, content_type = "application/json"),
)
)]
async fn post_webhook(
index_scheduler: GuardedData<ActionPolicy<{ actions::WEBHOOKS_CREATE }>, Data<IndexScheduler>>,
webhook_settings: AwebJson<WebhookSettings, DeserrJsonError>,
req: HttpRequest,
analytics: Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
let webhook_settings = webhook_settings.into_inner();
debug!(parameters = ?webhook_settings, "Post webhook");

let uuid = Uuid::new_v4();
if webhook_settings.headers.as_ref().set().is_some_and(|h| h.len() > 200) {
return Err(TooManyHeaders(uuid).into());
}

let mut webhooks = index_scheduler.retrieve_runtime_webhooks();
if webhooks.len() >= 20 {
return Err(TooManyWebhooks.into());
}

let webhook = Webhook {
url: webhook_settings.url.set().ok_or(MissingUrl(uuid))?,
headers: webhook_settings
.headers
.set()
.map(|h| h.into_iter().map(|(k, v)| (k, v.set().unwrap_or_default())).collect())
.unwrap_or_default(),
};

check_changed(uuid, &webhook)?;
webhooks.insert(uuid, webhook.clone());
index_scheduler.update_runtime_webhooks(webhooks)?;

analytics.publish(PostWebhooksAnalytics, &req);

let response = WebhookWithMetadata::from(uuid, webhook);
debug!(returns = ?response, "Post webhook");
Ok(HttpResponse::Created().json(response))
}

#[utoipa::path(
patch,
path = "/{uuid}",
tag = "Webhooks",
request_body = WebhookSettings,
security(("Bearer" = ["webhooks.update", "webhooks.*", "*"])),
responses(
(status = 200, description = "Webhook updated successfully", body = WebhookWithMetadata, content_type = "application/json", example = json!({
"uuid": "550e8400-e29b-41d4-a716-446655440000",
"url": "https://your.site/on-tasks-completed",
"headers": {
"Authorization": "Bearer a-secret-token"
},
"isEditable": true
})),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json"),
(status = 400, description = "Bad request", body = ResponseError, content_type = "application/json"),
),
params(
("uuid" = Uuid, Path, description = "The universally unique identifier of the webhook")
)
)]
async fn patch_webhook(
index_scheduler: GuardedData<ActionPolicy<{ actions::WEBHOOKS_UPDATE }>, Data<IndexScheduler>>,
uuid: Path<String>,
webhook_settings: AwebJson<WebhookSettings, DeserrJsonError>,
req: HttpRequest,
analytics: Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
let uuid = Uuid::from_str(&uuid.into_inner()).map_err(InvalidUuid)?;
let webhook_settings = webhook_settings.into_inner();
debug!(parameters = ?(uuid, &webhook_settings), "Patch webhook");

if uuid.is_nil() {
return Err(ImmutableWebhook(uuid).into());
}

let mut webhooks = index_scheduler.retrieve_runtime_webhooks();
let old_webhook = webhooks.remove(&uuid).ok_or(WebhookNotFound(uuid))?;
let webhook = patch_webhook_inner(&uuid, old_webhook, webhook_settings)?;

check_changed(uuid, &webhook)?;
webhooks.insert(uuid, webhook.clone());
index_scheduler.update_runtime_webhooks(webhooks)?;

analytics.publish(PatchWebhooksAnalytics, &req);

let response = WebhookWithMetadata::from(uuid, webhook);
debug!(returns = ?response, "Patch webhook");
Ok(HttpResponse::Ok().json(response))
}

#[utoipa::path(
delete,
path = "/{uuid}",
tag = "Webhooks",
security(("Bearer" = ["webhooks.delete", "webhooks.*", "*"])),
responses(
(status = 204, description = "Webhook deleted successfully"),
(status = 404, description = "Webhook not found", body = ResponseError, content_type = "application/json"),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json"),
),
params(
("uuid" = Uuid, Path, description = "The universally unique identifier of the webhook")
)
)]
async fn delete_webhook(
index_scheduler: GuardedData<ActionPolicy<{ actions::WEBHOOKS_DELETE }>, Data<IndexScheduler>>,
uuid: Path<String>,
) -> Result<HttpResponse, ResponseError> {
let uuid = Uuid::from_str(&uuid.into_inner()).map_err(InvalidUuid)?;
debug!(parameters = ?uuid, "Delete webhook");

if uuid.is_nil() {
return Err(ImmutableWebhook(uuid).into());
}

let mut webhooks = index_scheduler.retrieve_runtime_webhooks();
webhooks.remove(&uuid).ok_or(WebhookNotFound(uuid))?;
index_scheduler.update_runtime_webhooks(webhooks)?;

debug!(returns = "No Content", "Delete webhook");
Ok(HttpResponse::NoContent().finish())
}
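
A hypothetical end-to-end exercise of this new route, written against the test helpers added further down in this diff (`create_webhook`, `get_webhook`, `patch_webhook`, `delete_webhook`); the harness types (`Server`, `json!`) and exact response shapes are assumed:

#[actix_rt::test]
async fn webhook_crud_smoke() {
    let server = Server::new().await;

    // POST /webhooks answers 201 Created with the generated uuid.
    let (created, code) = server
        .create_webhook(json!({ "url": "https://your.site/on-tasks-completed" }))
        .await;
    assert_eq!(code, 201);
    let uuid = created["uuid"].as_str().unwrap().to_string();

    // GET /webhooks/{uuid} returns the stored webhook.
    let (_webhook, code) = server.get_webhook(&uuid).await;
    assert_eq!(code, 200);

    // PATCH /webhooks/{uuid} merges headers according to patch_webhook_inner.
    let (_patched, code) = server
        .patch_webhook(&uuid, json!({ "headers": { "Authorization": "Bearer a-secret-token" } }))
        .await;
    assert_eq!(code, 200);

    // DELETE /webhooks/{uuid} answers 204 No Content.
    let (_deleted, code) = server.delete_webhook(&uuid).await;
    assert_eq!(code, 204);
}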
@@ -13,6 +13,7 @@ use meilisearch_types::error::ResponseError;
use meilisearch_types::features::{Network, Remote};
use meilisearch_types::milli::order_by_map::OrderByMap;
use meilisearch_types::milli::score_details::{ScoreDetails, WeightedScoreValue};
use meilisearch_types::milli::vector::Embedding;
use meilisearch_types::milli::{self, DocumentId, OrderBy, TimeBudget, DEFAULT_VALUES_PER_FACET};
use roaring::RoaringBitmap;
use tokio::task::JoinHandle;
@@ -46,6 +47,7 @@ pub async fn perform_federated_search(
let deadline = before_search + std::time::Duration::from_secs(9);

let required_hit_count = federation.limit + federation.offset;
let retrieve_vectors = queries.iter().any(|q| q.retrieve_vectors);

let network = index_scheduler.network();

@@ -91,6 +93,7 @@ pub async fn perform_federated_search(
federation,
mut semantic_hit_count,
mut results_by_index,
mut query_vectors,
previous_query_data: _,
facet_order,
} = search_by_index;
@@ -122,7 +125,26 @@ pub async fn perform_federated_search(
.map(|hit| hit.hit())
.collect();

// 3.3. merge facets
// 3.3. merge query vectors
let query_vectors = if retrieve_vectors {
for remote_results in remote_results.iter_mut() {
if let Some(remote_vectors) = remote_results.query_vectors.take() {
for (key, value) in remote_vectors.into_iter() {
debug_assert!(
!query_vectors.contains_key(&key),
"Query vector for query {key} already exists"
);
query_vectors.insert(key, value);
}
}
}

Some(query_vectors)
} else {
None
};

// 3.4. merge facets
let (facet_distribution, facet_stats, facets_by_index) =
facet_order.merge(federation.merge_facets, remote_results, facets);

@@ -140,6 +162,7 @@ pub async fn perform_federated_search(
offset: federation.offset,
estimated_total_hits,
},
query_vectors,
semantic_hit_count,
degraded,
used_negative_operator,
@@ -408,6 +431,7 @@ fn merge_metadata(
hits: _,
processing_time_ms,
hits_info,
query_vectors: _,
semantic_hit_count: _,
facet_distribution: _,
facet_stats: _,
@@ -657,6 +681,7 @@ struct SearchByIndex {
// Then when merging, we'll update its value if there is any semantic hit
semantic_hit_count: Option<u32>,
results_by_index: Vec<SearchResultByIndex>,
query_vectors: BTreeMap<usize, Embedding>,
previous_query_data: Option<(RankingRules, usize, String)>,
// remember the order and name of first index for each facet when merging with index settings
// to detect if the order is inconsistent for a facet.
@@ -674,6 +699,7 @@ impl SearchByIndex {
federation,
semantic_hit_count: None,
results_by_index: Vec::with_capacity(index_count),
query_vectors: BTreeMap::new(),
previous_query_data: None,
}
}
@@ -837,8 +863,19 @@ impl SearchByIndex {
document_scores,
degraded: query_degraded,
used_negative_operator: query_used_negative_operator,
query_vector,
} = result;

if query.retrieve_vectors {
if let Some(query_vector) = query_vector {
debug_assert!(
!self.query_vectors.contains_key(&query_index),
"Query vector for query {query_index} already exists"
);
self.query_vectors.insert(query_index, query_vector);
}
}

candidates |= query_candidates;
degraded |= query_degraded;
used_negative_operator |= query_used_negative_operator;
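
The same merge pattern appears twice above (local results and remote results): vectors are keyed by the position of the query in the federated request, and a duplicate key would indicate a bug. A self-contained sketch of that merge, with `Embedding` assumed to be a vector of f32s:

use std::collections::BTreeMap;

type Embedding = Vec<f32>; // assumption: milli's Embedding behaves like a Vec<f32>

// Mirrors the merge loops above: later sources must not overwrite an
// already-known query vector, so duplicates only trip a debug assertion.
fn merge_query_vectors(
    merged: &mut BTreeMap<usize, Embedding>,
    incoming: BTreeMap<usize, Embedding>,
) {
    for (query_index, vector) in incoming {
        debug_assert!(
            !merged.contains_key(&query_index),
            "Query vector for query {query_index} already exists"
        );
        merged.insert(query_index, vector);
    }
}

fn main() {
    let mut merged = BTreeMap::from([(0, vec![1.0, 0.0, 0.5])]);
    merge_query_vectors(&mut merged, BTreeMap::from([(1, vec![0.8, 0.6])]));
    assert_eq!(merged.len(), 2);
}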
@@ -18,6 +18,7 @@ use serde::{Deserialize, Serialize};
use utoipa::ToSchema;

use super::super::{ComputedFacets, FacetStats, HitsInfo, SearchHit, SearchQueryWithIndex};
use crate::milli::vector::Embedding;

pub const DEFAULT_FEDERATED_WEIGHT: f64 = 1.0;

@@ -117,6 +118,9 @@ pub struct FederatedSearchResult {
#[serde(flatten)]
pub hits_info: HitsInfo,

#[serde(default, skip_serializing_if = "Option::is_none")]
pub query_vectors: Option<BTreeMap<usize, Embedding>>,

#[serde(default, skip_serializing_if = "Option::is_none")]
pub semantic_hit_count: Option<u32>,

@@ -144,6 +148,7 @@ impl fmt::Debug for FederatedSearchResult {
hits,
processing_time_ms,
hits_info,
query_vectors,
semantic_hit_count,
degraded,
used_negative_operator,
@@ -158,6 +163,10 @@ impl fmt::Debug for FederatedSearchResult {
debug.field("processing_time_ms", &processing_time_ms);
debug.field("hits", &format!("[{} hits returned]", hits.len()));
debug.field("hits_info", &hits_info);
if let Some(query_vectors) = query_vectors {
let known = query_vectors.len();
debug.field("query_vectors", &format!("[{known} known vectors]"));
}
if *used_negative_operator {
debug.field("used_negative_operator", used_negative_operator);
}
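
Because of `skip_serializing_if = "Option::is_none"`, the new `queryVectors` field only shows up in the JSON payload when at least one query asked for vectors. A minimal stand-in (serde and serde_json assumed; integer map keys are emitted as JSON object keys, i.e. strings):

use std::collections::BTreeMap;
use serde::Serialize;

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct FederatedResultSketch {
    #[serde(skip_serializing_if = "Option::is_none")]
    query_vectors: Option<BTreeMap<usize, Vec<f32>>>,
}

fn main() {
    let without = FederatedResultSketch { query_vectors: None };
    let with = FederatedResultSketch {
        query_vectors: Some(BTreeMap::from([(0, vec![1.0, 0.0, 0.5])])),
    };
    // Prints: {}
    println!("{}", serde_json::to_string(&without).unwrap());
    // Prints: {"queryVectors":{"0":[1.0,0.0,0.5]}}
    println!("{}", serde_json::to_string(&with).unwrap());
}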
@@ -841,6 +841,8 @@ pub struct SearchHit {
pub struct SearchResult {
pub hits: Vec<SearchHit>,
pub query: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub query_vector: Option<Vec<f32>>,
pub processing_time_ms: u128,
#[serde(flatten)]
pub hits_info: HitsInfo,
@@ -865,6 +867,7 @@ impl fmt::Debug for SearchResult {
let SearchResult {
hits,
query,
query_vector,
processing_time_ms,
hits_info,
facet_distribution,
@@ -879,6 +882,9 @@ impl fmt::Debug for SearchResult {
debug.field("processing_time_ms", &processing_time_ms);
debug.field("hits", &format!("[{} hits returned]", hits.len()));
debug.field("query", &query);
if query_vector.is_some() {
debug.field("query_vector", &"[...]");
}
debug.field("hits_info", &hits_info);
if *used_negative_operator {
debug.field("used_negative_operator", used_negative_operator);
@@ -1050,6 +1056,7 @@ pub fn prepare_search<'t>(
.map(|x| x as usize)
.unwrap_or(DEFAULT_PAGINATION_MAX_TOTAL_HITS);

search.retrieve_vectors(query.retrieve_vectors);
search.exhaustive_number_hits(is_finite_pagination);
search.max_total_hits(Some(max_total_hits));
search.scoring_strategy(
@@ -1132,6 +1139,7 @@ pub fn perform_search(
document_scores,
degraded,
used_negative_operator,
query_vector,
},
semantic_hit_count,
) = search_from_kind(index_uid, search_kind, search)?;
@@ -1222,6 +1230,7 @@ pub fn perform_search(
hits: documents,
hits_info,
query: q.unwrap_or_default(),
query_vector,
processing_time_ms: before_search.elapsed().as_millis(),
facet_distribution,
facet_stats,
@@ -1734,6 +1743,7 @@ pub fn perform_similar(
document_scores,
degraded: _,
used_negative_operator: _,
query_vector: _,
} = similar.execute().map_err(|err| match err {
milli::Error::UserError(milli::UserError::InvalidFilter(_)) => {
ResponseError::from_msg(err.to_string(), Code::InvalidSimilarFilter)

@@ -421,7 +421,7 @@ async fn error_add_api_key_invalid_parameters_actions() {
meili_snap::snapshot!(code, @"400 Bad Request");
meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r#"
{
"message": "Unknown value `doc.add` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `export`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`, `*.get`",
"message": "Unknown value `doc.add` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `export`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`, `*.get`, `webhooks.get`, `webhooks.update`, `webhooks.delete`, `webhooks.create`, `webhooks.*`",
"code": "invalid_api_key_actions",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"

@@ -93,7 +93,7 @@ async fn create_api_key_bad_actions() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `export`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`, `*.get`",
"message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `export`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`, `*.get`, `webhooks.get`, `webhooks.update`, `webhooks.delete`, `webhooks.create`, `webhooks.*`",
"code": "invalid_api_key_actions",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"

@@ -249,6 +249,11 @@ impl<'a> Index<'a, Owned> {
self.service.put_encoded(url, settings, self.encoder).await
}

pub async fn update_settings_chat(&self, settings: Value) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/chat", urlencode(self.uid.as_ref()));
self.service.patch_encoded(url, settings, self.encoder).await
}

pub async fn delete_settings(&self) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings", urlencode(self.uid.as_ref()));
self.service.delete(url).await

@@ -182,6 +182,25 @@ impl Server<Owned> {
self.service.patch("/network", value).await
}

pub async fn create_webhook(&self, value: Value) -> (Value, StatusCode) {
self.service.post("/webhooks", value).await
}

pub async fn get_webhook(&self, uuid: impl AsRef<str>) -> (Value, StatusCode) {
let url = format!("/webhooks/{}", uuid.as_ref());
self.service.get(url).await
}

pub async fn delete_webhook(&self, uuid: impl AsRef<str>) -> (Value, StatusCode) {
let url = format!("/webhooks/{}", uuid.as_ref());
self.service.delete(url).await
}

pub async fn patch_webhook(&self, uuid: impl AsRef<str>, value: Value) -> (Value, StatusCode) {
let url = format!("/webhooks/{}", uuid.as_ref());
self.service.patch(url, value).await
}

pub async fn get_metrics(&self) -> (Value, StatusCode) {
self.service.get("/metrics").await
}
@@ -447,6 +466,10 @@ impl<State> Server<State> {
pub async fn get_network(&self) -> (Value, StatusCode) {
self.service.get("/network").await
}

pub async fn get_webhooks(&self) -> (Value, StatusCode) {
self.service.get("/webhooks").await
}
}

pub fn default_settings(dir: impl AsRef<Path>) -> Opt {

@@ -1270,27 +1270,27 @@ async fn search_with_contains_without_enabling_the_feature() {
index
.search(json!({ "filter": "doggo CONTAINS kefir" }), |response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
snapshot!(json_string!(response), @r#"
{
"message": "Using `CONTAINS` or `STARTS WITH` in a filter requires enabling the `contains filter` experimental feature. See https://github.com/orgs/meilisearch/discussions/763\n7:15 doggo CONTAINS kefir",
"message": "Using `CONTAINS` in a filter requires enabling the `contains filter` experimental feature. See https://github.com/orgs/meilisearch/discussions/763\n7:15 doggo CONTAINS kefir",
"code": "feature_not_enabled",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#feature_not_enabled"
}
"###);
"#);
})
.await;
index
.search(json!({ "filter": "doggo != echo AND doggo CONTAINS kefir" }), |response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
snapshot!(json_string!(response), @r#"
{
"message": "Using `CONTAINS` or `STARTS WITH` in a filter requires enabling the `contains filter` experimental feature. See https://github.com/orgs/meilisearch/discussions/763\n25:33 doggo != echo AND doggo CONTAINS kefir",
"message": "Using `CONTAINS` in a filter requires enabling the `contains filter` experimental feature. See https://github.com/orgs/meilisearch/discussions/763\n25:33 doggo != echo AND doggo CONTAINS kefir",
"code": "feature_not_enabled",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#feature_not_enabled"
}
"###);
"#);
})
.await;

@@ -1299,24 +1299,24 @@ async fn search_with_contains_without_enabling_the_feature() {
index.search_post(json!({ "filter": ["doggo != echo", "doggo CONTAINS kefir"] })).await;

snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
snapshot!(json_string!(response), @r#"
{
"message": "Using `CONTAINS` or `STARTS WITH` in a filter requires enabling the `contains filter` experimental feature. See https://github.com/orgs/meilisearch/discussions/763\n7:15 doggo CONTAINS kefir",
"message": "Using `CONTAINS` in a filter requires enabling the `contains filter` experimental feature. See https://github.com/orgs/meilisearch/discussions/763\n7:15 doggo CONTAINS kefir",
"code": "feature_not_enabled",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#feature_not_enabled"
}
"###);
"#);
let (response, code) =
index.search_post(json!({ "filter": ["doggo != echo", ["doggo CONTAINS kefir"]] })).await;

snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
snapshot!(json_string!(response), @r#"
{
"message": "Using `CONTAINS` or `STARTS WITH` in a filter requires enabling the `contains filter` experimental feature. See https://github.com/orgs/meilisearch/discussions/763\n7:15 doggo CONTAINS kefir",
"message": "Using `CONTAINS` in a filter requires enabling the `contains filter` experimental feature. See https://github.com/orgs/meilisearch/discussions/763\n7:15 doggo CONTAINS kefir",
"code": "feature_not_enabled",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#feature_not_enabled"
}
"###);
"#);
}

@ -148,7 +148,70 @@ async fn simple_search() {
|
||||
)
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(response["hits"], @r###"[{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":{"embeddings":[[1.0,2.0]],"regenerate":false}}},{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":{"embeddings":[[2.0,3.0]],"regenerate":false}}},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":{"embeddings":[[1.0,3.0]],"regenerate":false}}}]"###);
|
||||
snapshot!(response, @r#"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
"title": "Captain Planet",
|
||||
"desc": "He's not part of the Marvel Cinematic Universe",
|
||||
"id": "2",
|
||||
"_vectors": {
|
||||
"default": {
|
||||
"embeddings": [
|
||||
[
|
||||
1.0,
|
||||
2.0
|
||||
]
|
||||
],
|
||||
"regenerate": false
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"title": "Captain Marvel",
|
||||
"desc": "a Shazam ersatz",
|
||||
"id": "3",
|
||||
"_vectors": {
|
||||
"default": {
|
||||
"embeddings": [
|
||||
[
|
||||
2.0,
|
||||
3.0
|
||||
]
|
||||
],
|
||||
"regenerate": false
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"title": "Shazam!",
|
||||
"desc": "a Captain Marvel ersatz",
|
||||
"id": "1",
|
||||
"_vectors": {
|
||||
"default": {
|
||||
"embeddings": [
|
||||
[
|
||||
1.0,
|
||||
3.0
|
||||
]
|
||||
],
|
||||
"regenerate": false
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"query": "Captain",
|
||||
"queryVector": [
|
||||
1.0,
|
||||
1.0
|
||||
],
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3,
|
||||
"semanticHitCount": 0
|
||||
}
|
||||
"#);
|
||||
snapshot!(response["semanticHitCount"], @"0");
|
||||
|
||||
let (response, code) = index
|
||||
@ -157,7 +220,73 @@ async fn simple_search() {
|
||||
)
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(response["hits"], @r###"[{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":{"embeddings":[[2.0,3.0]],"regenerate":false}},"_rankingScore":0.990290343761444},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":{"embeddings":[[1.0,2.0]],"regenerate":false}},"_rankingScore":0.9848484848484848},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":{"embeddings":[[1.0,3.0]],"regenerate":false}},"_rankingScore":0.9472135901451112}]"###);
|
||||
snapshot!(response, @r#"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
"title": "Captain Marvel",
|
||||
"desc": "a Shazam ersatz",
|
||||
"id": "3",
|
||||
"_vectors": {
|
||||
"default": {
|
||||
"embeddings": [
|
||||
[
|
||||
2.0,
|
||||
3.0
|
||||
]
|
||||
],
|
||||
"regenerate": false
|
||||
}
|
||||
},
|
||||
"_rankingScore": 0.990290343761444
|
||||
},
|
||||
{
|
||||
"title": "Captain Planet",
|
||||
"desc": "He's not part of the Marvel Cinematic Universe",
|
||||
"id": "2",
|
||||
"_vectors": {
|
||||
"default": {
|
||||
"embeddings": [
|
||||
[
|
||||
1.0,
|
||||
2.0
|
||||
]
|
||||
],
|
||||
"regenerate": false
|
||||
}
|
||||
},
|
||||
"_rankingScore": 0.9848484848484848
|
||||
},
|
||||
{
|
||||
"title": "Shazam!",
|
||||
"desc": "a Captain Marvel ersatz",
|
||||
"id": "1",
|
||||
"_vectors": {
|
||||
"default": {
|
||||
"embeddings": [
|
||||
[
|
||||
1.0,
|
||||
3.0
|
||||
]
|
||||
],
|
||||
"regenerate": false
|
||||
}
|
||||
},
|
||||
"_rankingScore": 0.9472135901451112
|
||||
}
|
||||
],
|
||||
"query": "Captain",
|
||||
"queryVector": [
|
||||
1.0,
|
||||
1.0
|
||||
],
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3,
|
||||
"semanticHitCount": 2
|
||||
}
|
||||
"#);
|
||||
snapshot!(response["semanticHitCount"], @"2");
|
||||
|
||||
let (response, code) = index
|
||||
@ -166,7 +295,73 @@ async fn simple_search() {
|
||||
)
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(response["hits"], @r###"[{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":{"embeddings":[[2.0,3.0]],"regenerate":false}},"_rankingScore":0.990290343761444},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":{"embeddings":[[1.0,2.0]],"regenerate":false}},"_rankingScore":0.974341630935669},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":{"embeddings":[[1.0,3.0]],"regenerate":false}},"_rankingScore":0.9472135901451112}]"###);
|
||||
snapshot!(response, @r#"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
"title": "Captain Marvel",
|
||||
"desc": "a Shazam ersatz",
|
||||
"id": "3",
|
||||
"_vectors": {
|
||||
"default": {
|
||||
"embeddings": [
|
||||
[
|
||||
2.0,
|
||||
3.0
|
||||
]
|
||||
],
|
||||
"regenerate": false
|
||||
}
|
||||
},
|
||||
"_rankingScore": 0.990290343761444
|
||||
},
|
||||
{
|
||||
"title": "Captain Planet",
|
||||
"desc": "He's not part of the Marvel Cinematic Universe",
|
||||
"id": "2",
|
||||
"_vectors": {
|
||||
"default": {
|
||||
"embeddings": [
|
||||
[
|
||||
1.0,
|
||||
2.0
|
||||
]
|
||||
],
|
||||
"regenerate": false
|
||||
}
|
||||
},
|
||||
"_rankingScore": 0.974341630935669
|
||||
},
|
||||
{
|
||||
"title": "Shazam!",
|
||||
"desc": "a Captain Marvel ersatz",
|
||||
"id": "1",
|
||||
"_vectors": {
|
||||
"default": {
|
||||
"embeddings": [
|
||||
[
|
||||
1.0,
|
||||
3.0
|
||||
]
|
||||
],
|
||||
"regenerate": false
|
||||
}
|
||||
},
|
||||
"_rankingScore": 0.9472135901451112
|
||||
}
|
||||
],
|
||||
"query": "Captain",
|
||||
"queryVector": [
|
||||
1.0,
|
||||
1.0
|
||||
],
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3,
|
||||
"semanticHitCount": 3
|
||||
}
|
||||
"#);
|
||||
snapshot!(response["semanticHitCount"], @"3");
|
||||
}
|
||||
|
||||
|
@ -3703,7 +3703,7 @@ async fn federation_vector_two_indexes() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r#"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@ -3911,9 +3911,20 @@ async fn federation_vector_two_indexes() {
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 8,
|
||||
"queryVectors": {
|
||||
"0": [
|
||||
1.0,
|
||||
0.0,
|
||||
0.5
|
||||
],
|
||||
"1": [
|
||||
0.8,
|
||||
0.6
|
||||
]
|
||||
},
|
||||
"semanticHitCount": 6
|
||||
}
|
||||
"###);
|
||||
"#);
|
||||
|
||||
// hybrid search, distinct embedder
|
||||
let (response, code) = server
|
||||
@ -3923,7 +3934,7 @@ async fn federation_vector_two_indexes() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r#"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@ -4139,9 +4150,20 @@ async fn federation_vector_two_indexes() {
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 8,
|
||||
"queryVectors": {
|
||||
"0": [
|
||||
1.0,
|
||||
0.0,
|
||||
0.5
|
||||
],
|
||||
"1": [
|
||||
-1.0,
|
||||
0.6
|
||||
]
|
||||
},
|
||||
"semanticHitCount": 8
|
||||
}
|
||||
"###);
|
||||
"#);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
|
@ -2,8 +2,9 @@ use std::sync::Arc;
|
||||
|
||||
use actix_http::StatusCode;
|
||||
use meili_snap::{json_string, snapshot};
|
||||
use wiremock::matchers::AnyMatcher;
|
||||
use wiremock::{Mock, MockServer, ResponseTemplate};
|
||||
use wiremock::matchers::method;
|
||||
use wiremock::matchers::{path, AnyMatcher};
|
||||
use wiremock::{Mock, MockServer, Request, ResponseTemplate};
|
||||
|
||||
use crate::common::{Server, Value, SCORE_DOCUMENTS};
|
||||
use crate::json;
|
||||
@ -415,6 +416,503 @@ async fn remote_sharding() {
|
||||
"###);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn remote_sharding_retrieve_vectors() {
|
||||
let ms0 = Server::new().await;
|
||||
let ms1 = Server::new().await;
|
||||
let ms2 = Server::new().await;
|
||||
let index0 = ms0.index("test");
|
||||
let index1 = ms1.index("test");
|
||||
let index2 = ms2.index("test");
|
||||
|
||||
// enable feature
|
||||
|
||||
let (response, code) = ms0.set_features(json!({"network": true})).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response["network"]), @"true");
|
||||
let (response, code) = ms1.set_features(json!({"network": true})).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response["network"]), @"true");
|
||||
let (response, code) = ms2.set_features(json!({"network": true})).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response["network"]), @"true");
|
||||
|
||||
// set self
|
||||
|
||||
let (response, code) = ms0.set_network(json!({"self": "ms0"})).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response), @r###"
|
||||
{
|
||||
"self": "ms0",
|
||||
"remotes": {}
|
||||
}
|
||||
"###);
|
||||
let (response, code) = ms1.set_network(json!({"self": "ms1"})).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response), @r###"
|
||||
{
|
||||
"self": "ms1",
|
||||
"remotes": {}
|
||||
}
|
||||
"###);
|
||||
let (response, code) = ms2.set_network(json!({"self": "ms2"})).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response), @r###"
|
||||
{
|
||||
"self": "ms2",
|
||||
"remotes": {}
|
||||
}
|
||||
"###);
|
||||
|
||||
// setup embedders
|
||||
|
||||
let mock_server = MockServer::start().await;
Mock::given(method("POST"))
.and(path("/"))
.respond_with(move |req: &Request| {
println!("Received request: {:?}", req);
let text = req.body_json::<String>().unwrap().to_lowercase();
let patterns = [
("batman", [1.0, 0.0, 0.0]),
("dark", [0.0, 0.1, 0.0]),
("knight", [0.1, 0.1, 0.0]),
("returns", [0.0, 0.0, 0.2]),
("part", [0.05, 0.1, 0.0]),
("1", [0.3, 0.05, 0.0]),
("2", [0.2, 0.05, 0.0]),
];
let mut embedding = vec![0.; 3];
for (pattern, vector) in patterns {
if text.contains(pattern) {
for (i, v) in vector.iter().enumerate() {
embedding[i] += v;
}
}
}
ResponseTemplate::new(200).set_body_json(json!({ "data": embedding }))
})
.mount(&mock_server)
.await;
let url = mock_server.uri();
|
||||
|
||||
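// Every instance is configured with the same REST embedder pointing at the mock,
// so identical queries produce identical vectors on ms0, ms1 and ms2.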
for (server, index) in [(&ms0, &index0), (&ms1, &index1), (&ms2, &index2)] {
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
"embedders": {
|
||||
"rest": {
|
||||
"source": "rest",
|
||||
"url": url,
|
||||
"dimensions": 3,
|
||||
"request": "{{text}}",
|
||||
"response": { "data": "{{embedding}}" },
|
||||
"documentTemplate": "{{doc.name}}",
|
||||
},
|
||||
},
|
||||
}))
|
||||
.await;
|
||||
snapshot!(code, @"202 Accepted");
|
||||
server.wait_task(response.uid()).await.succeeded();
|
||||
}
|
||||
|
||||
// wrap servers
|
||||
let ms0 = Arc::new(ms0);
|
||||
let ms1 = Arc::new(ms1);
|
||||
let ms2 = Arc::new(ms2);
|
||||
|
||||
let rms0 = LocalMeili::new(ms0.clone()).await;
|
||||
let rms1 = LocalMeili::new(ms1.clone()).await;
|
||||
let rms2 = LocalMeili::new(ms2.clone()).await;
|
||||
|
||||
// set network
|
||||
let network = json!({"remotes": {
|
||||
"ms0": {
|
||||
"url": rms0.url()
|
||||
},
|
||||
"ms1": {
|
||||
"url": rms1.url()
|
||||
},
|
||||
"ms2": {
|
||||
"url": rms2.url()
|
||||
}
|
||||
}});
|
||||
|
||||
let (_response, status_code) = ms0.set_network(network.clone()).await;
|
||||
snapshot!(status_code, @"200 OK");
|
||||
let (_response, status_code) = ms1.set_network(network.clone()).await;
|
||||
snapshot!(status_code, @"200 OK");
|
||||
let (_response, status_code) = ms2.set_network(network.clone()).await;
|
||||
snapshot!(status_code, @"200 OK");
|
||||
|
||||
// multi vector search: one query per remote
|
||||
|
||||
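// With the mock above, the expected query vectors are: "batman" -> [1.0, 0.0, 0.0],
// "dark knight" -> [0.1, 0.2, 0.0] and "returns" -> [0.0, 0.0, 0.2]. No documents
// were indexed, so hits stay empty; the assertions focus on the returned `queryVectors`.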
let request = json!({
|
||||
"federation": {},
|
||||
"queries": [
|
||||
{
|
||||
"q": "batman",
|
||||
"indexUid": "test",
|
||||
"hybrid": {
|
||||
"semanticRatio": 1.0,
|
||||
"embedder": "rest"
|
||||
},
|
||||
"retrieveVectors": true,
|
||||
"federationOptions": {
|
||||
"remote": "ms0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"q": "dark knight",
|
||||
"indexUid": "test",
|
||||
"hybrid": {
|
||||
"semanticRatio": 1.0,
|
||||
"embedder": "rest"
|
||||
},
|
||||
"retrieveVectors": true,
|
||||
"federationOptions": {
|
||||
"remote": "ms1"
|
||||
}
|
||||
},
|
||||
{
|
||||
"q": "returns",
|
||||
"indexUid": "test",
|
||||
"hybrid": {
|
||||
"semanticRatio": 1.0,
|
||||
"embedder": "rest"
|
||||
},
|
||||
"retrieveVectors": true,
|
||||
"federationOptions": {
|
||||
"remote": "ms2"
|
||||
}
|
||||
},
|
||||
]
|
||||
});
|
||||
|
||||
let (response, code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
|
||||
{
|
||||
"hits": [],
|
||||
"processingTimeMs": "[time]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 0,
|
||||
"queryVectors": {
|
||||
"0": [
|
||||
1.0,
|
||||
0.0,
|
||||
0.0
|
||||
],
|
||||
"1": [
|
||||
0.1,
|
||||
0.2,
|
||||
0.0
|
||||
],
|
||||
"2": [
|
||||
0.0,
|
||||
0.0,
|
||||
0.2
|
||||
]
|
||||
},
|
||||
"semanticHitCount": 0,
|
||||
"remoteErrors": {}
|
||||
}
|
||||
"#);
|
||||
|
||||
// multi vector search: two local queries, one remote
|
||||
|
||||
let request = json!({
|
||||
"federation": {},
|
||||
"queries": [
|
||||
{
|
||||
"q": "batman",
|
||||
"indexUid": "test",
|
||||
"hybrid": {
|
||||
"semanticRatio": 1.0,
|
||||
"embedder": "rest"
|
||||
},
|
||||
"retrieveVectors": true,
|
||||
"federationOptions": {
|
||||
"remote": "ms0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"q": "dark knight",
|
||||
"indexUid": "test",
|
||||
"hybrid": {
|
||||
"semanticRatio": 1.0,
|
||||
"embedder": "rest"
|
||||
},
|
||||
"retrieveVectors": true,
|
||||
"federationOptions": {
|
||||
"remote": "ms0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"q": "returns",
|
||||
"indexUid": "test",
|
||||
"hybrid": {
|
||||
"semanticRatio": 1.0,
|
||||
"embedder": "rest"
|
||||
},
|
||||
"retrieveVectors": true,
|
||||
"federationOptions": {
|
||||
"remote": "ms2"
|
||||
}
|
||||
},
|
||||
]
|
||||
});
|
||||
|
||||
let (response, code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
|
||||
{
|
||||
"hits": [],
|
||||
"processingTimeMs": "[time]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 0,
|
||||
"queryVectors": {
|
||||
"0": [
|
||||
1.0,
|
||||
0.0,
|
||||
0.0
|
||||
],
|
||||
"1": [
|
||||
0.1,
|
||||
0.2,
|
||||
0.0
|
||||
],
|
||||
"2": [
|
||||
0.0,
|
||||
0.0,
|
||||
0.2
|
||||
]
|
||||
},
|
||||
"semanticHitCount": 0,
|
||||
"remoteErrors": {}
|
||||
}
|
||||
"#);
|
||||
|
||||
// multi vector search: two queries on the same remote
|
||||
|
||||
let request = json!({
|
||||
"federation": {},
|
||||
"queries": [
|
||||
{
|
||||
"q": "batman",
|
||||
"indexUid": "test",
|
||||
"hybrid": {
|
||||
"semanticRatio": 1.0,
|
||||
"embedder": "rest"
|
||||
},
|
||||
"retrieveVectors": true,
|
||||
"federationOptions": {
|
||||
"remote": "ms0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"q": "dark knight",
|
||||
"indexUid": "test",
|
||||
"hybrid": {
|
||||
"semanticRatio": 1.0,
|
||||
"embedder": "rest"
|
||||
},
|
||||
"retrieveVectors": true,
|
||||
"federationOptions": {
|
||||
"remote": "ms1"
|
||||
}
|
||||
},
|
||||
{
|
||||
"q": "returns",
|
||||
"indexUid": "test",
|
||||
"hybrid": {
|
||||
"semanticRatio": 1.0,
|
||||
"embedder": "rest"
|
||||
},
|
||||
"retrieveVectors": true,
|
||||
"federationOptions": {
|
||||
"remote": "ms1"
|
||||
}
|
||||
},
|
||||
]
|
||||
});
|
||||
|
||||
let (response, code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
|
||||
{
|
||||
"hits": [],
|
||||
"processingTimeMs": "[time]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 0,
|
||||
"queryVectors": {
|
||||
"0": [
|
||||
1.0,
|
||||
0.0,
|
||||
0.0
|
||||
],
|
||||
"1": [
|
||||
0.1,
|
||||
0.2,
|
||||
0.0
|
||||
],
|
||||
"2": [
|
||||
0.0,
|
||||
0.0,
|
||||
0.2
|
||||
]
|
||||
},
|
||||
"semanticHitCount": 0,
|
||||
"remoteErrors": {}
|
||||
}
|
||||
"#);
|
||||
|
||||
// multi search: two vector, one keyword
|
||||
|
||||
let request = json!({
|
||||
"federation": {},
|
||||
"queries": [
|
||||
{
|
||||
"q": "batman",
|
||||
"indexUid": "test",
|
||||
"hybrid": {
|
||||
"semanticRatio": 1.0,
|
||||
"embedder": "rest"
|
||||
},
|
||||
"retrieveVectors": true,
|
||||
"federationOptions": {
|
||||
"remote": "ms0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"q": "dark knight",
|
||||
"indexUid": "test",
|
||||
"hybrid": {
|
||||
"semanticRatio": 0.0,
|
||||
"embedder": "rest"
|
||||
},
|
||||
"retrieveVectors": true,
|
||||
"federationOptions": {
|
||||
"remote": "ms1"
|
||||
}
|
||||
},
|
||||
{
|
||||
"q": "returns",
|
||||
"indexUid": "test",
|
||||
"hybrid": {
|
||||
"semanticRatio": 1.0,
|
||||
"embedder": "rest"
|
||||
},
|
||||
"retrieveVectors": true,
|
||||
"federationOptions": {
|
||||
"remote": "ms1"
|
||||
}
|
||||
},
|
||||
]
|
||||
});
|
||||
|
||||
let (response, code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
|
||||
{
|
||||
"hits": [],
|
||||
"processingTimeMs": "[time]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 0,
|
||||
"queryVectors": {
|
||||
"0": [
|
||||
1.0,
|
||||
0.0,
|
||||
0.0
|
||||
],
|
||||
"2": [
|
||||
0.0,
|
||||
0.0,
|
||||
0.2
|
||||
]
|
||||
},
|
||||
"semanticHitCount": 0,
|
||||
"remoteErrors": {}
|
||||
}
|
||||
"#);
|
||||
|
||||
// multi vector search: no local queries, all remote
|
||||
|
||||
let request = json!({
|
||||
"federation": {},
|
||||
"queries": [
|
||||
{
|
||||
"q": "batman",
|
||||
"indexUid": "test",
|
||||
"hybrid": {
|
||||
"semanticRatio": 1.0,
|
||||
"embedder": "rest"
|
||||
},
|
||||
"retrieveVectors": true,
|
||||
"federationOptions": {
|
||||
"remote": "ms1"
|
||||
}
|
||||
},
|
||||
{
|
||||
"q": "dark knight",
|
||||
"indexUid": "test",
|
||||
"hybrid": {
|
||||
"semanticRatio": 1.0,
|
||||
"embedder": "rest"
|
||||
},
|
||||
"retrieveVectors": true,
|
||||
"federationOptions": {
|
||||
"remote": "ms1"
|
||||
}
|
||||
},
|
||||
{
|
||||
"q": "returns",
|
||||
"indexUid": "test",
|
||||
"hybrid": {
|
||||
"semanticRatio": 1.0,
|
||||
"embedder": "rest"
|
||||
},
|
||||
"retrieveVectors": true,
|
||||
"federationOptions": {
|
||||
"remote": "ms1"
|
||||
}
|
||||
},
|
||||
]
|
||||
});
|
||||
|
||||
let (response, code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
|
||||
{
|
||||
"hits": [],
|
||||
"processingTimeMs": "[time]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 0,
|
||||
"queryVectors": {
|
||||
"0": [
|
||||
1.0,
|
||||
0.0,
|
||||
0.0
|
||||
],
|
||||
"1": [
|
||||
0.1,
|
||||
0.2,
|
||||
0.0
|
||||
],
|
||||
"2": [
|
||||
0.0,
|
||||
0.0,
|
||||
0.2
|
||||
]
|
||||
},
|
||||
"remoteErrors": {}
|
||||
}
|
||||
"#);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn error_unregistered_remote() {
|
||||
let ms0 = Server::new().await;
|
||||
|
66
crates/meilisearch/tests/settings/chat.rs
Normal file
@ -0,0 +1,66 @@
|
||||
use crate::common::Server;
|
||||
use crate::json;
|
||||
use meili_snap::{json_string, snapshot};
|
||||
|
||||
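// Regression test for issue 5772: updating only `searchParameters.limit` must not
// wipe the other previously-set search parameters (`sort`, `attributesToSearchOn`).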
#[actix_rt::test]
|
||||
async fn set_reset_chat_issue_5772() {
|
||||
let server = Server::new().await;
|
||||
let index = server.unique_index();
|
||||
|
||||
let (_, code) = server
|
||||
.set_features(json!({
|
||||
"chatCompletions": true,
|
||||
}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
|
||||
let (task1, _code) = index.update_settings_chat(json!({
|
||||
"description": "test!",
|
||||
"documentTemplate": "{% for field in fields %}{% if field.is_searchable and field.value != nil %}{{ field.name }}: {{ field.value }}\n{% endif %}{% endfor %}",
|
||||
"documentTemplateMaxBytes": 400,
|
||||
"searchParameters": {
|
||||
"limit": 15,
|
||||
"sort": [],
|
||||
"attributesToSearchOn": []
|
||||
}
|
||||
})).await;
|
||||
server.wait_task(task1.uid()).await.succeeded();
|
||||
|
||||
let (response, _) = index.settings().await;
|
||||
snapshot!(json_string!(response["chat"]), @r#"
|
||||
{
|
||||
"description": "test!",
|
||||
"documentTemplate": "{% for field in fields %}{% if field.is_searchable and field.value != nil %}{{ field.name }}: {{ field.value }}\n{% endif %}{% endfor %}",
|
||||
"documentTemplateMaxBytes": 400,
|
||||
"searchParameters": {
|
||||
"limit": 15,
|
||||
"sort": [],
|
||||
"attributesToSearchOn": []
|
||||
}
|
||||
}
|
||||
"#);
|
||||
|
||||
let (task2, _status_code) = index.update_settings_chat(json!({
|
||||
"description": "test!",
|
||||
"documentTemplate": "{% for field in fields %}{% if field.is_searchable and field.value != nil %}{{ field.name }}: {{ field.value }}\n{% endif %}{% endfor %}",
|
||||
"documentTemplateMaxBytes": 400,
|
||||
"searchParameters": {
|
||||
"limit": 16
|
||||
}
|
||||
})).await;
|
||||
server.wait_task(task2.uid()).await.succeeded();
|
||||
|
||||
let (response, _) = index.settings().await;
|
||||
snapshot!(json_string!(response["chat"]), @r#"
|
||||
{
|
||||
"description": "test!",
|
||||
"documentTemplate": "{% for field in fields %}{% if field.is_searchable and field.value != nil %}{{ field.name }}: {{ field.value }}\n{% endif %}{% endfor %}",
|
||||
"documentTemplateMaxBytes": 400,
|
||||
"searchParameters": {
|
||||
"limit": 16,
|
||||
"sort": [],
|
||||
"attributesToSearchOn": []
|
||||
}
|
||||
}
|
||||
"#);
|
||||
}
|
@ -186,7 +186,7 @@ test_setting_routes!(
|
||||
},
|
||||
{
|
||||
setting: chat,
|
||||
update_verb: put,
|
||||
update_verb: patch,
|
||||
default_value: {
|
||||
"description": "",
|
||||
"documentTemplate": "{% for field in fields %}{% if field.is_searchable and field.value != nil %}{{ field.name }}: {{ field.value }}\n{% endif %}{% endfor %}",
|
||||
|
@ -1,3 +1,4 @@
|
||||
mod chat;
|
||||
mod distinct;
|
||||
mod errors;
|
||||
mod get_settings;
|
||||
|
@ -2,16 +2,18 @@
//! post requests. The webhook handle starts a server and forwards all the
//! received requests into a channel for you to handle.

use std::path::PathBuf;
use std::sync::Arc;

use actix_http::body::MessageBody;
use actix_web::dev::{ServiceFactory, ServiceResponse};
use actix_web::web::{Bytes, Data};
use actix_web::{post, App, HttpRequest, HttpResponse, HttpServer};
use meili_snap::snapshot;
use meili_snap::{json_string, snapshot};
use meilisearch::Opt;
use tokio::sync::mpsc;
use url::Url;
use uuid::Uuid;

use crate::common::{self, default_settings, Server};
use crate::json;
@ -68,21 +70,55 @@ async fn create_webhook_server() -> WebhookHandle {
}

#[actix_web::test]
|
||||
async fn test_basic_webhook() {
|
||||
let WebhookHandle { server_handle, url, mut receiver } = create_webhook_server().await;
|
||||
|
||||
async fn cli_only() {
|
||||
let db_path = tempfile::tempdir().unwrap();
|
||||
let server = Server::new_with_options(Opt {
|
||||
task_webhook_url: Some(Url::parse(&url).unwrap()),
|
||||
task_webhook_url: Some(Url::parse("https://example-cli.com/").unwrap()),
|
||||
task_webhook_authorization_header: Some(String::from("Bearer a-secret-token")),
|
||||
..default_settings(db_path.path())
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let index = server.index("tamo");
|
||||
let (webhooks, code) = server.get_webhooks().await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(webhooks, @r#"
|
||||
{
|
||||
"results": [
|
||||
{
|
||||
"uuid": "00000000-0000-0000-0000-000000000000",
|
||||
"isEditable": false,
|
||||
"url": "https://example-cli.com/",
|
||||
"headers": {
|
||||
"Authorization": "Bearer a-secret-token"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
"#);
|
||||
}
|
||||
|
||||
#[actix_web::test]
|
||||
async fn single_receives_data() {
|
||||
let WebhookHandle { server_handle, url, mut receiver } = create_webhook_server().await;
|
||||
|
||||
let server = Server::new().await;
|
||||
|
||||
let (value, code) = server.create_webhook(json!({ "url": url })).await;
|
||||
snapshot!(code, @"201 Created");
|
||||
snapshot!(json_string!(value, { ".uuid" => "[uuid]", ".url" => "[ignored]" }), @r#"
|
||||
{
|
||||
"uuid": "[uuid]",
|
||||
"isEditable": true,
|
||||
"url": "[ignored]",
|
||||
"headers": {}
|
||||
}
|
||||
"#);
|
||||
|
||||
// May be flaky: we're relying on the fact that while the first document addition is processed, the other
|
||||
// operations will be received and batched together. If that doesn't happen, it's not a problem:
// the rest of the test doesn't assume anything about the number of tasks per batch.
|
||||
let index = server.index("tamo");
|
||||
for i in 0..5 {
|
||||
let (_, _status) = index.add_documents(json!({ "id": i, "doggo": "bone" }), None).await;
|
||||
}
|
||||
@ -127,3 +163,496 @@ async fn test_basic_webhook() {
|
||||
|
||||
server_handle.abort();
|
||||
}
|
||||
|
||||
#[actix_web::test]
|
||||
async fn multiple_receive_data() {
|
||||
let WebhookHandle { server_handle: handle1, url: url1, receiver: mut receiver1 } =
|
||||
create_webhook_server().await;
|
||||
let WebhookHandle { server_handle: handle2, url: url2, receiver: mut receiver2 } =
|
||||
create_webhook_server().await;
|
||||
let WebhookHandle { server_handle: handle3, url: url3, receiver: mut receiver3 } =
|
||||
create_webhook_server().await;
|
||||
|
||||
let db_path = tempfile::tempdir().unwrap();
|
||||
let server = Server::new_with_options(Opt {
|
||||
task_webhook_url: Some(Url::parse(&url3).unwrap()),
|
||||
..default_settings(db_path.path())
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
for url in [url1, url2] {
|
||||
let (value, code) = server.create_webhook(json!({ "url": url })).await;
|
||||
snapshot!(code, @"201 Created");
|
||||
snapshot!(json_string!(value, { ".uuid" => "[uuid]", ".url" => "[ignored]" }), @r#"
|
||||
{
|
||||
"uuid": "[uuid]",
|
||||
"isEditable": true,
|
||||
"url": "[ignored]",
|
||||
"headers": {}
|
||||
}
|
||||
"#);
|
||||
}
|
||||
let index = server.index("tamo");
|
||||
let (_, status) = index.add_documents(json!({ "id": 1, "doggo": "bone" }), None).await;
|
||||
snapshot!(status, @"202 Accepted");
|
||||
|
||||
let mut count1 = 0;
|
||||
let mut count2 = 0;
|
||||
let mut count3 = 0;
|
||||
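// Wait until every webhook target has received at least one notification:
// the two runtime webhooks registered above and the CLI-defined one (receiver3).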
while count1 == 0 || count2 == 0 || count3 == 0 {
|
||||
tokio::select! {
|
||||
msg = receiver1.recv() => { if msg.is_some() { count1 += 1; } },
|
||||
msg = receiver2.recv() => { if msg.is_some() { count2 += 1; } },
|
||||
msg = receiver3.recv() => { if msg.is_some() { count3 += 1; } },
|
||||
}
|
||||
}
|
||||
|
||||
assert_eq!(count1, 1);
|
||||
assert_eq!(count2, 1);
|
||||
assert_eq!(count3, 1);
|
||||
|
||||
handle1.abort();
|
||||
handle2.abort();
|
||||
handle3.abort();
|
||||
}
|
||||
|
||||
#[actix_web::test]
|
||||
async fn cli_with_dumps() {
|
||||
let db_path = tempfile::tempdir().unwrap();
|
||||
let server = Server::new_with_options(Opt {
|
||||
task_webhook_url: Some(Url::parse("http://defined-in-test-cli.com").unwrap()),
|
||||
task_webhook_authorization_header: Some(String::from(
|
||||
"Bearer a-secret-token-defined-in-test-cli",
|
||||
)),
|
||||
import_dump: Some(PathBuf::from("../dump/tests/assets/v6-with-webhooks.dump")),
|
||||
..default_settings(db_path.path())
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let (webhooks, code) = server.get_webhooks().await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(webhooks, @r#"
|
||||
{
|
||||
"results": [
|
||||
{
|
||||
"uuid": "00000000-0000-0000-0000-000000000000",
|
||||
"isEditable": false,
|
||||
"url": "http://defined-in-test-cli.com/",
|
||||
"headers": {
|
||||
"Authorization": "Bearer a-secret-token-defined-in-test-cli"
|
||||
}
|
||||
},
|
||||
{
|
||||
"uuid": "627ea538-733d-4545-8d2d-03526eb381ce",
|
||||
"isEditable": true,
|
||||
"url": "https://example.com/authorization-less",
|
||||
"headers": {}
|
||||
},
|
||||
{
|
||||
"uuid": "771b0a28-ef28-4082-b984-536f82958c65",
|
||||
"isEditable": true,
|
||||
"url": "https://example.com/hook",
|
||||
"headers": {
|
||||
"authorization": "TOKEN"
|
||||
}
|
||||
},
|
||||
{
|
||||
"uuid": "f3583083-f8a7-4cbf-a5e7-fb3f1e28a7e9",
|
||||
"isEditable": true,
|
||||
"url": "https://third.com",
|
||||
"headers": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
"#);
|
||||
}
|
||||
|
||||
#[actix_web::test]
|
||||
async fn reserved_names() {
|
||||
let db_path = tempfile::tempdir().unwrap();
|
||||
let server = Server::new_with_options(Opt {
|
||||
task_webhook_url: Some(Url::parse("https://example-cli.com/").unwrap()),
|
||||
task_webhook_authorization_header: Some(String::from("Bearer a-secret-token")),
|
||||
..default_settings(db_path.path())
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let (value, code) = server
|
||||
.patch_webhook(Uuid::nil().to_string(), json!({ "url": "http://localhost:8080" }))
|
||||
.await;
|
||||
snapshot!(value, @r#"
|
||||
{
|
||||
"message": "Webhook `[uuid]` is immutable. The webhook defined from the command line cannot be modified using the API.",
|
||||
"code": "immutable_webhook",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#immutable_webhook"
|
||||
}
|
||||
"#);
|
||||
snapshot!(code, @"400 Bad Request");
|
||||
|
||||
let (value, code) = server.delete_webhook(Uuid::nil().to_string()).await;
|
||||
snapshot!(value, @r#"
|
||||
{
|
||||
"message": "Webhook `[uuid]` is immutable. The webhook defined from the command line cannot be modified using the API.",
|
||||
"code": "immutable_webhook",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#immutable_webhook"
|
||||
}
|
||||
"#);
|
||||
snapshot!(code, @"400 Bad Request");
|
||||
}
|
||||
|
||||
#[actix_web::test]
|
||||
async fn over_limits() {
|
||||
let server = Server::new().await;
|
||||
|
||||
// Too many webhooks
|
||||
let mut uuids = Vec::new();
|
||||
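// A server accepts at most 20 webhooks; the 21st creation attempt below must fail.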
for _ in 0..20 {
|
||||
let (value, code) = server.create_webhook(json!({ "url": "http://localhost:8080" })).await;
|
||||
snapshot!(code, @"201 Created");
|
||||
uuids.push(value.get("uuid").unwrap().as_str().unwrap().to_string());
|
||||
}
|
||||
let (value, code) = server.create_webhook(json!({ "url": "http://localhost:8080" })).await;
|
||||
snapshot!(code, @"400 Bad Request");
|
||||
snapshot!(value, @r#"
|
||||
{
|
||||
"message": "Defining too many webhooks would crush the server. Please limit the number of webhooks to 20. You may use a third-party proxy server to dispatch events to more than 20 endpoints.",
|
||||
"code": "invalid_webhooks",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_webhooks"
|
||||
}
|
||||
"#);
|
||||
|
||||
// Reset webhooks
|
||||
for uuid in uuids {
|
||||
let (_value, code) = server.delete_webhook(&uuid).await;
|
||||
snapshot!(code, @"204 No Content");
|
||||
}
|
||||
|
||||
// Test too many headers
|
||||
let (value, code) = server.create_webhook(json!({ "url": "http://localhost:8080" })).await;
|
||||
snapshot!(code, @"201 Created");
|
||||
let uuid = value.get("uuid").unwrap().as_str().unwrap();
|
||||
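// A single webhook accepts up to 200 headers; adding a 201st must be rejected.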
for i in 0..200 {
|
||||
let header_name = format!("header_{i}");
|
||||
let (_value, code) =
|
||||
server.patch_webhook(uuid, json!({ "headers": { header_name: "" } })).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
}
|
||||
let (value, code) =
|
||||
server.patch_webhook(uuid, json!({ "headers": { "header_200": "" } })).await;
|
||||
snapshot!(code, @"400 Bad Request");
|
||||
snapshot!(value, @r#"
|
||||
{
|
||||
"message": "Too many headers for the webhook `[uuid]`. Please limit the number of headers to 200. Hint: To remove an already defined header set its value to `null`",
|
||||
"code": "invalid_webhook_headers",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_webhook_headers"
|
||||
}
|
||||
"#);
|
||||
}
|
||||
|
||||
#[actix_web::test]
|
||||
async fn post_get_delete() {
|
||||
let server = Server::new().await;
|
||||
|
||||
let (value, code) = server
|
||||
.create_webhook(json!({
|
||||
"url": "https://example.com/hook",
|
||||
"headers": { "authorization": "TOKEN" }
|
||||
}))
|
||||
.await;
|
||||
snapshot!(code, @"201 Created");
|
||||
snapshot!(json_string!(value, { ".uuid" => "[uuid]" }), @r#"
|
||||
{
|
||||
"uuid": "[uuid]",
|
||||
"isEditable": true,
|
||||
"url": "https://example.com/hook",
|
||||
"headers": {
|
||||
"authorization": "TOKEN"
|
||||
}
|
||||
}
|
||||
"#);
|
||||
|
||||
let uuid = value.get("uuid").unwrap().as_str().unwrap();
|
||||
let (value, code) = server.get_webhook(uuid).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(value, { ".uuid" => "[uuid]" }), @r#"
|
||||
{
|
||||
"uuid": "[uuid]",
|
||||
"isEditable": true,
|
||||
"url": "https://example.com/hook",
|
||||
"headers": {
|
||||
"authorization": "TOKEN"
|
||||
}
|
||||
}
|
||||
"#);
|
||||
|
||||
let (_value, code) = server.delete_webhook(uuid).await;
|
||||
snapshot!(code, @"204 No Content");
|
||||
|
||||
let (_value, code) = server.get_webhook(uuid).await;
|
||||
snapshot!(code, @"404 Not Found");
|
||||
}
|
||||
|
||||
#[actix_web::test]
|
||||
async fn create_and_patch() {
|
||||
let server = Server::new().await;
|
||||
|
||||
let (value, code) =
|
||||
server.create_webhook(json!({ "headers": { "authorization": "TOKEN" } })).await;
|
||||
snapshot!(code, @"400 Bad Request");
|
||||
snapshot!(value, @r#"
|
||||
{
|
||||
"message": "The URL for the webhook `[uuid]` is missing.",
|
||||
"code": "invalid_webhook_url",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_webhook_url"
|
||||
}
|
||||
"#);
|
||||
|
||||
let (value, code) = server.create_webhook(json!({ "url": "https://example.com/hook" })).await;
|
||||
snapshot!(code, @"201 Created");
|
||||
snapshot!(json_string!(value, { ".uuid" => "[uuid]" }), @r#"
|
||||
{
|
||||
"uuid": "[uuid]",
|
||||
"isEditable": true,
|
||||
"url": "https://example.com/hook",
|
||||
"headers": {}
|
||||
}
|
||||
"#);
|
||||
|
||||
let uuid = value.get("uuid").unwrap().as_str().unwrap();
|
||||
let (value, code) =
|
||||
server.patch_webhook(&uuid, json!({ "headers": { "authorization": "TOKEN" } })).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(value, { ".uuid" => "[uuid]" }), @r#"
|
||||
{
|
||||
"uuid": "[uuid]",
|
||||
"isEditable": true,
|
||||
"url": "https://example.com/hook",
|
||||
"headers": {
|
||||
"authorization": "TOKEN"
|
||||
}
|
||||
}
|
||||
"#);
|
||||
|
||||
let (value, code) =
|
||||
server.patch_webhook(&uuid, json!({ "headers": { "authorization2": "TOKEN" } })).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(value, { ".uuid" => "[uuid]" }), @r#"
|
||||
{
|
||||
"uuid": "[uuid]",
|
||||
"isEditable": true,
|
||||
"url": "https://example.com/hook",
|
||||
"headers": {
|
||||
"authorization": "TOKEN",
|
||||
"authorization2": "TOKEN"
|
||||
}
|
||||
}
|
||||
"#);
|
||||
|
||||
let (value, code) =
|
||||
server.patch_webhook(&uuid, json!({ "headers": { "authorization": null } })).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(value, { ".uuid" => "[uuid]" }), @r#"
|
||||
{
|
||||
"uuid": "[uuid]",
|
||||
"isEditable": true,
|
||||
"url": "https://example.com/hook",
|
||||
"headers": {
|
||||
"authorization2": "TOKEN"
|
||||
}
|
||||
}
|
||||
"#);
|
||||
|
||||
let (value, code) = server.patch_webhook(&uuid, json!({ "url": null })).await;
|
||||
snapshot!(code, @"400 Bad Request");
|
||||
snapshot!(json_string!(value, { ".uuid" => "[uuid]" }), @r#"
|
||||
{
|
||||
"message": "The URL for the webhook `[uuid]` is missing.",
|
||||
"code": "invalid_webhook_url",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_webhook_url"
|
||||
}
|
||||
"#);
|
||||
}
|
||||
|
||||
#[actix_web::test]
|
||||
async fn invalid_url_and_headers() {
|
||||
let server = Server::new().await;
|
||||
|
||||
// Test invalid URL format
|
||||
let (value, code) = server.create_webhook(json!({ "url": "not-a-valid-url" })).await;
|
||||
snapshot!(code, @"400 Bad Request");
|
||||
snapshot!(value, @r#"
|
||||
{
|
||||
"message": "Invalid URL `not-a-valid-url`: relative URL without a base",
|
||||
"code": "invalid_webhook_url",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_webhook_url"
|
||||
}
|
||||
"#);
|
||||
|
||||
// Test invalid header name (containing spaces)
|
||||
let (value, code) = server
|
||||
.create_webhook(json!({
|
||||
"url": "https://example.com/hook",
|
||||
"headers": { "invalid header name": "value" }
|
||||
}))
|
||||
.await;
|
||||
snapshot!(code, @"400 Bad Request");
|
||||
snapshot!(value, @r#"
|
||||
{
|
||||
"message": "Invalid header name `invalid header name`: invalid HTTP header name",
|
||||
"code": "invalid_webhook_headers",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_webhook_headers"
|
||||
}
|
||||
"#);
|
||||
|
||||
// Test invalid header value (containing control characters)
|
||||
let (value, code) = server
|
||||
.create_webhook(json!({
|
||||
"url": "https://example.com/hook",
|
||||
"headers": { "authorization": "token\nwith\nnewlines" }
|
||||
}))
|
||||
.await;
|
||||
snapshot!(code, @"400 Bad Request");
|
||||
snapshot!(value, @r#"
|
||||
{
|
||||
"message": "Invalid header value `authorization`: failed to parse header value",
|
||||
"code": "invalid_webhook_headers",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_webhook_headers"
|
||||
}
|
||||
"#);
|
||||
}
|
||||
|
||||
#[actix_web::test]
|
||||
async fn invalid_uuid() {
|
||||
let server = Server::new().await;
|
||||
|
||||
// Test get webhook with invalid UUID
|
||||
let (value, code) = server.get_webhook("invalid-uuid").await;
|
||||
snapshot!(code, @"400 Bad Request");
|
||||
snapshot!(value, @r#"
|
||||
{
|
||||
"message": "Invalid UUID: invalid character: expected an optional prefix of `urn:uuid:` followed by [0-9a-fA-F-], found `i` at 1",
|
||||
"code": "invalid_webhook_uuid",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_webhook_uuid"
|
||||
}
|
||||
"#);
|
||||
|
||||
// Test update webhook with invalid UUID
|
||||
let (value, code) =
|
||||
server.patch_webhook("invalid-uuid", json!({ "url": "https://example.com/hook" })).await;
|
||||
snapshot!(code, @"400 Bad Request");
|
||||
snapshot!(value, @r#"
|
||||
{
|
||||
"message": "Invalid UUID: invalid character: expected an optional prefix of `urn:uuid:` followed by [0-9a-fA-F-], found `i` at 1",
|
||||
"code": "invalid_webhook_uuid",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_webhook_uuid"
|
||||
}
|
||||
"#);
|
||||
|
||||
// Test delete webhook with invalid UUID
|
||||
let (value, code) = server.delete_webhook("invalid-uuid").await;
|
||||
snapshot!(code, @"400 Bad Request");
|
||||
snapshot!(value, @r#"
|
||||
{
|
||||
"message": "Invalid UUID: invalid character: expected an optional prefix of `urn:uuid:` followed by [0-9a-fA-F-], found `i` at 1",
|
||||
"code": "invalid_webhook_uuid",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_webhook_uuid"
|
||||
}
|
||||
"#);
|
||||
}
|
||||
|
||||
#[actix_web::test]
|
||||
async fn forbidden_fields() {
|
||||
let server = Server::new().await;
|
||||
|
||||
// Test creating webhook with uuid field
|
||||
let custom_uuid = Uuid::new_v4();
|
||||
let (value, code) = server
|
||||
.create_webhook(json!({
|
||||
"url": "https://example.com/hook",
|
||||
"uuid": custom_uuid.to_string(),
|
||||
"headers": { "authorization": "TOKEN" }
|
||||
}))
|
||||
.await;
|
||||
snapshot!(code, @"400 Bad Request");
|
||||
snapshot!(value, @r#"
|
||||
{
|
||||
"message": "Immutable field `uuid`: expected one of `url`, `headers`",
|
||||
"code": "immutable_webhook_uuid",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#immutable_webhook_uuid"
|
||||
}
|
||||
"#);
|
||||
|
||||
// Test creating webhook with isEditable field
|
||||
let (value, code) = server
|
||||
.create_webhook(json!({
|
||||
"url": "https://example.com/hook2",
|
||||
"isEditable": false,
|
||||
"headers": { "authorization": "TOKEN" }
|
||||
}))
|
||||
.await;
|
||||
snapshot!(code, @"400 Bad Request");
|
||||
snapshot!(value, @r#"
|
||||
{
|
||||
"message": "Immutable field `isEditable`: expected one of `url`, `headers`",
|
||||
"code": "immutable_webhook_is_editable",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#immutable_webhook_is_editable"
|
||||
}
|
||||
"#);
|
||||
|
||||
// Test patching webhook with uuid field
|
||||
let (value, code) = server
|
||||
.patch_webhook(
|
||||
"uuid-whatever",
|
||||
json!({
|
||||
"uuid": Uuid::new_v4(),
|
||||
"headers": { "new-header": "value" }
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
snapshot!(code, @"400 Bad Request");
|
||||
snapshot!(value, @r#"
|
||||
{
|
||||
"message": "Immutable field `uuid`: expected one of `url`, `headers`",
|
||||
"code": "immutable_webhook_uuid",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#immutable_webhook_uuid"
|
||||
}
|
||||
"#);
|
||||
|
||||
// Test patching webhook with isEditable field
|
||||
let (value, code) = server
|
||||
.patch_webhook(
|
||||
"uuid-whatever",
|
||||
json!({
|
||||
"isEditable": false,
|
||||
"headers": { "another-header": "value" }
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
snapshot!(code, @"400 Bad Request");
|
||||
snapshot!(json_string!(value, { ".uuid" => "[uuid]" }), @r#"
|
||||
{
|
||||
"message": "Immutable field `isEditable`: expected one of `url`, `headers`",
|
||||
"code": "immutable_webhook_is_editable",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#immutable_webhook_is_editable"
|
||||
}
|
||||
"#);
|
||||
}
|
||||
|
@ -43,7 +43,7 @@ async fn version_too_old() {
|
||||
std::fs::write(db_path.join("VERSION"), "1.11.9999").unwrap();
|
||||
let options = Opt { experimental_dumpless_upgrade: true, ..default_settings };
|
||||
let err = Server::new_with_options(options).await.map(|_| ()).unwrap_err();
|
||||
snapshot!(err, @"Database version 1.11.9999 is too old for the experimental dumpless upgrade feature. Please generate a dump using the v1.11.9999 and import it in the v1.16.0");
|
||||
snapshot!(err, @"Database version 1.11.9999 is too old for the experimental dumpless upgrade feature. Please generate a dump using the v1.11.9999 and import it in the v1.17.1");
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@ -58,7 +58,7 @@ async fn version_requires_downgrade() {
|
||||
std::fs::write(db_path.join("VERSION"), format!("{major}.{minor}.{patch}")).unwrap();
|
||||
let options = Opt { experimental_dumpless_upgrade: true, ..default_settings };
|
||||
let err = Server::new_with_options(options).await.map(|_| ()).unwrap_err();
|
||||
snapshot!(err, @"Database version 1.16.1 is higher than the Meilisearch version 1.16.0. Downgrade is not supported");
|
||||
snapshot!(err, @"Database version 1.17.2 is higher than the Meilisearch version 1.17.1. Downgrade is not supported");
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
|
@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"progress": null,
|
||||
"details": {
|
||||
"upgradeFrom": "v1.12.0",
|
||||
"upgradeTo": "v1.16.0"
|
||||
"upgradeTo": "v1.17.1"
|
||||
},
|
||||
"stats": {
|
||||
"totalNbTasks": 1,
|
||||
|
@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"progress": null,
|
||||
"details": {
|
||||
"upgradeFrom": "v1.12.0",
|
||||
"upgradeTo": "v1.16.0"
|
||||
"upgradeTo": "v1.17.1"
|
||||
},
|
||||
"stats": {
|
||||
"totalNbTasks": 1,
|
||||
|
@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"progress": null,
|
||||
"details": {
|
||||
"upgradeFrom": "v1.12.0",
|
||||
"upgradeTo": "v1.16.0"
|
||||
"upgradeTo": "v1.17.1"
|
||||
},
|
||||
"stats": {
|
||||
"totalNbTasks": 1,
|
||||
|
@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"canceledBy": null,
|
||||
"details": {
|
||||
"upgradeFrom": "v1.12.0",
|
||||
"upgradeTo": "v1.16.0"
|
||||
"upgradeTo": "v1.17.1"
|
||||
},
|
||||
"error": null,
|
||||
"duration": "[duration]",
|
||||
|
@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"canceledBy": null,
|
||||
"details": {
|
||||
"upgradeFrom": "v1.12.0",
|
||||
"upgradeTo": "v1.16.0"
|
||||
"upgradeTo": "v1.17.1"
|
||||
},
|
||||
"error": null,
|
||||
"duration": "[duration]",
|
||||
|
@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"canceledBy": null,
|
||||
"details": {
|
||||
"upgradeFrom": "v1.12.0",
|
||||
"upgradeTo": "v1.16.0"
|
||||
"upgradeTo": "v1.17.1"
|
||||
},
|
||||
"error": null,
|
||||
"duration": "[duration]",
|
||||
|
@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"progress": null,
|
||||
"details": {
|
||||
"upgradeFrom": "v1.12.0",
|
||||
"upgradeTo": "v1.16.0"
|
||||
"upgradeTo": "v1.17.1"
|
||||
},
|
||||
"stats": {
|
||||
"totalNbTasks": 1,
|
||||
|
@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"canceledBy": null,
|
||||
"details": {
|
||||
"upgradeFrom": "v1.12.0",
|
||||
"upgradeTo": "v1.16.0"
|
||||
"upgradeTo": "v1.17.1"
|
||||
},
|
||||
"error": null,
|
||||
"duration": "[duration]",
|
||||
|
@ -323,7 +323,7 @@ async fn binary_quantize_clear_documents() {
|
||||
// Make sure the arroy DB has been cleared
|
||||
let (documents, _code) =
|
||||
index.search_post(json!({ "hybrid": { "embedder": "manual" }, "vector": [1, 1, 1] })).await;
|
||||
snapshot!(documents, @r###"
|
||||
snapshot!(documents, @r#"
|
||||
{
|
||||
"hits": [],
|
||||
"query": "",
|
||||
@ -333,5 +333,5 @@ async fn binary_quantize_clear_documents() {
|
||||
"estimatedTotalHits": 0,
|
||||
"semanticHitCount": 0
|
||||
}
|
||||
"###);
|
||||
"#);
|
||||
}
|
||||
|
@ -687,7 +687,7 @@ async fn clear_documents() {
|
||||
// Make sure the arroy DB has been cleared
|
||||
let (documents, _code) =
|
||||
index.search_post(json!({ "vector": [1, 1, 1], "hybrid": {"embedder": "manual"} })).await;
|
||||
snapshot!(documents, @r###"
|
||||
snapshot!(documents, @r#"
|
||||
{
|
||||
"hits": [],
|
||||
"query": "",
|
||||
@ -697,7 +697,7 @@ async fn clear_documents() {
|
||||
"estimatedTotalHits": 0,
|
||||
"semanticHitCount": 0
|
||||
}
|
||||
"###);
|
||||
"#);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@ -741,7 +741,7 @@ async fn add_remove_one_vector_4588() {
|
||||
json!({"vector": [1, 1, 1], "hybrid": {"semanticRatio": 1.0, "embedder": "manual"} }),
|
||||
)
|
||||
.await;
|
||||
snapshot!(documents, @r###"
|
||||
snapshot!(documents, @r#"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@ -756,7 +756,7 @@ async fn add_remove_one_vector_4588() {
|
||||
"estimatedTotalHits": 1,
|
||||
"semanticHitCount": 1
|
||||
}
|
||||
"###);
|
||||
"#);
|
||||
|
||||
let (documents, _code) = index
|
||||
.get_all_documents(GetAllDocumentsOptions { retrieve_vectors: true, ..Default::default() })
|
||||
|