Merge branch 'main' into faster-batches-it-tests

Author: Martin Grigorov
Date: 2025-07-11 10:49:22 +03:00 (committed by GitHub)
346 changed files with 14188 additions and 4972 deletions


@ -13,50 +13,50 @@ license.workspace = true
default-run = "meilisearch"
[dependencies]
actix-cors = "0.7.0"
actix-http = { version = "3.9.0", default-features = false, features = [
actix-cors = "0.7.1"
actix-http = { version = "3.11.0", default-features = false, features = [
"compress-brotli",
"compress-gzip",
"rustls-0_23",
] }
actix-utils = "3.0.1"
actix-web = { version = "4.9.0", default-features = false, features = [
actix-web = { version = "4.11.0", default-features = false, features = [
"macros",
"compress-brotli",
"compress-gzip",
"cookies",
"rustls-0_23",
] }
anyhow = { version = "1.0.95", features = ["backtrace"] }
async-trait = "0.1.85"
bstr = "1.11.3"
anyhow = { version = "1.0.98", features = ["backtrace"] }
bstr = "1.12.0"
byte-unit = { version = "5.1.6", features = ["serde"] }
bytes = "1.9.0"
clap = { version = "4.5.24", features = ["derive", "env"] }
bytes = "1.10.1"
bumpalo = "3.18.1"
clap = { version = "4.5.40", features = ["derive", "env"] }
crossbeam-channel = "0.5.15"
deserr = { version = "0.6.3", features = ["actix-web"] }
dump = { path = "../dump" }
either = "1.13.0"
either = "1.15.0"
file-store = { path = "../file-store" }
flate2 = "1.0.35"
flate2 = "1.1.2"
fst = "0.4.7"
futures = "0.3.31"
futures-util = "0.3.31"
index-scheduler = { path = "../index-scheduler" }
indexmap = { version = "2.7.0", features = ["serde"] }
is-terminal = "0.4.13"
indexmap = { version = "2.9.0", features = ["serde"] }
is-terminal = "0.4.16"
itertools = "0.14.0"
jsonwebtoken = "9.3.0"
jsonwebtoken = "9.3.1"
lazy_static = "1.5.0"
meilisearch-auth = { path = "../meilisearch-auth" }
meilisearch-types = { path = "../meilisearch-types" }
mimalloc = { version = "0.1.43", default-features = false }
mimalloc = { version = "0.1.47", default-features = false }
mime = "0.3.17"
num_cpus = "1.16.0"
num_cpus = "1.17.0"
obkv = "0.3.0"
once_cell = "1.20.2"
ordered-float = "4.6.0"
parking_lot = "0.12.3"
once_cell = "1.21.3"
ordered-float = "5.0.0"
parking_lot = "0.12.4"
permissive-json-pointer = { path = "../permissive-json-pointer" }
pin-project-lite = "0.2.16"
platform-dirs = "0.3.0"
@ -64,44 +64,44 @@ prometheus = { version = "0.14.0", features = ["process"] }
rand = "0.8.5"
rayon = "1.10.0"
regex = "1.11.1"
reqwest = { version = "0.12.12", features = [
reqwest = { version = "0.12.20", features = [
"rustls-tls",
"json",
], default-features = false }
rustls = { version = "0.23.20", features = ["ring"], default-features = false }
rustls-pki-types = { version = "1.10.1", features = ["alloc"] }
rustls = { version = "0.23.28", features = ["ring"], default-features = false }
rustls-pki-types = { version = "1.12.0", features = ["alloc"] }
rustls-pemfile = "2.2.0"
segment = { version = "0.2.5" }
serde = { version = "1.0.217", features = ["derive"] }
serde_json = { version = "1.0.135", features = ["preserve_order"] }
sha2 = "0.10.8"
segment = { version = "0.2.6" }
serde = { version = "1.0.219", features = ["derive"] }
serde_json = { version = "1.0.140", features = ["preserve_order"] }
sha2 = "0.10.9"
siphasher = "1.0.1"
slice-group-by = "0.3.1"
static-files = { version = "0.2.4", optional = true }
sysinfo = "0.33.1"
tar = "0.4.43"
tempfile = "3.15.0"
thiserror = "2.0.9"
time = { version = "0.3.37", features = [
static-files = { version = "0.2.5", optional = true }
sysinfo = "0.35.2"
tar = "0.4.44"
tempfile = "3.20.0"
thiserror = "2.0.12"
time = { version = "0.3.41", features = [
"serde-well-known",
"formatting",
"parsing",
"macros",
] }
tokio = { version = "1.43.1", features = ["full"] }
toml = "0.8.19"
uuid = { version = "1.11.0", features = ["serde", "v4"] }
tokio = { version = "1.45.1", features = ["full"] }
toml = "0.8.23"
uuid = { version = "1.17.0", features = ["serde", "v4"] }
serde_urlencoded = "0.7.1"
termcolor = "1.4.1"
url = { version = "2.5.4", features = ["serde"] }
tracing = "0.1.41"
tracing-subscriber = { version = "0.3.19", features = ["json"] }
tracing-trace = { version = "0.1.0", path = "../tracing-trace" }
tracing-actix-web = "0.7.15"
tracing-actix-web = "0.7.18"
build-info = { version = "1.7.0", path = "../build-info" }
roaring = "0.10.10"
roaring = "0.10.12"
mopa-maintained = "0.2.3"
utoipa = { version = "5.3.1", features = [
utoipa = { version = "5.4.0", features = [
"actix_extras",
"macros",
"non_strict_integers",
@ -111,10 +111,13 @@ utoipa = { version = "5.3.1", features = [
"openapi_extensions",
] }
utoipa-scalar = { version = "0.3.0", optional = true, features = ["actix-web"] }
async-openai = { git = "https://github.com/meilisearch/async-openai", branch = "better-error-handling" }
secrecy = "0.10.3"
actix-web-lab = { version = "0.24.1", default-features = false }
[dev-dependencies]
actix-rt = "2.10.0"
brotli = "6.0.0"
brotli = "8.0.1"
# fixed version due to format breakages in v1.40
insta = { version = "=1.39.0", features = ["redactions"] }
manifest-dir-macros = "0.1.18"
@ -122,21 +125,21 @@ maplit = "1.0.2"
meili-snap = { path = "../meili-snap" }
temp-env = "0.3.6"
urlencoding = "2.1.3"
wiremock = "0.6.2"
wiremock = "0.6.3"
yaup = "0.3.1"
[build-dependencies]
anyhow = { version = "1.0.95", optional = true }
cargo_toml = { version = "0.21.0", optional = true }
anyhow = { version = "1.0.98", optional = true }
cargo_toml = { version = "0.22.1", optional = true }
hex = { version = "0.4.3", optional = true }
reqwest = { version = "0.12.12", features = [
reqwest = { version = "0.12.20", features = [
"blocking",
"rustls-tls",
], default-features = false, optional = true }
sha-1 = { version = "0.10.1", optional = true }
static-files = { version = "0.2.4", optional = true }
tempfile = { version = "3.15.0", optional = true }
zip = { version = "2.3.0", optional = true }
static-files = { version = "0.2.5", optional = true }
tempfile = { version = "3.20.0", optional = true }
zip = { version = "4.1.0", optional = true }
[features]
default = ["meilisearch-types/all-tokenizations", "mini-dashboard"]
@ -166,5 +169,5 @@ german = ["meilisearch-types/german"]
turkish = ["meilisearch-types/turkish"]
[package.metadata.mini-dashboard]
assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.19/build.zip"
sha1 = "7974430d5277c97f67cf6e95eec6faaac2788834"
assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.20/build.zip"
sha1 = "82a7ddd7bf14bb5323c3d235d2b62892a98b6a59"


@ -197,9 +197,13 @@ struct Infos {
experimental_max_number_of_batched_tasks: usize,
experimental_limit_batched_tasks_total_size: u64,
experimental_network: bool,
experimental_multimodal: bool,
experimental_chat_completions: bool,
experimental_get_task_documents_route: bool,
experimental_composite_embedders: bool,
experimental_embedding_cache_entries: usize,
experimental_no_snapshot_compaction: bool,
experimental_no_edition_2024_for_settings: bool,
gpu_enabled: bool,
db_path: bool,
import_dump: bool,
@ -248,6 +252,7 @@ impl Infos {
experimental_max_number_of_batched_tasks,
experimental_limit_batched_tasks_total_size,
experimental_embedding_cache_entries,
experimental_no_snapshot_compaction,
http_addr,
master_key: _,
env,
@ -283,8 +288,12 @@ impl Infos {
ScheduleSnapshot::Enabled(interval) => Some(interval),
};
-let IndexerOpts { max_indexing_memory, max_indexing_threads, skip_index_budget: _ } =
-indexer_options;
+let IndexerOpts {
+max_indexing_memory,
+max_indexing_threads,
+skip_index_budget: _,
+experimental_no_edition_2024_for_settings,
+} = indexer_options;
let RuntimeTogglableFeatures {
metrics,
@ -294,6 +303,8 @@ impl Infos {
network,
get_task_documents_route,
composite_embedders,
chat_completions,
multimodal,
} = features;
// We're going to override every sensitive piece of information.
@ -312,9 +323,12 @@ impl Infos {
experimental_enable_logs_route: experimental_enable_logs_route | logs_route,
experimental_reduce_indexing_memory_usage,
experimental_network: network,
experimental_chat_completions: chat_completions,
experimental_multimodal: multimodal,
experimental_get_task_documents_route: get_task_documents_route,
experimental_composite_embedders: composite_embedders,
experimental_embedding_cache_entries,
experimental_no_snapshot_compaction,
gpu_enabled: meilisearch_types::milli::vector::is_cuda_enabled(),
db_path: db_path != PathBuf::from("./data.ms"),
import_dump: import_dump.is_some(),
@ -344,6 +358,7 @@ impl Infos {
ssl_require_auth,
ssl_resumption,
ssl_tickets,
experimental_no_edition_2024_for_settings,
}
}
}


@ -76,8 +76,10 @@ pub enum MeilisearchHttpError {
DocumentFormat(#[from] DocumentFormatError),
#[error(transparent)]
Join(#[from] JoinError),
#[error("Invalid request: missing `hybrid` parameter when `vector` is present.")]
#[error("Invalid request: missing `hybrid` parameter when `vector` or `media` are present.")]
MissingSearchHybrid,
#[error("Invalid request: both `media` and `vector` parameters are present.")]
MediaAndVector,
}
impl MeilisearchHttpError {
@ -111,6 +113,7 @@ impl ErrorCode for MeilisearchHttpError {
MeilisearchHttpError::DocumentFormat(e) => e.error_code(),
MeilisearchHttpError::Join(_) => Code::Internal,
MeilisearchHttpError::MissingSearchHybrid => Code::MissingSearchHybrid,
MeilisearchHttpError::MediaAndVector => Code::InvalidSearchMediaAndVector,
MeilisearchHttpError::FederationOptionsInNonFederatedRequest(_) => {
Code::InvalidMultiSearchFederationOptions
}


@ -4,6 +4,7 @@ use std::marker::PhantomData;
use std::ops::Deref;
use std::pin::Pin;
use actix_web::http::header::AUTHORIZATION;
use actix_web::web::Data;
use actix_web::FromRequest;
pub use error::AuthenticationError;
@ -94,36 +95,44 @@ impl<P: Policy + 'static, D: 'static + Clone> FromRequest for GuardedData<P, D>
_payload: &mut actix_web::dev::Payload,
) -> Self::Future {
match req.app_data::<Data<AuthController>>().cloned() {
-Some(auth) => match req
-.headers()
-.get("Authorization")
-.map(|type_token| type_token.to_str().unwrap_or_default().splitn(2, ' '))
-{
-Some(mut type_token) => match type_token.next() {
-Some("Bearer") => {
-// TODO: find a less hardcoded way?
-let index = req.match_info().get("index_uid");
-match type_token.next() {
-Some(token) => Box::pin(Self::auth_bearer(
-auth,
-token.to_string(),
-index.map(String::from),
-req.app_data::<D>().cloned(),
-)),
-None => Box::pin(err(AuthenticationError::InvalidToken.into())),
-}
-}
-_otherwise => {
-Box::pin(err(AuthenticationError::MissingAuthorizationHeader.into()))
-}
-},
-None => Box::pin(Self::auth_token(auth, req.app_data::<D>().cloned())),
+Some(auth) => match extract_token_from_request(req) {
+Ok(Some(token)) => {
+// TODO: find a less hardcoded way?
+let index = req.match_info().get("index_uid");
+Box::pin(Self::auth_bearer(
+auth,
+token.to_string(),
+index.map(String::from),
+req.app_data::<D>().cloned(),
+))
+}
+Ok(None) => Box::pin(Self::auth_token(auth, req.app_data::<D>().cloned())),
+Err(e) => Box::pin(err(e.into())),
+},
None => Box::pin(err(AuthenticationError::IrretrievableState.into())),
}
}
}
pub fn extract_token_from_request(
req: &actix_web::HttpRequest,
) -> Result<Option<&str>, AuthenticationError> {
match req
.headers()
.get(AUTHORIZATION)
.map(|type_token| type_token.to_str().unwrap_or_default().splitn(2, ' '))
{
Some(mut type_token) => match type_token.next() {
Some("Bearer") => match type_token.next() {
Some(token) => Ok(Some(token)),
None => Err(AuthenticationError::InvalidToken),
},
_otherwise => Err(AuthenticationError::MissingAuthorizationHeader),
},
None => Ok(None),
}
}
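The contract of the freshly extracted helper is easy to pin down with a unit-test sketch (a hypothetical test, not part of this diff; it assumes `AuthenticationError` derives `Debug` so that `unwrap` compiles):

#[cfg(test)]
mod token_extraction_tests {
    use actix_web::http::header::AUTHORIZATION;
    use actix_web::test::TestRequest;

    use super::extract_token_from_request;

    #[test]
    fn bearer_tokens_and_missing_headers() {
        // A well-formed header yields the token after the "Bearer " prefix.
        let req = TestRequest::default()
            .insert_header((AUTHORIZATION, "Bearer my-api-key"))
            .to_http_request();
        assert_eq!(extract_token_from_request(&req).unwrap(), Some("my-api-key"));

        // A missing Authorization header is not an error: the caller falls
        // back to `auth_token` (the Ok(None) arm above).
        let req = TestRequest::default().to_http_request();
        assert_eq!(extract_token_from_request(&req).unwrap(), None);

        // A non-Bearer scheme is rejected with MissingAuthorizationHeader.
        let req = TestRequest::default()
            .insert_header((AUTHORIZATION, "Basic dXNlcjpwYXNz"))
            .to_http_request();
        assert!(extract_token_from_request(&req).is_err());
    }
}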
pub trait Policy {
fn authenticate(
auth: Data<AuthController>,
@ -299,8 +308,8 @@ pub mod policies {
auth: &AuthController,
token: &str,
) -> Result<TenantTokenOutcome, AuthError> {
-// Only search action can be accessed by a tenant token.
-if A != actions::SEARCH {
+// Only search and chat actions can be accessed by a tenant token.
+if A != actions::SEARCH && A != actions::CHAT_COMPLETIONS {
return Ok(TenantTokenOutcome::NotATenantToken);
}


@ -37,6 +37,7 @@ use index_scheduler::{IndexScheduler, IndexSchedulerOptions};
use meilisearch_auth::{open_auth_store_env, AuthController};
use meilisearch_types::milli::constants::VERSION_MAJOR;
use meilisearch_types::milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader};
use meilisearch_types::milli::progress::{EmbedderStats, Progress};
use meilisearch_types::milli::update::{
default_thread_pool_and_threads, IndexDocumentsConfig, IndexDocumentsMethod, IndexerConfig,
};
@ -236,6 +237,7 @@ pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<(Arc<IndexScheduler>, Arc<
instance_features: opt.to_instance_features(),
auto_upgrade: opt.experimental_dumpless_upgrade,
embedding_cache_cap: opt.experimental_embedding_cache_entries,
experimental_no_snapshot_compaction: opt.experimental_no_snapshot_compaction,
};
let binary_version = (VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH);
@ -462,6 +464,7 @@ fn import_dump(
index_scheduler: &mut IndexScheduler,
auth: &mut AuthController,
) -> Result<(), anyhow::Error> {
let progress = Progress::default();
let reader = File::open(dump_path)?;
let mut dump_reader = dump::DumpReader::open(reader)?;
@ -495,14 +498,20 @@ fn import_dump(
keys.push(key);
}
-// 3. Import the runtime features and network
+// 3. Import the `ChatCompletionSettings`s.
+for result in dump_reader.chat_completions_settings()? {
+let (name, settings) = result?;
+index_scheduler.put_chat_settings(&name, &settings)?;
+}
+// 4. Import the runtime features and network
let features = dump_reader.features()?.unwrap_or_default();
index_scheduler.put_runtime_features(features)?;
let network = dump_reader.network()?.cloned().unwrap_or_default();
index_scheduler.put_network(network)?;
-// 3.1 Use all cpus to process dump if `max_indexing_threads` not configured
+// 4.1 Use all cpus to process dump if `max_indexing_threads` not configured
let backup_config;
let base_config = index_scheduler.indexer_config();
@ -519,7 +528,7 @@ fn import_dump(
// /!\ The tasks must be imported AFTER importing the indexes or else the scheduler might
// try to process tasks while we're trying to import the indexes.
-// 4. Import the indexes.
+// 5. Import the indexes.
for index_reader in dump_reader.indexes()? {
let mut index_reader = index_reader?;
let metadata = index_reader.metadata();
@ -532,20 +541,20 @@ fn import_dump(
let mut wtxn = index.write_txn()?;
let mut builder = milli::update::Settings::new(&mut wtxn, &index, indexer_config);
-// 4.1 Import the primary key if there is one.
+// 5.1 Import the primary key if there is one.
if let Some(ref primary_key) = metadata.primary_key {
builder.set_primary_key(primary_key.to_string());
}
-// 4.2 Import the settings.
+// 5.2 Import the settings.
tracing::info!("Importing the settings.");
let settings = index_reader.settings()?;
apply_settings_to_builder(&settings, &mut builder);
-builder
-.execute(|indexing_step| tracing::debug!("update: {:?}", indexing_step), || false)?;
+let embedder_stats: Arc<EmbedderStats> = Default::default();
+builder.execute(&|| false, &progress, embedder_stats.clone())?;
-// 4.3 Import the documents.
-// 4.3.1 We need to recreate the grenad+obkv format accepted by the index.
+// 5.3 Import the documents.
+// 5.3.1 We need to recreate the grenad+obkv format accepted by the index.
tracing::info!("Importing the documents.");
let file = tempfile::tempfile()?;
let mut builder = DocumentsBatchBuilder::new(BufWriter::new(file));
@ -556,11 +565,11 @@ fn import_dump(
// This flush the content of the batch builder.
let file = builder.into_inner()?.into_inner()?;
-// 4.3.2 We feed it to the milli index.
+// 5.3.2 We feed it to the milli index.
let reader = BufReader::new(file);
let reader = DocumentsBatchReader::from_reader(reader)?;
-let embedder_configs = index.embedding_configs(&wtxn)?;
+let embedder_configs = index.embedding_configs().embedding_configs(&wtxn)?;
let embedders = index_scheduler.embedders(uid.to_string(), embedder_configs)?;
let builder = milli::update::IndexDocuments::new(
@ -573,6 +582,7 @@ fn import_dump(
},
|indexing_step| tracing::trace!("update: {:?}", indexing_step),
|| false,
&embedder_stats,
)?;
let builder = builder.with_embedders(embedders);
@ -587,15 +597,15 @@ fn import_dump(
index_scheduler.refresh_index_stats(&uid)?;
}
-// 5. Import the queue
+// 6. Import the queue
let mut index_scheduler_dump = index_scheduler.register_dumped_task()?;
-// 5.1. Import the batches
+// 6.1. Import the batches
for ret in dump_reader.batches()? {
let batch = ret?;
index_scheduler_dump.register_dumped_batch(batch)?;
}
-// 5.2. Import the tasks
+// 6.2. Import the tasks
for ret in dump_reader.tasks()? {
let (task, file) = ret?;
index_scheduler_dump.register_dumped_task(task, file)?;


@ -15,6 +15,33 @@ lazy_static! {
"Meilisearch number of degraded search requests"
))
.expect("Can't create a metric");
pub static ref MEILISEARCH_CHAT_SEARCH_REQUESTS: IntCounterVec = register_int_counter_vec!(
opts!(
"meilisearch_chat_search_requests",
"Meilisearch number of search requests performed by the chat route itself"
),
&["type"]
)
.expect("Can't create a metric");
pub static ref MEILISEARCH_CHAT_PROMPT_TOKENS_USAGE: IntCounterVec = register_int_counter_vec!(
opts!("meilisearch_chat_prompt_tokens_usage", "Meilisearch Chat Prompt Tokens Usage"),
&["workspace", "model"]
)
.expect("Can't create a metric");
pub static ref MEILISEARCH_CHAT_COMPLETION_TOKENS_USAGE: IntCounterVec =
register_int_counter_vec!(
opts!(
"meilisearch_chat_completion_tokens_usage",
"Meilisearch Chat Completion Tokens Usage"
),
&["workspace", "model"]
)
.expect("Can't create a metric");
pub static ref MEILISEARCH_CHAT_TOTAL_TOKENS_USAGE: IntCounterVec = register_int_counter_vec!(
opts!("meilisearch_chat_total_tokens_usage", "Meilisearch Chat Total Tokens Usage"),
&["workspace", "model"]
)
.expect("Can't create a metric");
pub static ref MEILISEARCH_DB_SIZE_BYTES: IntGauge =
register_int_gauge!(opts!("meilisearch_db_size_bytes", "Meilisearch DB Size In Bytes"))
.expect("Can't create a metric");


@ -53,6 +53,8 @@ const MEILI_EXPERIMENTAL_DUMPLESS_UPGRADE: &str = "MEILI_EXPERIMENTAL_DUMPLESS_U
const MEILI_EXPERIMENTAL_REPLICATION_PARAMETERS: &str = "MEILI_EXPERIMENTAL_REPLICATION_PARAMETERS";
const MEILI_EXPERIMENTAL_ENABLE_LOGS_ROUTE: &str = "MEILI_EXPERIMENTAL_ENABLE_LOGS_ROUTE";
const MEILI_EXPERIMENTAL_CONTAINS_FILTER: &str = "MEILI_EXPERIMENTAL_CONTAINS_FILTER";
const MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_SETTINGS: &str =
"MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_SETTINGS";
const MEILI_EXPERIMENTAL_ENABLE_METRICS: &str = "MEILI_EXPERIMENTAL_ENABLE_METRICS";
const MEILI_EXPERIMENTAL_SEARCH_QUEUE_SIZE: &str = "MEILI_EXPERIMENTAL_SEARCH_QUEUE_SIZE";
const MEILI_EXPERIMENTAL_DROP_SEARCH_AFTER: &str = "MEILI_EXPERIMENTAL_DROP_SEARCH_AFTER";
@ -62,9 +64,10 @@ const MEILI_EXPERIMENTAL_REDUCE_INDEXING_MEMORY_USAGE: &str =
const MEILI_EXPERIMENTAL_MAX_NUMBER_OF_BATCHED_TASKS: &str =
"MEILI_EXPERIMENTAL_MAX_NUMBER_OF_BATCHED_TASKS";
const MEILI_EXPERIMENTAL_LIMIT_BATCHED_TASKS_TOTAL_SIZE: &str =
"MEILI_EXPERIMENTAL_LIMIT_BATCHED_TASKS_SIZE";
"MEILI_EXPERIMENTAL_LIMIT_BATCHED_TASKS_TOTAL_SIZE";
const MEILI_EXPERIMENTAL_EMBEDDING_CACHE_ENTRIES: &str =
"MEILI_EXPERIMENTAL_EMBEDDING_CACHE_ENTRIES";
const MEILI_EXPERIMENTAL_NO_SNAPSHOT_COMPACTION: &str = "MEILI_EXPERIMENTAL_NO_SNAPSHOT_COMPACTION";
const DEFAULT_CONFIG_FILE_PATH: &str = "./config.toml";
const DEFAULT_DB_PATH: &str = "./data.ms";
const DEFAULT_HTTP_ADDR: &str = "localhost:7700";
@ -455,6 +458,15 @@ pub struct Opt {
#[serde(default = "default_embedding_cache_entries")]
pub experimental_embedding_cache_entries: usize,
/// Experimental no snapshot compaction feature.
///
/// When enabled, Meilisearch will not compact snapshots during creation.
///
/// For more information, see <https://github.com/orgs/meilisearch/discussions/833>.
#[clap(long, env = MEILI_EXPERIMENTAL_NO_SNAPSHOT_COMPACTION)]
#[serde(default)]
pub experimental_no_snapshot_compaction: bool,
#[serde(flatten)]
#[clap(flatten)]
pub indexer_options: IndexerOpts,
@ -559,6 +571,7 @@ impl Opt {
experimental_max_number_of_batched_tasks,
experimental_limit_batched_tasks_total_size,
experimental_embedding_cache_entries,
experimental_no_snapshot_compaction,
} = self;
export_to_env_if_not_present(MEILI_DB_PATH, db_path);
export_to_env_if_not_present(MEILI_HTTP_ADDR, http_addr);
@ -655,6 +668,10 @@ impl Opt {
MEILI_EXPERIMENTAL_EMBEDDING_CACHE_ENTRIES,
experimental_embedding_cache_entries.to_string(),
);
export_to_env_if_not_present(
MEILI_EXPERIMENTAL_NO_SNAPSHOT_COMPACTION,
experimental_no_snapshot_compaction.to_string(),
);
indexer_options.export_to_env();
}
@ -734,12 +751,25 @@ pub struct IndexerOpts {
#[clap(skip)]
#[serde(skip)]
pub skip_index_budget: bool,
/// Enables the experimental no edition 2024 for settings feature.
///
/// For more information, see: <https://github.com/orgs/meilisearch/discussions/847>
#[clap(long, env = MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_SETTINGS)]
#[serde(default)]
pub experimental_no_edition_2024_for_settings: bool,
}
impl IndexerOpts {
/// Exports the values to their corresponding env vars if they are not set.
pub fn export_to_env(self) {
-let IndexerOpts { max_indexing_memory, max_indexing_threads, skip_index_budget: _ } = self;
+let IndexerOpts {
+max_indexing_memory,
+max_indexing_threads,
+skip_index_budget: _,
+experimental_no_edition_2024_for_settings,
+} = self;
if let Some(max_indexing_memory) = max_indexing_memory.0 {
export_to_env_if_not_present(
MEILI_MAX_INDEXING_MEMORY,
@ -752,6 +782,12 @@ impl IndexerOpts {
max_indexing_threads.to_string(),
);
}
if experimental_no_edition_2024_for_settings {
export_to_env_if_not_present(
MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_SETTINGS,
experimental_no_edition_2024_for_settings.to_string(),
);
}
}
}
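The body of `export_to_env_if_not_present` is not shown in this diff; its assumed contract is roughly the following sketch, which is why values the user set explicitly are never clobbered by these exports:

// Assumed behavior (sketch, not the actual implementation): write the
// variable only when it is absent from the environment.
fn export_to_env_if_not_present(key: &str, value: impl AsRef<std::ffi::OsStr>) {
    if std::env::var_os(key).is_none() {
        std::env::set_var(key, value);
    }
}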
@ -770,7 +806,12 @@ impl TryFrom<&IndexerOpts> for IndexerConfig {
max_threads: *other.max_indexing_threads,
max_positions_per_attributes: None,
skip_index_budget: other.skip_index_budget,
-..Default::default()
+experimental_no_edition_2024_for_settings: other
+.experimental_no_edition_2024_for_settings,
+chunk_compression_type: Default::default(),
+chunk_compression_level: Default::default(),
+documents_chunk_size: Default::default(),
+max_nb_chunks: Default::default(),
})
}
}


@ -0,0 +1,135 @@
use std::collections::BinaryHeap;
use serde_json::{json, Value};
use crate::analytics::Aggregate;
#[derive(Default)]
pub struct ChatCompletionAggregator {
// requests
total_received: usize,
total_succeeded: usize,
time_spent: BinaryHeap<usize>,
// chat completion specific metrics
total_messages: usize,
total_streamed_requests: usize,
total_non_streamed_requests: usize,
// model usage tracking
models_used: std::collections::HashMap<String, usize>,
}
impl ChatCompletionAggregator {
pub fn from_request(model: &str, message_count: usize, is_stream: bool) -> Self {
let mut models_used = std::collections::HashMap::new();
models_used.insert(model.to_string(), 1);
Self {
total_received: 1,
total_succeeded: 0,
time_spent: BinaryHeap::new(),
total_messages: message_count,
total_streamed_requests: if is_stream { 1 } else { 0 },
total_non_streamed_requests: if is_stream { 0 } else { 1 },
models_used,
}
}
pub fn succeed(&mut self, time_spent: std::time::Duration) {
self.total_succeeded += 1;
self.time_spent.push(time_spent.as_millis() as usize);
}
}
impl Aggregate for ChatCompletionAggregator {
fn event_name(&self) -> &'static str {
"Chat Completion POST"
}
fn aggregate(mut self: Box<Self>, new: Box<Self>) -> Box<Self> {
let Self {
total_received,
total_succeeded,
mut time_spent,
total_messages,
total_streamed_requests,
total_non_streamed_requests,
models_used,
..
} = *new;
// Aggregate time spent
self.time_spent.append(&mut time_spent);
// Aggregate counters
self.total_received = self.total_received.saturating_add(total_received);
self.total_succeeded = self.total_succeeded.saturating_add(total_succeeded);
self.total_messages = self.total_messages.saturating_add(total_messages);
self.total_streamed_requests =
self.total_streamed_requests.saturating_add(total_streamed_requests);
self.total_non_streamed_requests =
self.total_non_streamed_requests.saturating_add(total_non_streamed_requests);
// Aggregate model usage
for (model, count) in models_used {
*self.models_used.entry(model).or_insert(0) += count;
}
self
}
fn into_event(self: Box<Self>) -> Value {
let Self {
total_received,
total_succeeded,
time_spent,
total_messages,
total_streamed_requests,
total_non_streamed_requests,
models_used,
..
} = *self;
// Compute time statistics
let time_spent: Vec<usize> = time_spent.into_sorted_vec();
let (max_time, min_time, avg_time) = if time_spent.is_empty() {
(0, 0, 0)
} else {
let max_time = time_spent.last().unwrap_or(&0);
let min_time = time_spent.first().unwrap_or(&0);
let sum: usize = time_spent.iter().sum();
let avg_time = sum / time_spent.len();
(*max_time, *min_time, avg_time)
};
// Compute average messages per request
let avg_messages_per_request =
if total_received > 0 { total_messages as f64 / total_received as f64 } else { 0.0 };
// Compute streaming vs non-streaming proportions
let streaming_ratio = if total_received > 0 {
total_streamed_requests as f64 / total_received as f64
} else {
0.0
};
json!({
"total_received": total_received,
"total_succeeded": total_succeeded,
"time_spent": {
"max": max_time,
"min": min_time,
"avg": avg_time
},
"total_messages": total_messages,
"avg_messages_per_request": avg_messages_per_request,
"total_streamed_requests": total_streamed_requests,
"total_non_streamed_requests": total_non_streamed_requests,
"streaming_ratio": streaming_ratio,
"models_used": models_used,
})
}
}
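Taken together, the intended lifecycle of this aggregator looks roughly like this (illustrative model name and timings):

// One aggregator is created per request; successes record their latency.
let mut first = ChatCompletionAggregator::from_request("gpt-4o", 3, true);
first.succeed(std::time::Duration::from_millis(120));

let second = ChatCompletionAggregator::from_request("gpt-4o", 1, false);

// The analytics pipeline merges aggregates pairwise, then serializes once.
let merged = Box::new(first).aggregate(Box::new(second));
let event = merged.into_event(); // totals, latency stats, and model usage as JSON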


@ -0,0 +1,804 @@
use std::collections::HashMap;
use std::fmt::Write as _;
use std::mem;
use std::ops::ControlFlow;
use std::time::Duration;
use actix_web::web::{self, Data};
use actix_web::{Either, HttpRequest, HttpResponse, Responder};
use actix_web_lab::sse::{Event, Sse};
use async_openai::types::{
ChatCompletionMessageToolCall, ChatCompletionMessageToolCallChunk,
ChatCompletionRequestAssistantMessageArgs, ChatCompletionRequestDeveloperMessage,
ChatCompletionRequestDeveloperMessageContent, ChatCompletionRequestMessage,
ChatCompletionRequestSystemMessage, ChatCompletionRequestSystemMessageContent,
ChatCompletionRequestToolMessage, ChatCompletionRequestToolMessageContent,
ChatCompletionStreamOptions, ChatCompletionStreamResponseDelta, ChatCompletionToolArgs,
ChatCompletionToolType, CreateChatCompletionRequest, CreateChatCompletionStreamResponse,
FinishReason, FunctionCall, FunctionCallStream, FunctionObjectArgs,
};
use async_openai::Client;
use bumpalo::Bump;
use futures::StreamExt;
use index_scheduler::IndexScheduler;
use meilisearch_auth::AuthController;
use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::features::{
ChatCompletionPrompts as DbChatCompletionPrompts,
ChatCompletionSource as DbChatCompletionSource, SystemRole,
};
use meilisearch_types::keys::actions;
use meilisearch_types::milli::index::ChatConfig;
use meilisearch_types::milli::{all_obkv_to_json, obkv_to_json, TimeBudget};
use meilisearch_types::{Document, Index};
use serde::Deserialize;
use serde_json::json;
use tokio::runtime::Handle;
use tokio::sync::mpsc::error::SendError;
use super::chat_completion_analytics::ChatCompletionAggregator;
use super::config::Config;
use super::errors::{MistralError, OpenAiOutsideError, StreamErrorEvent};
use super::utils::format_documents;
use super::{
ChatsParam, MEILI_APPEND_CONVERSATION_MESSAGE_NAME, MEILI_SEARCH_IN_INDEX_FUNCTION_NAME,
MEILI_SEARCH_PROGRESS_NAME, MEILI_SEARCH_SOURCES_NAME,
};
use crate::analytics::Analytics;
use crate::error::MeilisearchHttpError;
use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::{extract_token_from_request, GuardedData, Policy as _};
use crate::metrics::{
MEILISEARCH_CHAT_COMPLETION_TOKENS_USAGE, MEILISEARCH_CHAT_PROMPT_TOKENS_USAGE,
MEILISEARCH_CHAT_SEARCH_REQUESTS, MEILISEARCH_CHAT_TOTAL_TOKENS_USAGE,
MEILISEARCH_DEGRADED_SEARCH_REQUESTS,
};
use crate::routes::chats::utils::SseEventSender;
use crate::routes::indexes::search::search_kind;
use crate::search::{add_search_rules, prepare_search, search_from_kind, SearchQuery};
use crate::search_queue::SearchQueue;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::post().to(chat)));
}
/// Get a chat completion
async fn chat(
index_scheduler: GuardedData<ActionPolicy<{ actions::CHAT_COMPLETIONS }>, Data<IndexScheduler>>,
auth_ctrl: web::Data<AuthController>,
chats_param: web::Path<ChatsParam>,
req: HttpRequest,
search_queue: web::Data<SearchQueue>,
web::Json(chat_completion): web::Json<CreateChatCompletionRequest>,
analytics: web::Data<Analytics>,
) -> impl Responder {
let ChatsParam { workspace_uid } = chats_param.into_inner();
if chat_completion.stream.unwrap_or(false) {
Either::Right(
streamed_chat(
index_scheduler,
auth_ctrl,
search_queue,
&workspace_uid,
req,
chat_completion,
analytics,
)
.await,
)
} else {
Either::Left(
non_streamed_chat(
index_scheduler,
auth_ctrl,
search_queue,
&workspace_uid,
req,
chat_completion,
analytics,
)
.await,
)
}
}
#[derive(Default, Debug, Clone, Copy)]
pub struct FunctionSupport {
/// Defines if we can call the _meiliSearchProgress function
/// to inform the front-end about what we are searching for.
report_progress: bool,
/// Defines if we can call the _meiliSearchSources function
/// to inform the front-end about the sources of the search.
report_sources: bool,
/// Defines if we can call the _meiliAppendConversationMessage
/// function to provide the messages to append into the conversation.
append_to_conversation: bool,
}
/// Setup search tool in chat completion request
fn setup_search_tool(
index_scheduler: &Data<IndexScheduler>,
filters: &meilisearch_auth::AuthFilter,
chat_completion: &mut CreateChatCompletionRequest,
prompts: &DbChatCompletionPrompts,
system_role: SystemRole,
) -> Result<FunctionSupport, ResponseError> {
let tools = chat_completion.tools.get_or_insert_default();
for tool in &tools[..] {
match tool.function.name.as_str() {
MEILI_SEARCH_IN_INDEX_FUNCTION_NAME => {
return Err(ResponseError::from_msg(
format!("{MEILI_SEARCH_IN_INDEX_FUNCTION_NAME} function is already defined."),
Code::BadRequest,
));
}
MEILI_SEARCH_PROGRESS_NAME
| MEILI_SEARCH_SOURCES_NAME
| MEILI_APPEND_CONVERSATION_MESSAGE_NAME => (),
external_function_name => {
return Err(ResponseError::from_msg(
format!("{external_function_name}: External functions are not supported yet."),
Code::UnimplementedExternalFunctionCalling,
));
}
}
}
// Remove internal tools used for front-end notifications as they should be hidden from the LLM.
let mut report_progress = false;
let mut report_sources = false;
let mut append_to_conversation = false;
tools.retain(|tool| {
match tool.function.name.as_str() {
MEILI_SEARCH_PROGRESS_NAME => {
report_progress = true;
false
}
MEILI_SEARCH_SOURCES_NAME => {
report_sources = true;
false
}
MEILI_APPEND_CONVERSATION_MESSAGE_NAME => {
append_to_conversation = true;
false
}
_ => true, // keep other tools
}
});
let mut index_uids = Vec::new();
let mut function_description = prompts.search_description.clone();
index_scheduler.try_for_each_index::<_, ()>(|name, index| {
// Make sure to skip unauthorized indexes
if !filters.is_index_authorized(name) {
return Ok(());
}
let rtxn = index.read_txn()?;
let chat_config = index.chat_config(&rtxn)?;
let index_description = chat_config.description;
let _ = writeln!(&mut function_description, "\n\n - {name}: {index_description}\n");
index_uids.push(name.to_string());
Ok(())
})?;
let tool = ChatCompletionToolArgs::default()
.r#type(ChatCompletionToolType::Function)
.function(
FunctionObjectArgs::default()
.name(MEILI_SEARCH_IN_INDEX_FUNCTION_NAME)
.description(&function_description)
.parameters(json!({
"type": "object",
"properties": {
"index_uid": {
"type": "string",
"enum": index_uids,
"description": prompts.search_index_uid_param,
},
"q": {
// Unfortunately, Mistral does not support an array of types here.
// "type": ["string", "null"],
"type": "string",
"description": prompts.search_q_param,
}
},
"required": ["index_uid", "q"],
"additionalProperties": false,
}))
.strict(true)
.build()
.unwrap(),
)
.build()
.unwrap();
tools.push(tool);
let system_message = match system_role {
SystemRole::System => {
ChatCompletionRequestMessage::System(ChatCompletionRequestSystemMessage {
content: ChatCompletionRequestSystemMessageContent::Text(prompts.system.clone()),
name: None,
})
}
SystemRole::Developer => {
ChatCompletionRequestMessage::Developer(ChatCompletionRequestDeveloperMessage {
content: ChatCompletionRequestDeveloperMessageContent::Text(prompts.system.clone()),
name: None,
})
}
};
chat_completion.messages.insert(0, system_message);
Ok(FunctionSupport { report_progress, report_sources, append_to_conversation })
}
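For reference, the tool definition built above serializes to roughly this shape (index name and descriptions are illustrative placeholders):

// {
//   "type": "function",
//   "function": {
//     "name": "_meiliSearchInIndex",
//     "description": "<search_description>\n\n - movies: <index description>\n",
//     "parameters": {
//       "type": "object",
//       "properties": {
//         "index_uid": { "type": "string", "enum": ["movies"], "description": "<search_index_uid_param>" },
//         "q": { "type": "string", "description": "<search_q_param>" }
//       },
//       "required": ["index_uid", "q"],
//       "additionalProperties": false
//     },
//     "strict": true
//   }
// }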
/// Process search request and return formatted results
async fn process_search_request(
index_scheduler: &GuardedData<
ActionPolicy<{ actions::CHAT_COMPLETIONS }>,
Data<IndexScheduler>,
>,
auth_ctrl: web::Data<AuthController>,
search_queue: &web::Data<SearchQueue>,
auth_token: &str,
index_uid: String,
q: Option<String>,
) -> Result<(Index, Vec<Document>, String), ResponseError> {
let index = index_scheduler.index(&index_uid)?;
let rtxn = index.static_read_txn()?;
let ChatConfig { description: _, prompt: _, search_parameters } = index.chat_config(&rtxn)?;
let mut query = SearchQuery { q, ..SearchQuery::from(search_parameters) };
let auth_filter = ActionPolicy::<{ actions::SEARCH }>::authenticate(
auth_ctrl,
auth_token,
Some(index_uid.as_str()),
)?;
// Tenant token search_rules.
if let Some(search_rules) = auth_filter.get_index_search_rules(&index_uid) {
add_search_rules(&mut query.filter, search_rules);
}
let search_kind =
search_kind(&query, index_scheduler.get_ref(), index_uid.to_string(), &index)?;
let permit = search_queue.try_get_search_permit().await?;
let features = index_scheduler.features();
let index_cloned = index.clone();
let output = tokio::task::spawn_blocking(move || -> Result<_, ResponseError> {
let time_budget = match index_cloned
.search_cutoff(&rtxn)
.map_err(|e| MeilisearchHttpError::from_milli(e, Some(index_uid.clone())))?
{
Some(cutoff) => TimeBudget::new(Duration::from_millis(cutoff)),
None => TimeBudget::default(),
};
let (search, _is_finite_pagination, _max_total_hits, _offset) =
prepare_search(&index_cloned, &rtxn, &query, &search_kind, time_budget, features)?;
search_from_kind(index_uid, search_kind, search)
.map(|(search_results, _)| (rtxn, search_results))
.map_err(ResponseError::from)
})
.await;
permit.drop().await;
let output = output?;
let mut documents = Vec::new();
if let Ok((ref rtxn, ref search_result)) = output {
MEILISEARCH_CHAT_SEARCH_REQUESTS.with_label_values(&["internal"]).inc();
if search_result.degraded {
MEILISEARCH_DEGRADED_SEARCH_REQUESTS.inc();
}
let fields_ids_map = index.fields_ids_map(rtxn)?;
let displayed_fields = index.displayed_fields_ids(rtxn)?;
for &document_id in &search_result.documents_ids {
let obkv = index.document(rtxn, document_id)?;
let document = match displayed_fields {
Some(ref fields) => obkv_to_json(fields, &fields_ids_map, obkv)?,
None => all_obkv_to_json(obkv, &fields_ids_map)?,
};
documents.push(document);
}
}
let (rtxn, search_result) = output?;
let render_alloc = Bump::new();
let formatted = format_documents(&rtxn, &index, &render_alloc, search_result.documents_ids)?;
let text = formatted.join("\n");
drop(rtxn);
Ok((index, documents, text))
}
#[allow(unreachable_code, unused_variables)] // will be correctly implemented in the future
async fn non_streamed_chat(
index_scheduler: GuardedData<ActionPolicy<{ actions::CHAT_COMPLETIONS }>, Data<IndexScheduler>>,
auth_ctrl: web::Data<AuthController>,
search_queue: web::Data<SearchQueue>,
workspace_uid: &str,
req: HttpRequest,
chat_completion: CreateChatCompletionRequest,
analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
index_scheduler.features().check_chat_completions("using the /chats chat completions route")?;
// Create analytics aggregator
let aggregate = ChatCompletionAggregator::from_request(
&chat_completion.model,
chat_completion.messages.len(),
false, // non_streamed_chat is not streaming
);
let start_time = std::time::Instant::now();
if let Some(n) = chat_completion.n.filter(|&n| n != 1) {
return Err(ResponseError::from_msg(
format!("You tried to specify n = {n} but only single choices are supported (n = 1)."),
Code::UnimplementedMultiChoiceChatCompletions,
));
}
return Err(ResponseError::from_msg(
"Non-streamed chat completions is not implemented".to_string(),
Code::UnimplementedNonStreamingChatCompletions,
));
let filters = index_scheduler.filters();
let chat_settings = match index_scheduler.chat_settings(workspace_uid).unwrap() {
Some(settings) => settings,
None => {
return Err(ResponseError::from_msg(
format!("Chat `{workspace_uid}` not found"),
Code::ChatNotFound,
))
}
};
let config = Config::new(&chat_settings);
let client = Client::with_config(config);
let auth_token = extract_token_from_request(&req)?.unwrap();
let system_role = chat_settings.source.system_role(&chat_completion.model);
// TODO do function support later
let _function_support = setup_search_tool(
&index_scheduler,
filters,
&mut chat_completion,
&chat_settings.prompts,
system_role,
)?;
let mut response;
loop {
response = client.chat().create(chat_completion.clone()).await.unwrap();
let choice = &mut response.choices[0];
match choice.finish_reason {
Some(FinishReason::ToolCalls) => {
let tool_calls = mem::take(&mut choice.message.tool_calls).unwrap_or_default();
let (meili_calls, other_calls): (Vec<_>, Vec<_>) = tool_calls
.into_iter()
.partition(|call| call.function.name == MEILI_SEARCH_IN_INDEX_FUNCTION_NAME);
chat_completion.messages.push(
ChatCompletionRequestAssistantMessageArgs::default()
.tool_calls(meili_calls.clone())
.build()
.unwrap()
.into(),
);
for call in meili_calls {
let result = match serde_json::from_str(&call.function.arguments) {
Ok(SearchInIndexParameters { index_uid, q }) => process_search_request(
&index_scheduler,
auth_ctrl.clone(),
&search_queue,
auth_token,
index_uid,
q,
)
.await
.map_err(|e| e.to_string()),
Err(err) => Err(err.to_string()),
};
// TODO report documents sources later
let answer = match result {
Ok((_, _documents, text)) => text,
Err(err) => err,
};
chat_completion.messages.push(ChatCompletionRequestMessage::Tool(
ChatCompletionRequestToolMessage {
tool_call_id: call.id.clone(),
content: ChatCompletionRequestToolMessageContent::Text(answer),
},
));
}
// Let the client call other tools by themselves
if !other_calls.is_empty() {
response.choices[0].message.tool_calls = Some(other_calls);
break;
}
}
_ => break,
}
}
// Record success in analytics
let mut aggregate = aggregate;
aggregate.succeed(start_time.elapsed());
analytics.publish(aggregate, &req);
Ok(HttpResponse::Ok().json(response))
}
async fn streamed_chat(
index_scheduler: GuardedData<ActionPolicy<{ actions::CHAT_COMPLETIONS }>, Data<IndexScheduler>>,
auth_ctrl: web::Data<AuthController>,
search_queue: web::Data<SearchQueue>,
workspace_uid: &str,
req: HttpRequest,
mut chat_completion: CreateChatCompletionRequest,
analytics: web::Data<Analytics>,
) -> Result<impl Responder, ResponseError> {
index_scheduler.features().check_chat_completions("using the /chats chat completions route")?;
let filters = index_scheduler.filters();
if let Some(n) = chat_completion.n.filter(|&n| n != 1) {
return Err(ResponseError::from_msg(
format!("You tried to specify n = {n} but only single choices are supported (n = 1)."),
Code::UnimplementedMultiChoiceChatCompletions,
));
}
let chat_settings = match index_scheduler.chat_settings(workspace_uid)? {
Some(settings) => settings,
None => {
return Err(ResponseError::from_msg(
format!("Chat `{workspace_uid}` not found"),
Code::ChatNotFound,
))
}
};
// Create analytics aggregator
let mut aggregate = ChatCompletionAggregator::from_request(
&chat_completion.model,
chat_completion.messages.len(),
true, // streamed_chat is always streaming
);
let start_time = std::time::Instant::now();
let config = Config::new(&chat_settings);
let auth_token = extract_token_from_request(&req)?.unwrap().to_string();
let system_role = chat_settings.source.system_role(&chat_completion.model);
let function_support = setup_search_tool(
&index_scheduler,
filters,
&mut chat_completion,
&chat_settings.prompts,
system_role,
)?;
tracing::debug!("Conversation function support: {function_support:?}");
let (tx, rx) = tokio::sync::mpsc::channel(10);
let tx = SseEventSender::new(tx);
let workspace_uid = workspace_uid.to_string();
let _join_handle = Handle::current().spawn(async move {
let client = Client::with_config(config.clone());
let mut global_tool_calls = HashMap::<u32, Call>::new();
// Limit the number of internal calls to satisfy the search requests of the LLM
for _ in 0..20 {
let output = run_conversation(
&index_scheduler,
&auth_ctrl,
&workspace_uid,
&search_queue,
&auth_token,
&client,
chat_settings.source,
&mut chat_completion,
&tx,
&mut global_tool_calls,
function_support,
);
match output.await {
Ok(ControlFlow::Continue(())) => (),
Ok(ControlFlow::Break(_finish_reason)) => break,
// If the connection is closed we must stop
Err(SendError(_)) => return,
}
}
let _ = tx.stop().await;
});
// Record success in analytics after the stream is set up
aggregate.succeed(start_time.elapsed());
analytics.publish(aggregate, &req);
Ok(Sse::from_infallible_receiver(rx).with_retry_duration(Duration::from_secs(10)))
}
/// Updates the chat completion with the new messages, streams the LLM tokens,
/// and reports progress and errors.
#[allow(clippy::too_many_arguments)]
async fn run_conversation<C: async_openai::config::Config>(
index_scheduler: &GuardedData<
ActionPolicy<{ actions::CHAT_COMPLETIONS }>,
Data<IndexScheduler>,
>,
auth_ctrl: &web::Data<AuthController>,
workspace_uid: &str,
search_queue: &web::Data<SearchQueue>,
auth_token: &str,
client: &Client<C>,
source: DbChatCompletionSource,
chat_completion: &mut CreateChatCompletionRequest,
tx: &SseEventSender,
global_tool_calls: &mut HashMap<u32, Call>,
function_support: FunctionSupport,
) -> Result<ControlFlow<Option<FinishReason>, ()>, SendError<Event>> {
use DbChatCompletionSource::*;
let mut finish_reason = None;
chat_completion.stream_options = match source {
OpenAi | AzureOpenAi => Some(ChatCompletionStreamOptions { include_usage: true }),
Mistral | VLlm => None,
};
// safety: this unwrap can only panic if `stream` was set to `false`
let mut response = client.chat().create_stream(chat_completion.clone()).await.unwrap();
while let Some(result) = response.next().await {
match result {
Ok(resp) => {
if let Some(usage) = resp.usage.as_ref() {
MEILISEARCH_CHAT_PROMPT_TOKENS_USAGE
.with_label_values(&[workspace_uid, &chat_completion.model])
.inc_by(usage.prompt_tokens as u64);
MEILISEARCH_CHAT_COMPLETION_TOKENS_USAGE
.with_label_values(&[workspace_uid, &chat_completion.model])
.inc_by(usage.completion_tokens as u64);
MEILISEARCH_CHAT_TOTAL_TOKENS_USAGE
.with_label_values(&[workspace_uid, &chat_completion.model])
.inc_by(usage.total_tokens as u64);
}
let choice = match resp.choices.first() {
Some(choice) => choice,
None => break,
};
finish_reason = choice.finish_reason;
let ChatCompletionStreamResponseDelta { ref tool_calls, .. } = &choice.delta;
match tool_calls {
Some(tool_calls) => {
for chunk in tool_calls {
let ChatCompletionMessageToolCallChunk {
index,
id,
r#type: _,
function,
} = chunk;
let FunctionCallStream { name, arguments } = function.as_ref().unwrap();
global_tool_calls
.entry(*index)
.and_modify(|call| {
if call.is_internal() {
call.append(arguments.as_ref().unwrap())
}
})
.or_insert_with(|| {
if name.as_deref() == Some(MEILI_SEARCH_IN_INDEX_FUNCTION_NAME)
{
Call::Internal {
id: id.as_ref().unwrap().clone(),
function_name: name.as_ref().unwrap().clone(),
arguments: arguments.as_ref().unwrap().clone(),
}
} else {
Call::External
}
});
}
}
None => {
if !global_tool_calls.is_empty() {
let (meili_calls, _other_calls): (Vec<_>, Vec<_>) =
mem::take(global_tool_calls)
.into_values()
.flat_map(|call| match call {
Call::Internal { id, function_name: name, arguments } => {
Some(ChatCompletionMessageToolCall {
id,
r#type: Some(ChatCompletionToolType::Function),
function: FunctionCall { name, arguments },
})
}
Call::External => None,
})
.partition(|call| {
call.function.name == MEILI_SEARCH_IN_INDEX_FUNCTION_NAME
});
chat_completion.messages.push(
ChatCompletionRequestAssistantMessageArgs::default()
.tool_calls(meili_calls.clone())
.build()
.unwrap()
.into(),
);
handle_meili_tools(
index_scheduler,
auth_ctrl,
search_queue,
auth_token,
tx,
meili_calls,
chat_completion,
&resp,
function_support,
)
.await?;
} else {
tx.forward_response(&resp).await?;
}
}
}
}
Err(error) => {
let result = match source {
DbChatCompletionSource::Mistral => {
StreamErrorEvent::from_openai_error::<MistralError>(error).await
}
_ => StreamErrorEvent::from_openai_error::<OpenAiOutsideError>(error).await,
};
let error = result.unwrap_or_else(StreamErrorEvent::from_reqwest_error);
tx.send_error(&error).await?;
return Ok(ControlFlow::Break(None));
}
}
}
// We must stop if the finish reason is not something we can solve with Meilisearch
match finish_reason {
Some(FinishReason::ToolCalls) => Ok(ControlFlow::Continue(())),
otherwise => Ok(ControlFlow::Break(otherwise)),
}
}
#[allow(clippy::too_many_arguments)]
async fn handle_meili_tools(
index_scheduler: &GuardedData<
ActionPolicy<{ actions::CHAT_COMPLETIONS }>,
Data<IndexScheduler>,
>,
auth_ctrl: &web::Data<AuthController>,
search_queue: &web::Data<SearchQueue>,
auth_token: &str,
tx: &SseEventSender,
meili_calls: Vec<ChatCompletionMessageToolCall>,
chat_completion: &mut CreateChatCompletionRequest,
resp: &CreateChatCompletionStreamResponse,
FunctionSupport { report_progress, report_sources, append_to_conversation, .. }: FunctionSupport,
) -> Result<(), SendError<Event>> {
for call in meili_calls {
if report_progress {
tx.report_search_progress(
resp.clone(),
&call.id,
&call.function.name,
&call.function.arguments,
)
.await?;
}
if append_to_conversation {
tx.append_tool_call_conversation_message(
resp.clone(),
call.id.clone(),
call.function.name.clone(),
call.function.arguments.clone(),
)
.await?;
}
let mut error = None;
let result = match serde_json::from_str(&call.function.arguments) {
Ok(SearchInIndexParameters { index_uid, q }) => match process_search_request(
index_scheduler,
auth_ctrl.clone(),
search_queue,
auth_token,
index_uid,
q,
)
.await
{
Ok(output) => Ok(output),
Err(err) => {
let error_text = format!("the search tool call failed with {err}");
error = Some(err);
Err(error_text)
}
},
Err(err) => Err(err.to_string()),
};
let answer = match result {
Ok((_index, documents, text)) => {
if report_sources {
tx.report_sources(resp.clone(), &call.id, &documents).await?;
}
text
}
Err(err) => err,
};
let tool = ChatCompletionRequestMessage::Tool(ChatCompletionRequestToolMessage {
tool_call_id: call.id.clone(),
content: ChatCompletionRequestToolMessageContent::Text(answer),
});
if append_to_conversation {
tx.append_conversation_message(resp.clone(), &tool).await?;
}
chat_completion.messages.push(tool);
if let Some(error) = error {
tx.send_error(&StreamErrorEvent::from_response_error(error)).await?;
}
}
Ok(())
}
/// The structure used to aggregate the function calls to make.
#[derive(Debug)]
enum Call {
/// Tool calls to tools that must be managed by Meilisearch internally.
/// Typically the search functions.
Internal { id: String, function_name: String, arguments: String },
/// Tool calls that we track only to know that they are not our functions.
/// We return the function calls as-is to the end-user.
External,
}
impl Call {
fn is_internal(&self) -> bool {
matches!(self, Call::Internal { .. })
}
/// # Panics
///
/// - if called on external calls
fn append(&mut self, more: &str) {
match self {
Call::Internal { arguments, .. } => arguments.push_str(more),
Call::External => panic!("Cannot append argument chunks to an external function"),
}
}
}
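Streamed tool calls arrive as argument fragments, which is exactly what `append` accumulates; a small sketch of the intended flow (values illustrative):

// Fragments of the JSON arguments are concatenated across stream chunks
// until the complete object can be parsed with serde_json.
let mut call = Call::Internal {
    id: "call_1".to_string(),
    function_name: "_meiliSearchInIndex".to_string(),
    arguments: "{\"index_uid\":\"movies\",".to_string(),
};
call.append("\"q\":\"comedy\"}");
// `arguments` now holds `{"index_uid":"movies","q":"comedy"}` and can be
// deserialized into `SearchInIndexParameters`.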
#[derive(Deserialize)]
struct SearchInIndexParameters {
/// The index uid to search in.
index_uid: String,
/// The query parameter to use.
q: Option<String>,
}


@ -0,0 +1,88 @@
use async_openai::config::{AzureConfig, OpenAIConfig};
use meilisearch_types::features::ChatCompletionSettings as DbChatSettings;
use reqwest::header::HeaderMap;
use secrecy::SecretString;
#[derive(Debug, Clone)]
pub enum Config {
OpenAiCompatible(OpenAIConfig),
AzureOpenAiCompatible(AzureConfig),
}
impl Config {
pub fn new(chat_settings: &DbChatSettings) -> Self {
use meilisearch_types::features::ChatCompletionSource::*;
match chat_settings.source {
OpenAi | Mistral | VLlm => {
let mut config = OpenAIConfig::default();
if let Some(org_id) = chat_settings.org_id.as_ref() {
config = config.with_org_id(org_id);
}
if let Some(project_id) = chat_settings.project_id.as_ref() {
config = config.with_project_id(project_id);
}
if let Some(api_key) = chat_settings.api_key.as_ref() {
config = config.with_api_key(api_key);
}
let base_url = chat_settings.base_url.as_deref();
if let Some(base_url) = chat_settings.source.base_url().or(base_url) {
config = config.with_api_base(base_url);
}
Self::OpenAiCompatible(config)
}
AzureOpenAi => {
let mut config = AzureConfig::default();
if let Some(version) = chat_settings.api_version.as_ref() {
config = config.with_api_version(version);
}
if let Some(deployment_id) = chat_settings.deployment_id.as_ref() {
config = config.with_deployment_id(deployment_id);
}
if let Some(api_key) = chat_settings.api_key.as_ref() {
config = config.with_api_key(api_key);
}
if let Some(base_url) = chat_settings.base_url.as_ref() {
config = config.with_api_base(base_url);
}
Self::AzureOpenAiCompatible(config)
}
}
}
}
impl async_openai::config::Config for Config {
fn headers(&self) -> HeaderMap {
match self {
Config::OpenAiCompatible(config) => config.headers(),
Config::AzureOpenAiCompatible(config) => config.headers(),
}
}
fn url(&self, path: &str) -> String {
match self {
Config::OpenAiCompatible(config) => config.url(path),
Config::AzureOpenAiCompatible(config) => config.url(path),
}
}
fn query(&self) -> Vec<(&str, &str)> {
match self {
Config::OpenAiCompatible(config) => config.query(),
Config::AzureOpenAiCompatible(config) => config.query(),
}
}
fn api_base(&self) -> &str {
match self {
Config::OpenAiCompatible(config) => config.api_base(),
Config::AzureOpenAiCompatible(config) => config.api_base(),
}
}
fn api_key(&self) -> &SecretString {
match self {
Config::OpenAiCompatible(config) => config.api_key(),
Config::AzureOpenAiCompatible(config) => config.api_key(),
}
}
}
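Because `Config` implements `async_openai::config::Config`, either variant can be handed straight to the client, exactly as the chat completions route does:

// One code path serves OpenAI-compatible providers and Azure deployments alike
// (assumes `chat_settings: &DbChatSettings` is in scope).
let config = Config::new(chat_settings);
let client = async_openai::Client::with_config(config);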

View File

@ -0,0 +1,250 @@
use async_openai::error::{ApiError, OpenAIError};
use async_openai::reqwest_eventsource::Error as EventSourceError;
use meilisearch_types::error::ResponseError;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// The error type which is always `error`.
const ERROR_TYPE: &str = "error";
/// The error struct returned by the Mistral API.
///
/// ```json
/// {
/// "object": "error",
/// "message": "Service tier capacity exceeded for this model.",
/// "type": "invalid_request_error",
/// "param": null,
/// "code": null
/// }
/// ```
#[derive(Debug, Clone, Deserialize)]
pub struct MistralError {
message: String,
r#type: String,
param: Option<String>,
code: Option<String>,
}
impl From<MistralError> for StreamErrorEvent {
fn from(error: MistralError) -> Self {
let MistralError { message, r#type, param, code } = error;
StreamErrorEvent {
event_id: Uuid::new_v4().to_string(),
r#type: ERROR_TYPE.to_owned(),
error: StreamError { r#type, code, message, param, event_id: None },
}
}
}
#[derive(Debug, Clone, Deserialize)]
pub struct OpenAiOutsideError {
/// Emitted when an error occurs.
error: OpenAiInnerError,
}
/// Emitted when an error occurs.
#[derive(Debug, Clone, Deserialize)]
pub struct OpenAiInnerError {
/// The error code.
code: Option<String>,
/// The error message.
message: String,
/// The error parameter.
param: Option<String>,
/// The type of the event. Always `error`.
r#type: String,
}
impl From<OpenAiOutsideError> for StreamErrorEvent {
fn from(error: OpenAiOutsideError) -> Self {
let OpenAiOutsideError { error: OpenAiInnerError { code, message, param, r#type } } = error;
StreamErrorEvent {
event_id: Uuid::new_v4().to_string(),
r#type: ERROR_TYPE.to_string(),
error: StreamError { r#type, code, message, param, event_id: None },
}
}
}
/// An error that occurs during the streaming process.
///
/// It directly comes from the OpenAI API and you can
/// read more about error events on their website:
/// <https://platform.openai.com/docs/api-reference/realtime-server-events/error>
#[derive(Debug, Serialize, Deserialize)]
pub struct StreamErrorEvent {
/// The unique ID of the server event.
pub event_id: String,
/// The event type, must be error.
pub r#type: String,
/// Details of the error.
pub error: StreamError,
}
/// Details of the error.
#[derive(Debug, Serialize, Deserialize)]
pub struct StreamError {
/// The type of error (e.g., "invalid_request_error", "server_error").
pub r#type: String,
/// Error code, if any.
pub code: Option<String>,
/// A human-readable error message.
pub message: String,
/// Parameter related to the error, if any.
pub param: Option<String>,
/// The event_id of the client event that caused the error, if applicable.
pub event_id: Option<String>,
}
impl StreamErrorEvent {
pub async fn from_openai_error<E>(error: OpenAIError) -> Result<Self, reqwest::Error>
where
E: serde::de::DeserializeOwned,
Self: From<E>,
{
match error {
OpenAIError::Reqwest(e) => Ok(StreamErrorEvent {
event_id: Uuid::new_v4().to_string(),
r#type: ERROR_TYPE.to_string(),
error: StreamError {
r#type: "internal_reqwest_error".to_string(),
code: Some("internal".to_string()),
message: e.to_string(),
param: None,
event_id: None,
},
}),
OpenAIError::ApiError(ApiError { message, r#type, param, code }) => {
Ok(StreamErrorEvent {
r#type: ERROR_TYPE.to_string(),
event_id: Uuid::new_v4().to_string(),
error: StreamError {
r#type: r#type.unwrap_or_else(|| "unknown".to_string()),
code,
message,
param,
event_id: None,
},
})
}
OpenAIError::JSONDeserialize(error) => Ok(StreamErrorEvent {
event_id: Uuid::new_v4().to_string(),
r#type: ERROR_TYPE.to_string(),
error: StreamError {
r#type: "json_deserialize_error".to_string(),
code: Some("internal".to_string()),
message: error.to_string(),
param: None,
event_id: None,
},
}),
OpenAIError::FileSaveError(_) | OpenAIError::FileReadError(_) => unreachable!(),
OpenAIError::StreamError(error) => match error {
EventSourceError::InvalidStatusCode(_status_code, response) => {
let error = response.json::<E>().await?;
Ok(StreamErrorEvent::from(error))
}
EventSourceError::InvalidContentType(_header_value, response) => {
let error = response.json::<E>().await?;
Ok(StreamErrorEvent::from(error))
}
EventSourceError::Utf8(error) => Ok(StreamErrorEvent {
event_id: Uuid::new_v4().to_string(),
r#type: ERROR_TYPE.to_string(),
error: StreamError {
r#type: "invalid_utf8_error".to_string(),
code: None,
message: error.to_string(),
param: None,
event_id: None,
},
}),
EventSourceError::Parser(error) => Ok(StreamErrorEvent {
event_id: Uuid::new_v4().to_string(),
r#type: ERROR_TYPE.to_string(),
error: StreamError {
r#type: "parser_error".to_string(),
code: None,
message: error.to_string(),
param: None,
event_id: None,
},
}),
EventSourceError::Transport(error) => Ok(StreamErrorEvent {
event_id: Uuid::new_v4().to_string(),
r#type: ERROR_TYPE.to_string(),
error: StreamError {
r#type: "transport_error".to_string(),
code: None,
message: error.to_string(),
param: None,
event_id: None,
},
}),
EventSourceError::InvalidLastEventId(message) => Ok(StreamErrorEvent {
event_id: Uuid::new_v4().to_string(),
r#type: ERROR_TYPE.to_string(),
error: StreamError {
r#type: "invalid_last_event_id".to_string(),
code: None,
message,
param: None,
event_id: None,
},
}),
EventSourceError::StreamEnded => Ok(StreamErrorEvent {
event_id: Uuid::new_v4().to_string(),
r#type: ERROR_TYPE.to_string(),
error: StreamError {
r#type: "stream_ended".to_string(),
code: None,
message: "Stream ended".to_string(),
param: None,
event_id: None,
},
}),
},
OpenAIError::InvalidArgument(message) => Ok(StreamErrorEvent {
event_id: Uuid::new_v4().to_string(),
r#type: ERROR_TYPE.to_string(),
error: StreamError {
r#type: "invalid_argument".to_string(),
code: None,
message,
param: None,
event_id: None,
},
}),
}
}
pub fn from_response_error(error: ResponseError) -> Self {
let ResponseError { code, message, .. } = error;
StreamErrorEvent {
event_id: Uuid::new_v4().to_string(),
r#type: ERROR_TYPE.to_string(),
error: StreamError {
r#type: "response_error".to_string(),
code: Some(code.as_str().to_string()),
message,
param: None,
event_id: None,
},
}
}
pub fn from_reqwest_error(error: reqwest::Error) -> Self {
StreamErrorEvent {
event_id: Uuid::new_v4().to_string(),
r#type: ERROR_TYPE.to_string(),
error: StreamError {
r#type: "reqwest_error".to_string(),
code: None,
message: error.to_string(),
param: None,
event_id: None,
},
}
}
}
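
// A minimal sketch (not part of the diff) of how the constructors above tie
// into the SSE loop: convert any upstream failure into a `StreamErrorEvent`
// and forward it to the client instead of silently closing the stream. It
// assumes an error payload type `E` (e.g. an OpenAI-compatible error body)
// that is deserializable and convertible into `StreamErrorEvent`.
async fn forward_openai_error<E>(
    sender: &super::utils::SseEventSender,
    error: OpenAIError,
) -> Result<(), tokio::sync::mpsc::error::SendError<actix_web_lab::sse::Event>>
where
    E: serde::de::DeserializeOwned,
    StreamErrorEvent: From<E>,
{
    let event = match StreamErrorEvent::from_openai_error::<E>(error).await {
        Ok(event) => event,
        // Reading the HTTP response body itself failed.
        Err(reqwest_error) => StreamErrorEvent::from_reqwest_error(reqwest_error),
    };
    sender.send_error(&event).await
}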

View File

@ -0,0 +1,135 @@
use actix_web::web::{self, Data};
use actix_web::HttpResponse;
use deserr::actix_web::AwebQueryParameter;
use deserr::Deserr;
use index_scheduler::IndexScheduler;
use meilisearch_types::deserr::query_params::Param;
use meilisearch_types::deserr::DeserrQueryParamError;
use meilisearch_types::error::deserr_codes::{InvalidIndexLimit, InvalidIndexOffset};
use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::keys::actions;
use serde::{Deserialize, Serialize};
use serde_json::json;
use tracing::debug;
use utoipa::{IntoParams, ToSchema};
use super::Pagination;
use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::GuardedData;
use crate::routes::PAGINATION_DEFAULT_LIMIT;
mod chat_completion_analytics;
pub mod chat_completions;
mod config;
mod errors;
pub mod settings;
mod utils;
/// The function name used to report search progress.
/// The frontend listens for calls to this function to show
/// what Meilisearch is currently doing.
const MEILI_SEARCH_PROGRESS_NAME: &str = "_meiliSearchProgress";
/// The function name used to append a message to the user conversation.
/// The frontend must handle these calls so that whatever happened on the
/// Meilisearch side stays in the conversation, preserving context for
/// follow-up questions.
const MEILI_APPEND_CONVERSATION_MESSAGE_NAME: &str = "_meiliAppendConversationMessage";
/// The function name used to report the sources of a search to the frontend.
/// The call id matches the one used by the search progress function.
const MEILI_SEARCH_SOURCES_NAME: &str = "_meiliSearchSources";
/// The *internal* function name given to the LLM so it can search the indexes.
/// It must never leak to the user: the LLM calls it, and Meilisearch's job is
/// to answer those calls transparently.
const MEILI_SEARCH_IN_INDEX_FUNCTION_NAME: &str = "_meiliSearchInIndex";
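
// A client-side sketch (illustrative, not shipped code): a frontend can
// dispatch on the function name of each streamed tool call and react to the
// reserved names above; `_meiliSearchInIndex` stays server-side and should
// never show up here.
#[allow(dead_code)]
fn dispatch_frontend_tool_call(function_name: &str, arguments: &str) {
    match function_name {
        MEILI_SEARCH_PROGRESS_NAME => println!("search in progress: {arguments}"),
        MEILI_SEARCH_SOURCES_NAME => println!("sources: {arguments}"),
        MEILI_APPEND_CONVERSATION_MESSAGE_NAME => println!("append message: {arguments}"),
        other => println!("unexpected tool call: {other}"),
    }
}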
#[derive(Deserialize)]
pub struct ChatsParam {
workspace_uid: String,
}
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::get().to(list_workspaces))).service(
web::scope("/{workspace_uid}")
.service(
web::resource("")
.route(web::get().to(get_chat))
.route(web::delete().to(delete_chat)),
)
.service(web::scope("/chat/completions").configure(chat_completions::configure))
.service(web::scope("/settings").configure(settings::configure)),
);
}
pub async fn get_chat(
index_scheduler: GuardedData<ActionPolicy<{ actions::CHATS_GET }>, Data<IndexScheduler>>,
workspace_uid: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
index_scheduler.features().check_chat_completions("displaying a chat")?;
let workspace_uid = IndexUid::try_from(workspace_uid.into_inner())?;
if index_scheduler.chat_workspace_exists(&workspace_uid)? {
Ok(HttpResponse::Ok().json(json!({ "uid": workspace_uid })))
} else {
Err(ResponseError::from_msg(format!("chat {workspace_uid} not found"), Code::ChatNotFound))
}
}
pub async fn delete_chat(
index_scheduler: GuardedData<ActionPolicy<{ actions::CHATS_DELETE }>, Data<IndexScheduler>>,
workspace_uid: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
index_scheduler.features().check_chat_completions("deleting a chat")?;
let workspace_uid = workspace_uid.into_inner();
if index_scheduler.delete_chat_settings(&workspace_uid)? {
Ok(HttpResponse::NoContent().finish())
} else {
Err(ResponseError::from_msg(format!("chat {workspace_uid} not found"), Code::ChatNotFound))
}
}
#[derive(Deserr, Debug, Clone, Copy, IntoParams)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
#[into_params(rename_all = "camelCase", parameter_in = Query)]
pub struct ListChats {
/// The number of chat workspaces to skip before starting to retrieve anything
#[param(value_type = Option<usize>, default, example = 100)]
#[deserr(default, error = DeserrQueryParamError<InvalidIndexOffset>)]
pub offset: Param<usize>,
/// The number of chat workspaces to retrieve
#[param(value_type = Option<usize>, default = 20, example = 1)]
#[deserr(default = Param(PAGINATION_DEFAULT_LIMIT), error = DeserrQueryParamError<InvalidIndexLimit>)]
pub limit: Param<usize>,
}
impl ListChats {
fn as_pagination(self) -> Pagination {
Pagination { offset: self.offset.0, limit: self.limit.0 }
}
}
#[derive(Debug, Serialize, Clone, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct ChatWorkspaceView {
    /// The unique identifier of the chat workspace
pub uid: String,
}
pub async fn list_workspaces(
index_scheduler: GuardedData<ActionPolicy<{ actions::CHATS_GET }>, Data<IndexScheduler>>,
paginate: AwebQueryParameter<ListChats, DeserrQueryParamError>,
) -> Result<HttpResponse, ResponseError> {
index_scheduler.features().check_chat_completions("listing the chats")?;
debug!(parameters = ?paginate, "List chat workspaces");
let (total, workspaces) =
index_scheduler.paginated_chat_workspace_uids(*paginate.offset, *paginate.limit)?;
let workspaces =
workspaces.into_iter().map(|uid| ChatWorkspaceView { uid }).collect::<Vec<_>>();
let ret = paginate.as_pagination().format_with(total, workspaces);
debug!(returns = ?ret, "List chat workspaces");
Ok(HttpResponse::Ok().json(ret))
}

View File

@ -0,0 +1,258 @@
use actix_web::web::{self, Data};
use actix_web::HttpResponse;
use deserr::Deserr;
use index_scheduler::IndexScheduler;
use meilisearch_types::deserr::DeserrJsonError;
use meilisearch_types::error::deserr_codes::*;
use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::features::{
ChatCompletionPrompts as DbChatCompletionPrompts, ChatCompletionSettings,
ChatCompletionSource as DbChatCompletionSource, DEFAULT_CHAT_SEARCH_DESCRIPTION_PROMPT,
DEFAULT_CHAT_SEARCH_INDEX_UID_PARAM_PROMPT, DEFAULT_CHAT_SEARCH_Q_PARAM_PROMPT,
DEFAULT_CHAT_SYSTEM_PROMPT,
};
use meilisearch_types::keys::actions;
use meilisearch_types::milli::update::Setting;
use serde::{Deserialize, Serialize};
use utoipa::ToSchema;
use super::ChatsParam;
use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(
web::resource("")
.route(web::get().to(SeqHandler(get_settings)))
.route(web::patch().to(SeqHandler(patch_settings)))
.route(web::delete().to(SeqHandler(reset_settings))),
);
}
async fn get_settings(
index_scheduler: GuardedData<
ActionPolicy<{ actions::CHATS_SETTINGS_GET }>,
Data<IndexScheduler>,
>,
chats_param: web::Path<ChatsParam>,
) -> Result<HttpResponse, ResponseError> {
index_scheduler.features().check_chat_completions("using the /chats/settings route")?;
let ChatsParam { workspace_uid } = chats_param.into_inner();
let mut settings = match index_scheduler.chat_settings(&workspace_uid)? {
Some(settings) => settings,
None => {
return Err(ResponseError::from_msg(
format!("Chat `{workspace_uid}` not found"),
Code::ChatNotFound,
))
}
};
settings.hide_secrets();
Ok(HttpResponse::Ok().json(settings))
}
async fn patch_settings(
index_scheduler: GuardedData<
ActionPolicy<{ actions::CHATS_SETTINGS_UPDATE }>,
Data<IndexScheduler>,
>,
chats_param: web::Path<ChatsParam>,
web::Json(new): web::Json<ChatWorkspaceSettings>,
) -> Result<HttpResponse, ResponseError> {
index_scheduler.features().check_chat_completions("using the /chats/settings route")?;
let ChatsParam { workspace_uid } = chats_param.into_inner();
let old_settings = index_scheduler.chat_settings(&workspace_uid)?.unwrap_or_default();
let prompts = match new.prompts {
Setting::Set(new_prompts) => DbChatCompletionPrompts {
system: match new_prompts.system {
Setting::Set(new_system) => new_system,
Setting::Reset => DEFAULT_CHAT_SYSTEM_PROMPT.to_string(),
Setting::NotSet => old_settings.prompts.system,
},
search_description: match new_prompts.search_description {
Setting::Set(new_description) => new_description,
Setting::Reset => DEFAULT_CHAT_SEARCH_DESCRIPTION_PROMPT.to_string(),
Setting::NotSet => old_settings.prompts.search_description,
},
search_q_param: match new_prompts.search_q_param {
Setting::Set(new_description) => new_description,
Setting::Reset => DEFAULT_CHAT_SEARCH_Q_PARAM_PROMPT.to_string(),
Setting::NotSet => old_settings.prompts.search_q_param,
},
search_index_uid_param: match new_prompts.search_index_uid_param {
Setting::Set(new_description) => new_description,
Setting::Reset => DEFAULT_CHAT_SEARCH_INDEX_UID_PARAM_PROMPT.to_string(),
Setting::NotSet => old_settings.prompts.search_index_uid_param,
},
},
Setting::Reset => DbChatCompletionPrompts::default(),
Setting::NotSet => old_settings.prompts,
};
let mut settings = ChatCompletionSettings {
source: match new.source {
Setting::Set(new_source) => new_source.into(),
Setting::Reset => DbChatCompletionSource::default(),
Setting::NotSet => old_settings.source,
},
org_id: match new.org_id {
Setting::Set(new_org_id) => Some(new_org_id),
Setting::Reset => None,
Setting::NotSet => old_settings.org_id,
},
project_id: match new.project_id {
Setting::Set(new_project_id) => Some(new_project_id),
Setting::Reset => None,
Setting::NotSet => old_settings.project_id,
},
api_version: match new.api_version {
Setting::Set(new_api_version) => Some(new_api_version),
Setting::Reset => None,
Setting::NotSet => old_settings.api_version,
},
deployment_id: match new.deployment_id {
Setting::Set(new_deployment_id) => Some(new_deployment_id),
Setting::Reset => None,
Setting::NotSet => old_settings.deployment_id,
},
base_url: match new.base_url {
Setting::Set(new_base_url) => Some(new_base_url),
Setting::Reset => None,
Setting::NotSet => old_settings.base_url,
},
api_key: match new.api_key {
Setting::Set(new_api_key) => Some(new_api_key),
Setting::Reset => None,
Setting::NotSet => old_settings.api_key,
},
prompts,
};
// TODO send analytics
// analytics.publish(
// PatchNetworkAnalytics {
// network_size: merged_remotes.len(),
// network_has_self: merged_self.is_some(),
// },
// &req,
// );
settings.validate()?;
index_scheduler.put_chat_settings(&workspace_uid, &settings)?;
settings.hide_secrets();
Ok(HttpResponse::Ok().json(settings))
}
async fn reset_settings(
index_scheduler: GuardedData<
ActionPolicy<{ actions::CHATS_SETTINGS_UPDATE }>,
Data<IndexScheduler>,
>,
chats_param: web::Path<ChatsParam>,
) -> Result<HttpResponse, ResponseError> {
index_scheduler.features().check_chat_completions("using the /chats/settings route")?;
let ChatsParam { workspace_uid } = chats_param.into_inner();
if index_scheduler.chat_settings(&workspace_uid)?.is_some() {
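        // Resetting writes the default settings back rather than deleting the
        // entry, so the workspace itself keeps existing.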
let settings = Default::default();
index_scheduler.put_chat_settings(&workspace_uid, &settings)?;
Ok(HttpResponse::Ok().json(settings))
} else {
Err(ResponseError::from_msg(
format!("Chat `{workspace_uid}` not found"),
Code::ChatNotFound,
))
}
}
#[derive(Debug, Clone, Deserialize, Deserr, ToSchema)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
pub struct ChatWorkspaceSettings {
#[serde(default)]
#[deserr(default)]
#[schema(value_type = Option<ChatCompletionSource>)]
pub source: Setting<ChatCompletionSource>,
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidChatCompletionOrgId>)]
#[schema(value_type = Option<String>, example = json!("dcba4321..."))]
pub org_id: Setting<String>,
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidChatCompletionProjectId>)]
#[schema(value_type = Option<String>, example = json!("4321dcba..."))]
pub project_id: Setting<String>,
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidChatCompletionApiVersion>)]
#[schema(value_type = Option<String>, example = json!("2024-02-01"))]
pub api_version: Setting<String>,
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidChatCompletionDeploymentId>)]
#[schema(value_type = Option<String>, example = json!("1234abcd..."))]
pub deployment_id: Setting<String>,
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidChatCompletionBaseApi>)]
#[schema(value_type = Option<String>, example = json!("https://api.mistral.ai/v1"))]
pub base_url: Setting<String>,
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidChatCompletionApiKey>)]
#[schema(value_type = Option<String>, example = json!("abcd1234..."))]
pub api_key: Setting<String>,
#[serde(default)]
#[deserr(default)]
#[schema(inline, value_type = Option<ChatPrompts>)]
pub prompts: Setting<ChatPrompts>,
}
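
// Illustrative only: a PATCH body for these settings (hypothetical values).
// With `Setting`, an omitted field is `NotSet` (keep the current value) and an
// explicit `null` is `Reset` (restore the default).
#[allow(dead_code)]
fn example_patch_body() -> serde_json::Value {
    serde_json::json!({
        "source": "mistral",
        "baseUrl": "https://api.mistral.ai/v1",
        "apiKey": "abcd1234...",
        "prompts": null
    })
}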
#[derive(Default, Debug, Clone, Copy, Serialize, Deserialize, Deserr, ToSchema)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
pub enum ChatCompletionSource {
#[default]
OpenAi,
Mistral,
AzureOpenAi,
VLlm,
}
impl From<ChatCompletionSource> for DbChatCompletionSource {
fn from(source: ChatCompletionSource) -> Self {
use ChatCompletionSource::*;
match source {
OpenAi => DbChatCompletionSource::OpenAi,
Mistral => DbChatCompletionSource::Mistral,
AzureOpenAi => DbChatCompletionSource::AzureOpenAi,
VLlm => DbChatCompletionSource::VLlm,
}
}
}
#[derive(Debug, Clone, Deserialize, Deserr, ToSchema)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
pub struct ChatPrompts {
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidChatCompletionSystemPrompt>)]
#[schema(value_type = Option<String>, example = json!("You are a helpful assistant..."))]
pub system: Setting<String>,
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidChatCompletionSearchDescriptionPrompt>)]
#[schema(value_type = Option<String>, example = json!("This is the search function..."))]
pub search_description: Setting<String>,
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidChatCompletionSearchQueryParamPrompt>)]
#[schema(value_type = Option<String>, example = json!("This is query parameter..."))]
pub search_q_param: Setting<String>,
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidChatCompletionSearchIndexUidParamPrompt>)]
#[schema(value_type = Option<String>, example = json!("This is index you want to search in..."))]
pub search_index_uid_param: Setting<String>,
}

View File

@ -0,0 +1,253 @@
use std::cell::RefCell;
use std::sync::RwLock;
use actix_web_lab::sse::{self, Event};
use async_openai::types::{
ChatChoiceStream, ChatCompletionMessageToolCall, ChatCompletionMessageToolCallChunk,
ChatCompletionRequestAssistantMessage, ChatCompletionRequestMessage,
ChatCompletionStreamResponseDelta, ChatCompletionToolType, CreateChatCompletionStreamResponse,
FunctionCall, FunctionCallStream, Role,
};
use bumpalo::Bump;
use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::heed::RoTxn;
use meilisearch_types::milli::index::ChatConfig;
use meilisearch_types::milli::prompt::{Prompt, PromptData};
use meilisearch_types::milli::update::new::document::DocumentFromDb;
use meilisearch_types::milli::{
DocumentId, FieldIdMapWithMetadata, GlobalFieldsIdsMap, MetadataBuilder,
};
use meilisearch_types::{Document, Index};
use serde::Serialize;
use tokio::sync::mpsc::error::SendError;
use tokio::sync::mpsc::Sender;
use super::errors::StreamErrorEvent;
use super::MEILI_APPEND_CONVERSATION_MESSAGE_NAME;
use crate::routes::chats::{MEILI_SEARCH_PROGRESS_NAME, MEILI_SEARCH_SOURCES_NAME};
pub struct SseEventSender(Sender<Event>);
impl SseEventSender {
pub fn new(sender: Sender<Event>) -> Self {
Self(sender)
}
/// Ask the front-end user to append this tool *call* to the conversation
pub async fn append_tool_call_conversation_message(
&self,
resp: CreateChatCompletionStreamResponse,
call_id: String,
function_name: String,
function_arguments: String,
) -> Result<(), SendError<Event>> {
#[allow(deprecated)] // function_call
let message =
ChatCompletionRequestMessage::Assistant(ChatCompletionRequestAssistantMessage {
content: None,
refusal: None,
name: None,
audio: None,
tool_calls: Some(vec![ChatCompletionMessageToolCall {
id: call_id,
r#type: Some(ChatCompletionToolType::Function),
function: FunctionCall { name: function_name, arguments: function_arguments },
}]),
function_call: None,
});
self.append_conversation_message(resp, &message).await
}
    /// Ask the front-end user to append this message to the conversation
pub async fn append_conversation_message(
&self,
mut resp: CreateChatCompletionStreamResponse,
message: &ChatCompletionRequestMessage,
) -> Result<(), SendError<Event>> {
let call_text = serde_json::to_string(message).unwrap();
let tool_call = ChatCompletionMessageToolCallChunk {
index: 0,
id: Some(uuid::Uuid::new_v4().to_string()),
r#type: Some(ChatCompletionToolType::Function),
function: Some(FunctionCallStream {
name: Some(MEILI_APPEND_CONVERSATION_MESSAGE_NAME.to_string()),
arguments: Some(call_text),
}),
};
resp.choices[0] = ChatChoiceStream {
index: 0,
#[allow(deprecated)] // function_call
delta: ChatCompletionStreamResponseDelta {
content: None,
function_call: None,
tool_calls: Some(vec![tool_call]),
role: Some(Role::Assistant),
refusal: None,
},
finish_reason: None,
logprobs: None,
};
self.send_json(&resp).await
}
pub async fn report_search_progress(
&self,
mut resp: CreateChatCompletionStreamResponse,
call_id: &str,
function_name: &str,
function_arguments: &str,
) -> Result<(), SendError<Event>> {
#[derive(Debug, Clone, Serialize)]
/// Provides information about the current Meilisearch search operation.
struct MeiliSearchProgress<'a> {
/// The call ID to track the sources of the search.
call_id: &'a str,
/// The name of the function we are executing.
function_name: &'a str,
/// The arguments of the function we are executing, encoded in JSON.
function_arguments: &'a str,
}
let progress = MeiliSearchProgress { call_id, function_name, function_arguments };
let call_text = serde_json::to_string(&progress).unwrap();
let tool_call = ChatCompletionMessageToolCallChunk {
index: 0,
id: Some(uuid::Uuid::new_v4().to_string()),
r#type: Some(ChatCompletionToolType::Function),
function: Some(FunctionCallStream {
name: Some(MEILI_SEARCH_PROGRESS_NAME.to_string()),
arguments: Some(call_text),
}),
};
resp.choices[0] = ChatChoiceStream {
index: 0,
#[allow(deprecated)] // function_call
delta: ChatCompletionStreamResponseDelta {
content: None,
function_call: None,
tool_calls: Some(vec![tool_call]),
role: Some(Role::Assistant),
refusal: None,
},
finish_reason: None,
logprobs: None,
};
self.send_json(&resp).await
}
pub async fn report_sources(
&self,
mut resp: CreateChatCompletionStreamResponse,
call_id: &str,
documents: &[Document],
) -> Result<(), SendError<Event>> {
#[derive(Debug, Clone, Serialize)]
/// Provides sources of the search.
struct MeiliSearchSources<'a> {
/// The call ID to track the original search associated to those sources.
call_id: &'a str,
/// The documents associated with the search (call_id).
/// Only the displayed attributes of the documents are returned.
sources: &'a [Document],
}
let sources = MeiliSearchSources { call_id, sources: documents };
let call_text = serde_json::to_string(&sources).unwrap();
let tool_call = ChatCompletionMessageToolCallChunk {
index: 0,
id: Some(uuid::Uuid::new_v4().to_string()),
r#type: Some(ChatCompletionToolType::Function),
function: Some(FunctionCallStream {
name: Some(MEILI_SEARCH_SOURCES_NAME.to_string()),
arguments: Some(call_text),
}),
};
resp.choices[0] = ChatChoiceStream {
index: 0,
#[allow(deprecated)] // function_call
delta: ChatCompletionStreamResponseDelta {
content: None,
function_call: None,
tool_calls: Some(vec![tool_call]),
role: Some(Role::Assistant),
refusal: None,
},
finish_reason: None,
logprobs: None,
};
self.send_json(&resp).await
}
pub async fn forward_response(
&self,
resp: &CreateChatCompletionStreamResponse,
) -> Result<(), SendError<Event>> {
self.send_json(resp).await
}
pub async fn send_error(&self, error: &StreamErrorEvent) -> Result<(), SendError<Event>> {
self.send_json(error).await
}
pub async fn stop(self) -> Result<(), SendError<Event>> {
        // This is how OpenAI signals a proper end of stream
// <https://platform.openai.com/docs/api-reference/assistants-streaming/events>
const DONE_DATA: &str = "[DONE]";
self.0.send(Event::Data(sse::Data::new(DONE_DATA))).await
}
async fn send_json<S: Serialize>(&self, data: &S) -> Result<(), SendError<Event>> {
self.0.send(Event::Data(sse::Data::new_json(data).unwrap())).await
}
}
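
// A rough sketch (assumption: actix-web-lab's `Sse::from_infallible_receiver`,
// which builds an SSE response from the receiving half of a tokio channel) of
// how this sender is wired up: the completion task drives the `SseEventSender`
// while the receiver feeds the HTTP response.
#[allow(dead_code)]
fn sse_response_sketch() -> impl actix_web::Responder {
    let (tx, rx) = tokio::sync::mpsc::channel::<Event>(10);
    let _sender = SseEventSender::new(tx);
    // ... spawn the task that reports progress, forwards chunks, then stops ...
    sse::Sse::from_infallible_receiver(rx)
}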
/// Format documents based on the provided template and maximum bytes.
///
/// This formatting function is usually used to generate a summary of the documents for LLMs.
pub fn format_documents<'doc>(
rtxn: &RoTxn<'_>,
index: &Index,
doc_alloc: &'doc Bump,
internal_docids: Vec<DocumentId>,
) -> Result<Vec<&'doc str>, ResponseError> {
let ChatConfig { prompt: PromptData { template, max_bytes }, .. } = index.chat_config(rtxn)?;
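    // The template was already validated when the chat settings were stored,
    // so building the prompt is not expected to fail here.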
let prompt = Prompt::new(template, max_bytes).unwrap();
let fid_map = index.fields_ids_map(rtxn)?;
let metadata_builder = MetadataBuilder::from_index(index, rtxn)?;
let fid_map_with_meta = FieldIdMapWithMetadata::new(fid_map.clone(), metadata_builder);
let global = RwLock::new(fid_map_with_meta);
let gfid_map = RefCell::new(GlobalFieldsIdsMap::new(&global));
let external_ids: Vec<String> = index
.external_id_of(rtxn, internal_docids.iter().copied())?
.into_iter()
.collect::<Result<_, _>>()?;
let mut renders = Vec::new();
for (docid, external_docid) in internal_docids.into_iter().zip(external_ids) {
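        // The internal ids come from a search done in this same read
        // transaction, so the matching documents must still exist.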
let document = match DocumentFromDb::new(docid, rtxn, index, &fid_map)? {
Some(doc) => doc,
None => unreachable!("Document with internal ID {docid} not found"),
};
let text = match prompt.render_document(&external_docid, document, &gfid_map, doc_alloc) {
Ok(text) => text,
Err(err) => {
return Err(ResponseError::from_msg(
err.to_string(),
Code::InvalidChatSettingDocumentTemplate,
))
}
};
renders.push(text);
}
Ok(renders)
}

View File

@ -0,0 +1,183 @@
use std::collections::BTreeMap;
use std::convert::Infallible;
use std::str::FromStr as _;
use actix_web::web::{self, Data};
use actix_web::{HttpRequest, HttpResponse};
use byte_unit::Byte;
use deserr::actix_web::AwebJson;
use deserr::Deserr;
use index_scheduler::IndexScheduler;
use meilisearch_types::deserr::DeserrJsonError;
use meilisearch_types::error::deserr_codes::*;
use meilisearch_types::error::ResponseError;
use meilisearch_types::index_uid_pattern::IndexUidPattern;
use meilisearch_types::keys::actions;
use meilisearch_types::tasks::{ExportIndexSettings as DbExportIndexSettings, KindWithContent};
use serde::Serialize;
use serde_json::Value;
use tracing::debug;
use utoipa::{OpenApi, ToSchema};
use crate::analytics::Analytics;
use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::GuardedData;
use crate::routes::export_analytics::ExportAnalytics;
use crate::routes::{get_task_id, is_dry_run, SummarizedTaskView};
use crate::Opt;
#[derive(OpenApi)]
#[openapi(
paths(export),
tags((
name = "Export",
description = "The `/export` route allows you to trigger an export process to a remote Meilisearch instance.",
external_docs(url = "https://www.meilisearch.com/docs/reference/api/export"),
)),
)]
pub struct ExportApi;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::post().to(export)));
}
#[utoipa::path(
post,
path = "",
tag = "Export",
security(("Bearer" = ["export", "*"])),
responses(
(status = 202, description = "Export successfully enqueued", body = SummarizedTaskView, content_type = "application/json", example = json!(
{
"taskUid": 1,
"status": "enqueued",
"type": "export",
"enqueuedAt": "2021-08-11T09:25:53.000000Z"
})),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
async fn export(
index_scheduler: GuardedData<ActionPolicy<{ actions::EXPORT }>, Data<IndexScheduler>>,
export: AwebJson<Export, DeserrJsonError>,
req: HttpRequest,
opt: web::Data<Opt>,
analytics: Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
let export = export.into_inner();
    debug!(parameters = ?export, "Trigger export");
let analytics_aggregate = ExportAnalytics::from_export(&export);
let Export { url, api_key, payload_size, indexes } = export;
let indexes = match indexes {
Some(indexes) => indexes
.into_iter()
.map(|(pattern, ExportIndexSettings { filter, override_settings })| {
(pattern, DbExportIndexSettings { filter, override_settings })
})
.collect(),
None => BTreeMap::from([(
IndexUidPattern::new_unchecked("*"),
DbExportIndexSettings::default(),
)]),
};
let task = KindWithContent::Export {
url,
api_key,
payload_size: payload_size.map(|ByteWithDeserr(bytes)| bytes),
indexes,
};
let uid = get_task_id(&req, &opt)?;
let dry_run = is_dry_run(&req, &opt)?;
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task, uid, dry_run))
.await??
.into();
analytics.publish(analytics_aggregate, &req);
    Ok(HttpResponse::Accepted().json(task))
}
#[derive(Debug, Deserr, ToSchema, Serialize)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
pub struct Export {
#[schema(value_type = Option<String>, example = json!("https://ms-1234.heaven.meilisearch.com"))]
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidExportUrl>)]
pub url: String,
#[schema(value_type = Option<String>, example = json!("1234abcd"))]
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidExportApiKey>)]
pub api_key: Option<String>,
#[schema(value_type = Option<String>, example = json!("24MiB"))]
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidExportPayloadSize>)]
pub payload_size: Option<ByteWithDeserr>,
#[schema(value_type = Option<BTreeMap<String, ExportIndexSettings>>, example = json!({ "*": { "filter": null } }))]
#[deserr(default)]
#[serde(default)]
pub indexes: Option<BTreeMap<IndexUidPattern, ExportIndexSettings>>,
}
/// A wrapper around the `Byte` type that implements `Deserr`.
#[derive(Debug, Serialize)]
#[serde(transparent)]
pub struct ByteWithDeserr(pub Byte);
impl<E> deserr::Deserr<E> for ByteWithDeserr
where
E: deserr::DeserializeError,
{
fn deserialize_from_value<V: deserr::IntoValue>(
value: deserr::Value<V>,
location: deserr::ValuePointerRef,
) -> Result<Self, E> {
use deserr::{ErrorKind, Value, ValueKind};
match value {
Value::Integer(integer) => Ok(ByteWithDeserr(Byte::from_u64(integer))),
Value::String(string) => Byte::from_str(&string).map(ByteWithDeserr).map_err(|e| {
deserr::take_cf_content(E::error::<Infallible>(
None,
ErrorKind::Unexpected { msg: e.to_string() },
location,
))
}),
actual => Err(deserr::take_cf_content(E::error(
None,
ErrorKind::IncorrectValueKind {
actual,
accepted: &[ValueKind::Integer, ValueKind::String],
},
location,
))),
}
}
}
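
// Illustrative check (assuming `byte_unit` 5.x): the two accepted payload
// shapes, a raw integer byte count and a human-readable string, parse to the
// same value.
#[allow(dead_code)]
fn byte_with_deserr_shapes() {
    let from_int = Byte::from_u64(25_165_824);
    let from_str = Byte::from_str("24MiB").unwrap();
    assert_eq!(from_int, from_str);
}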
#[derive(Debug, Deserr, ToSchema, Serialize)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
pub struct ExportIndexSettings {
#[schema(value_type = Option<String>, example = json!("genres = action"))]
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidExportIndexFilter>)]
pub filter: Option<Value>,
#[schema(value_type = Option<bool>, example = json!(true))]
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidExportIndexOverrideSettings>)]
pub override_settings: bool,
}
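
// Illustrative only: a full export payload matching the structures above
// (hypothetical target URL and key, values borrowed from the schema examples).
#[allow(dead_code)]
fn example_export_body() -> Value {
    serde_json::json!({
        "url": "https://ms-1234.heaven.meilisearch.com",
        "apiKey": "1234abcd",
        "payloadSize": "24MiB",
        "indexes": {
            "movies-*": { "filter": "genres = action", "overrideSettings": true }
        }
    })
}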

View File

@ -0,0 +1,111 @@
use url::Url;
use crate::analytics::Aggregate;
use crate::routes::export::Export;
#[derive(Default)]
pub struct ExportAnalytics {
total_received: usize,
has_api_key: bool,
sum_exports_meilisearch_cloud: usize,
sum_index_patterns: usize,
sum_patterns_with_filter: usize,
sum_patterns_with_override_settings: usize,
payload_sizes: Vec<u64>,
}
impl ExportAnalytics {
pub fn from_export(export: &Export) -> Self {
let Export { url, api_key, payload_size, indexes } = export;
let url = Url::parse(url).ok();
let is_meilisearch_cloud = url.as_ref().and_then(Url::host_str).is_some_and(|host| {
host.ends_with("meilisearch.dev")
|| host.ends_with("meilisearch.com")
|| host.ends_with("meilisearch.io")
});
let has_api_key = api_key.is_some();
let index_patterns_count = indexes.as_ref().map_or(0, |indexes| indexes.len());
let patterns_with_filter_count = indexes.as_ref().map_or(0, |indexes| {
indexes.values().filter(|settings| settings.filter.is_some()).count()
});
let patterns_with_override_settings_count = indexes.as_ref().map_or(0, |indexes| {
indexes.values().filter(|settings| settings.override_settings).count()
});
let payload_sizes =
if let Some(crate::routes::export::ByteWithDeserr(byte_size)) = payload_size {
vec![byte_size.as_u64()]
} else {
vec![]
};
Self {
total_received: 1,
has_api_key,
sum_exports_meilisearch_cloud: is_meilisearch_cloud as usize,
sum_index_patterns: index_patterns_count,
sum_patterns_with_filter: patterns_with_filter_count,
sum_patterns_with_override_settings: patterns_with_override_settings_count,
payload_sizes,
}
}
}
impl Aggregate for ExportAnalytics {
fn event_name(&self) -> &'static str {
"Export Triggered"
}
fn aggregate(mut self: Box<Self>, other: Box<Self>) -> Box<Self> {
self.total_received += other.total_received;
self.has_api_key |= other.has_api_key;
self.sum_exports_meilisearch_cloud += other.sum_exports_meilisearch_cloud;
self.sum_index_patterns += other.sum_index_patterns;
self.sum_patterns_with_filter += other.sum_patterns_with_filter;
self.sum_patterns_with_override_settings += other.sum_patterns_with_override_settings;
self.payload_sizes.extend(other.payload_sizes);
self
}
fn into_event(self: Box<Self>) -> serde_json::Value {
let avg_payload_size = if self.payload_sizes.is_empty() {
None
} else {
Some(self.payload_sizes.iter().sum::<u64>() / self.payload_sizes.len() as u64)
};
let avg_exports_meilisearch_cloud = if self.total_received == 0 {
None
} else {
Some(self.sum_exports_meilisearch_cloud as f64 / self.total_received as f64)
};
let avg_index_patterns = if self.total_received == 0 {
None
} else {
Some(self.sum_index_patterns as f64 / self.total_received as f64)
};
let avg_patterns_with_filter = if self.total_received == 0 {
None
} else {
Some(self.sum_patterns_with_filter as f64 / self.total_received as f64)
};
let avg_patterns_with_override_settings = if self.total_received == 0 {
None
} else {
Some(self.sum_patterns_with_override_settings as f64 / self.total_received as f64)
};
serde_json::json!({
"total_received": self.total_received,
"has_api_key": self.has_api_key,
"avg_exports_meilisearch_cloud": avg_exports_meilisearch_cloud,
"avg_index_patterns": avg_index_patterns,
"avg_patterns_with_filter": avg_patterns_with_filter,
"avg_patterns_with_override_settings": avg_patterns_with_override_settings,
"avg_payload_size": avg_payload_size,
})
}
}
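
// Illustrative only: two export events merge by summing their counters;
// `into_event` then divides by `total_received` to report averages.
#[allow(dead_code)]
fn aggregation_sketch() {
    let a = Box::new(ExportAnalytics { total_received: 1, sum_index_patterns: 2, ..Default::default() });
    let b = Box::new(ExportAnalytics { total_received: 1, ..Default::default() });
    let merged = a.aggregate(b);
    assert_eq!(merged.total_received, 2); // reported as avg_index_patterns = 1.0
}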

View File

@ -53,6 +53,8 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
network: Some(false),
get_task_documents_route: Some(false),
composite_embedders: Some(false),
chat_completions: Some(false),
multimodal: Some(false),
})),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
@ -97,6 +99,10 @@ pub struct RuntimeTogglableFeatures {
pub get_task_documents_route: Option<bool>,
#[deserr(default)]
pub composite_embedders: Option<bool>,
#[deserr(default)]
pub chat_completions: Option<bool>,
#[deserr(default)]
pub multimodal: Option<bool>,
}
impl From<meilisearch_types::features::RuntimeTogglableFeatures> for RuntimeTogglableFeatures {
@ -109,6 +115,8 @@ impl From<meilisearch_types::features::RuntimeTogglableFeatures> for RuntimeTogg
network,
get_task_documents_route,
composite_embedders,
chat_completions,
multimodal,
} = value;
Self {
@ -119,6 +127,8 @@ impl From<meilisearch_types::features::RuntimeTogglableFeatures> for RuntimeTogg
network: Some(network),
get_task_documents_route: Some(get_task_documents_route),
composite_embedders: Some(composite_embedders),
chat_completions: Some(chat_completions),
multimodal: Some(multimodal),
}
}
}
@ -132,6 +142,8 @@ pub struct PatchExperimentalFeatureAnalytics {
network: bool,
get_task_documents_route: bool,
composite_embedders: bool,
chat_completions: bool,
multimodal: bool,
}
impl Aggregate for PatchExperimentalFeatureAnalytics {
@ -148,6 +160,8 @@ impl Aggregate for PatchExperimentalFeatureAnalytics {
network: new.network,
get_task_documents_route: new.get_task_documents_route,
composite_embedders: new.composite_embedders,
chat_completions: new.chat_completions,
multimodal: new.multimodal,
})
}
@ -173,6 +187,8 @@ impl Aggregate for PatchExperimentalFeatureAnalytics {
network: Some(false),
get_task_documents_route: Some(false),
composite_embedders: Some(false),
chat_completions: Some(false),
multimodal: Some(false),
})),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
@ -214,6 +230,8 @@ async fn patch_features(
.0
.composite_embedders
.unwrap_or(old_features.composite_embedders),
chat_completions: new_features.0.chat_completions.unwrap_or(old_features.chat_completions),
multimodal: new_features.0.multimodal.unwrap_or(old_features.multimodal),
};
// explicitly destructure for analytics rather than using the `Serialize` implementation, because
@ -227,6 +245,8 @@ async fn patch_features(
network,
get_task_documents_route,
composite_embedders,
chat_completions,
multimodal,
} = new_features;
analytics.publish(
@ -238,6 +258,8 @@ async fn patch_features(
network,
get_task_documents_route,
composite_embedders,
chat_completions,
multimodal,
},
&req,
);

View File

@ -1452,7 +1452,6 @@ fn some_documents<'a, 't: 'a>(
) -> Result<impl Iterator<Item = Result<Document, ResponseError>> + 'a, ResponseError> {
let fields_ids_map = index.fields_ids_map(rtxn)?;
let all_fields: Vec<_> = fields_ids_map.iter().map(|(id, _)| id).collect();
let embedding_configs = index.embedding_configs(rtxn)?;
Ok(index.iter_documents(rtxn, doc_ids)?.map(move |ret| {
ret.map_err(ResponseError::from).and_then(|(key, document)| -> Result<_, ResponseError> {
@ -1468,15 +1467,9 @@ fn some_documents<'a, 't: 'a>(
Some(Value::Object(map)) => map,
_ => Default::default(),
};
for (name, vector) in index.embeddings(rtxn, key)? {
let user_provided = embedding_configs
.iter()
.find(|conf| conf.name == name)
.is_some_and(|conf| conf.user_provided.contains(key));
let embeddings = ExplicitVectors {
embeddings: Some(vector.into()),
regenerate: !user_provided,
};
for (name, (vector, regenerate)) in index.embeddings(rtxn, key)? {
let embeddings =
ExplicitVectors { embeddings: Some(vector.into()), regenerate };
vectors.insert(
name,
serde_json::to_value(embeddings).map_err(MeilisearchHttpError::from)?,

View File

@ -56,6 +56,8 @@ pub struct FacetSearchQuery {
pub q: Option<String>,
#[deserr(default, error = DeserrJsonError<InvalidSearchVector>)]
pub vector: Option<Vec<f32>>,
#[deserr(default, error = DeserrJsonError<InvalidSearchMedia>)]
pub media: Option<Value>,
#[deserr(default, error = DeserrJsonError<InvalidSearchHybridQuery>)]
pub hybrid: Option<HybridQuery>,
#[deserr(default, error = DeserrJsonError<InvalidSearchFilter>)]
@ -94,6 +96,7 @@ impl FacetSearchAggregator {
facet_name,
vector,
q,
media,
filter,
matching_strategy,
attributes_to_search_on,
@ -108,6 +111,7 @@ impl FacetSearchAggregator {
facet_names: Some(facet_name.clone()).into_iter().collect(),
additional_search_parameters_provided: q.is_some()
|| vector.is_some()
|| media.is_some()
|| filter.is_some()
|| *matching_strategy != MatchingStrategy::default()
|| attributes_to_search_on.is_some()
@ -291,6 +295,7 @@ impl From<FacetSearchQuery> for SearchQuery {
facet_name: _,
q,
vector,
media,
filter,
matching_strategy,
attributes_to_search_on,
@ -312,6 +317,7 @@ impl From<FacetSearchQuery> for SearchQuery {
SearchQuery {
q,
media,
offset: DEFAULT_SEARCH_OFFSET(),
limit: DEFAULT_SEARCH_LIMIT(),
page,

View File

@ -172,7 +172,7 @@ pub async fn list_indexes(
debug!(parameters = ?paginate, "List indexes");
let filters = index_scheduler.filters();
let (total, indexes) =
index_scheduler.get_paginated_indexes_stats(filters, *paginate.offset, *paginate.limit)?;
index_scheduler.paginated_indexes_stats(filters, *paginate.offset, *paginate.limit)?;
let indexes = indexes
.into_iter()
.map(|(name, stats)| IndexView {

View File

@ -205,6 +205,8 @@ impl TryFrom<SearchQueryGet> for SearchQuery {
Ok(Self {
q: other.q,
// `media` not supported for `GET`
media: None,
vector: other.vector.map(CS::into_inner),
offset: other.offset.0,
limit: other.limit.0,
@ -481,28 +483,30 @@ pub fn search_kind(
index_uid: String,
index: &milli::Index,
) -> Result<SearchKind, ResponseError> {
let is_placeholder_query =
if let Some(q) = query.q.as_deref() { q.trim().is_empty() } else { true };
let non_placeholder_query = !is_placeholder_query;
let is_media = query.media.is_some();
// handle with care, the order of cases matters, the semantics is subtle
match (query.q.as_deref(), &query.hybrid, query.vector.as_deref()) {
// empty query, no vector => placeholder search
(Some(q), _, None) if q.trim().is_empty() => Ok(SearchKind::KeywordOnly),
// no query, no vector => placeholder search
(None, _, None) => Ok(SearchKind::KeywordOnly),
// hybrid.semantic_ratio == 1.0 => vector
(_, Some(HybridQuery { semantic_ratio, embedder }), v) if **semantic_ratio == 1.0 => {
SearchKind::semantic(index_scheduler, index_uid, index, embedder, v.map(|v| v.len()))
}
// hybrid.semantic_ratio == 0.0 => keyword
(_, Some(HybridQuery { semantic_ratio, embedder: _ }), _) if **semantic_ratio == 0.0 => {
match (is_media, non_placeholder_query, &query.hybrid, query.vector.as_deref()) {
// media + vector => error
(true, _, _, Some(_)) => Err(MeilisearchHttpError::MediaAndVector.into()),
// media + !hybrid => error
(true, _, None, _) => Err(MeilisearchHttpError::MissingSearchHybrid.into()),
// vector + !hybrid => error
(_, _, None, Some(_)) => Err(MeilisearchHttpError::MissingSearchHybrid.into()),
        // hybrid with semantic_ratio == 0.0 => keyword
(_, _, Some(HybridQuery { semantic_ratio, embedder: _ }), _) if **semantic_ratio == 0.0 => {
Ok(SearchKind::KeywordOnly)
}
// no query, hybrid, vector => semantic
(None, Some(HybridQuery { semantic_ratio: _, embedder }), Some(v)) => {
SearchKind::semantic(index_scheduler, index_uid, index, embedder, Some(v.len()))
// !q + !vector => placeholder search
(false, false, _, None) => Ok(SearchKind::KeywordOnly),
        // hybrid with semantic_ratio == 1.0 => semantic
(_, _, Some(HybridQuery { semantic_ratio, embedder }), v) if **semantic_ratio == 1.0 => {
SearchKind::semantic(index_scheduler, index_uid, index, embedder, v.map(|v| v.len()))
}
// query, no hybrid, no vector => keyword
(Some(_), None, None) => Ok(SearchKind::KeywordOnly),
// query, hybrid, maybe vector => hybrid
(Some(_), Some(HybridQuery { semantic_ratio, embedder }), v) => SearchKind::hybrid(
// q + hybrid => hybrid
(_, true, Some(HybridQuery { semantic_ratio, embedder }), v) => SearchKind::hybrid(
index_scheduler,
index_uid,
index,
@ -510,7 +514,11 @@ pub fn search_kind(
**semantic_ratio,
v.map(|v| v.len()),
),
(_, None, Some(_)) => Err(MeilisearchHttpError::MissingSearchHybrid.into()),
// !q + hybrid => semantic
(_, false, Some(HybridQuery { semantic_ratio: _, embedder }), v) => {
SearchKind::semantic(index_scheduler, index_uid, index, embedder, v.map(|v| v.len()))
}
// q => keyword
(false, true, None, None) => Ok(SearchKind::KeywordOnly),
}
}

View File

@ -61,6 +61,8 @@ pub struct SearchAggregator<Method: AggregateMethod> {
semantic_ratio: bool,
hybrid: bool,
retrieve_vectors: bool,
// Number of requests containing `media`
total_media: usize,
// every time a search is done, we increment the counter linked to the used settings
matching_strategy: HashMap<String, usize>,
@ -101,6 +103,7 @@ impl<Method: AggregateMethod> SearchAggregator<Method> {
let SearchQuery {
q,
vector,
media,
offset,
limit,
page,
@ -175,6 +178,11 @@ impl<Method: AggregateMethod> SearchAggregator<Method> {
if let Some(ref vector) = vector {
ret.max_vector_size = vector.len();
}
if media.is_some() {
ret.total_media = 1;
}
ret.retrieve_vectors |= retrieve_vectors;
if query.is_finite_pagination() {
@ -277,6 +285,7 @@ impl<Method: AggregateMethod> Aggregate for SearchAggregator<Method> {
show_ranking_score_details,
semantic_ratio,
hybrid,
total_media,
total_degraded,
total_used_negative_operator,
ranking_score_threshold,
@ -327,6 +336,7 @@ impl<Method: AggregateMethod> Aggregate for SearchAggregator<Method> {
self.retrieve_vectors |= retrieve_vectors;
self.semantic_ratio |= semantic_ratio;
self.hybrid |= hybrid;
self.total_media += total_media;
// pagination
self.max_limit = self.max_limit.max(max_limit);
@ -403,6 +413,7 @@ impl<Method: AggregateMethod> Aggregate for SearchAggregator<Method> {
show_ranking_score_details,
semantic_ratio,
hybrid,
total_media,
total_degraded,
total_used_negative_operator,
ranking_score_threshold,
@ -450,6 +461,7 @@ impl<Method: AggregateMethod> Aggregate for SearchAggregator<Method> {
"hybrid": {
"enabled": hybrid,
"semantic_ratio": semantic_ratio,
"total_media": total_media,
},
"pagination": {
"max_limit": max_limit,

View File

@ -5,8 +5,9 @@ use index_scheduler::IndexScheduler;
use meilisearch_types::deserr::DeserrJsonError;
use meilisearch_types::error::ResponseError;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::milli::update::Setting;
use meilisearch_types::settings::{
settings, SecretPolicy, SettingEmbeddingSettings, Settings, Unchecked,
settings, ChatSettings, SecretPolicy, SettingEmbeddingSettings, Settings, Unchecked,
};
use meilisearch_types::tasks::KindWithContent;
use tracing::debug;
@ -508,6 +509,17 @@ make_setting_routes!(
camelcase_attr: "prefixSearch",
analytics: PrefixSearchAnalytics
},
{
route: "/chat",
update_verb: put,
value_type: ChatSettings,
err_type: meilisearch_types::deserr::DeserrJsonError<
meilisearch_types::error::deserr_codes::InvalidSettingsIndexChat,
>,
attr: chat,
camelcase_attr: "chat",
analytics: ChatAnalytics
},
);
#[utoipa::path(
@ -597,6 +609,7 @@ pub async fn update_all(
),
facet_search: FacetSearchAnalytics::new(new_settings.facet_search.as_ref().set()),
prefix_search: PrefixSearchAnalytics::new(new_settings.prefix_search.as_ref().set()),
chat: ChatAnalytics::new(new_settings.chat.as_ref().set()),
},
&req,
);
@ -651,7 +664,11 @@ pub async fn get_all(
let index = index_scheduler.index(&index_uid)?;
let rtxn = index.read_txn()?;
let new_settings = settings(&index, &rtxn, SecretPolicy::HideSecrets)?;
let mut new_settings = settings(&index, &rtxn, SecretPolicy::HideSecrets)?;
if index_scheduler.features().check_chat_completions("showing index `chat` settings").is_err() {
new_settings.chat = Setting::NotSet;
}
debug!(returns = ?new_settings, "Get all settings");
Ok(HttpResponse::Ok().json(new_settings))
}
@ -738,8 +755,20 @@ fn validate_settings(
if matches!(embedder.indexing_embedder, Setting::Set(_)) {
features.check_composite_embedders("setting `indexingEmbedder`")?;
}
if matches!(embedder.indexing_fragments, Setting::Set(_)) {
features.check_multimodal("setting `indexingFragments`")?;
}
if matches!(embedder.search_fragments, Setting::Set(_)) {
features.check_multimodal("setting `searchFragments`")?;
}
}
}
if let Setting::Set(_chat) = &settings.chat {
features.check_chat_completions("setting `chat` in the index settings")?;
}
Ok(settings.validate()?)
}

View File

@ -10,8 +10,8 @@ use meilisearch_types::locales::{Locale, LocalizedAttributesRuleView};
use meilisearch_types::milli::update::Setting;
use meilisearch_types::milli::FilterableAttributesRule;
use meilisearch_types::settings::{
FacetingSettings, PaginationSettings, PrefixSearchSettings, ProximityPrecisionView,
RankingRuleView, SettingEmbeddingSettings, TypoSettings,
ChatSettings, FacetingSettings, PaginationSettings, PrefixSearchSettings,
ProximityPrecisionView, RankingRuleView, SettingEmbeddingSettings, TypoSettings,
};
use serde::Serialize;
@ -39,6 +39,7 @@ pub struct SettingsAnalytics {
pub non_separator_tokens: NonSeparatorTokensAnalytics,
pub facet_search: FacetSearchAnalytics,
pub prefix_search: PrefixSearchAnalytics,
pub chat: ChatAnalytics,
}
impl Aggregate for SettingsAnalytics {
@ -198,6 +199,7 @@ impl Aggregate for SettingsAnalytics {
set: new.prefix_search.set | self.prefix_search.set,
value: new.prefix_search.value.or(self.prefix_search.value),
},
chat: ChatAnalytics { set: new.chat.set | self.chat.set },
})
}
@ -454,7 +456,9 @@ pub struct PaginationAnalytics {
impl PaginationAnalytics {
pub fn new(setting: Option<&PaginationSettings>) -> Self {
Self { max_total_hits: setting.as_ref().and_then(|s| s.max_total_hits.set()) }
Self {
max_total_hits: setting.as_ref().and_then(|s| s.max_total_hits.set().map(|x| x.into())),
}
}
pub fn into_settings(self) -> SettingsAnalytics {
@ -674,3 +678,18 @@ impl PrefixSearchAnalytics {
SettingsAnalytics { prefix_search: self, ..Default::default() }
}
}
#[derive(Serialize, Default)]
pub struct ChatAnalytics {
pub set: bool,
}
impl ChatAnalytics {
pub fn new(settings: Option<&ChatSettings>) -> Self {
Self { set: settings.is_some() }
}
pub fn into_settings(self) -> SettingsAnalytics {
SettingsAnalytics { chat: self, ..Default::default() }
}
}

View File

@ -2,6 +2,7 @@ use std::collections::BTreeMap;
use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use export::Export;
use index_scheduler::IndexScheduler;
use meilisearch_auth::AuthController;
use meilisearch_types::batch_view::BatchView;
@ -52,7 +53,10 @@ const PAGINATION_DEFAULT_LIMIT_FN: fn() -> usize = || 20;
mod api_key;
pub mod batches;
pub mod chats;
mod dump;
mod export;
mod export_analytics;
pub mod features;
pub mod indexes;
mod logs;
@ -83,6 +87,7 @@ mod tasks_test;
(path = "/multi-search", api = multi_search::MultiSearchApi),
(path = "/swap-indexes", api = swap_indexes::SwapIndexesApi),
(path = "/experimental-features", api = features::ExperimentalFeaturesApi),
(path = "/export", api = export::ExportApi),
(path = "/network", api = network::NetworkApi),
),
paths(get_health, get_version, get_stats),
@ -94,7 +99,7 @@ mod tasks_test;
url = "/",
description = "Local server",
)),
components(schemas(PaginationView<KeyView>, PaginationView<IndexView>, IndexView, DocumentDeletionByFilter, AllBatches, BatchStats, ProgressStepView, ProgressView, BatchView, RuntimeTogglableFeatures, SwapIndexesPayload, DocumentEditionByFunction, MergeFacets, FederationOptions, SearchQueryWithIndex, Federation, FederatedSearch, FederatedSearchResult, SearchResults, SearchResultWithIndex, SimilarQuery, SimilarResult, PaginationView<serde_json::Value>, BrowseQuery, UpdateIndexRequest, IndexUid, IndexCreateRequest, KeyView, Action, CreateApiKey, UpdateStderrLogs, LogMode, GetLogs, IndexStats, Stats, HealthStatus, HealthResponse, VersionResponse, Code, ErrorType, AllTasks, TaskView, Status, DetailsView, ResponseError, Settings<Unchecked>, Settings<Checked>, TypoSettings, MinWordSizeTyposSetting, FacetingSettings, PaginationSettings, SummarizedTaskView, Kind, Network, Remote, FilterableAttributesRule, FilterableAttributesPatterns, AttributePatterns, FilterableAttributesFeatures, FilterFeatures))
components(schemas(PaginationView<KeyView>, PaginationView<IndexView>, IndexView, DocumentDeletionByFilter, AllBatches, BatchStats, ProgressStepView, ProgressView, BatchView, RuntimeTogglableFeatures, SwapIndexesPayload, DocumentEditionByFunction, MergeFacets, FederationOptions, SearchQueryWithIndex, Federation, FederatedSearch, FederatedSearchResult, SearchResults, SearchResultWithIndex, SimilarQuery, SimilarResult, PaginationView<serde_json::Value>, BrowseQuery, UpdateIndexRequest, IndexUid, IndexCreateRequest, KeyView, Action, CreateApiKey, UpdateStderrLogs, LogMode, GetLogs, IndexStats, Stats, HealthStatus, HealthResponse, VersionResponse, Code, ErrorType, AllTasks, TaskView, Status, DetailsView, ResponseError, Settings<Unchecked>, Settings<Checked>, TypoSettings, MinWordSizeTyposSetting, FacetingSettings, PaginationSettings, SummarizedTaskView, Kind, Network, Remote, FilterableAttributesRule, FilterableAttributesPatterns, AttributePatterns, FilterableAttributesFeatures, FilterFeatures, Export))
)]
pub struct MeilisearchApi;
@ -113,7 +118,9 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
.service(web::scope("/swap-indexes").configure(swap_indexes::configure))
.service(web::scope("/metrics").configure(metrics::configure))
.service(web::scope("/experimental-features").configure(features::configure))
.service(web::scope("/network").configure(network::configure));
.service(web::scope("/network").configure(network::configure))
.service(web::scope("/export").configure(export::configure))
.service(web::scope("/chats").configure(chats::configure));
#[cfg(feature = "swagger")]
{

View File

@ -42,6 +42,7 @@ impl MultiSearchAggregator {
federation_options,
q: _,
vector: _,
media: _,
offset: _,
limit: _,
page: _,

View File

@ -228,7 +228,7 @@ mod tests {
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r#"
{
"message": "Invalid value in parameter `types`: `createIndex` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `upgradeDatabase`.",
"message": "Invalid value in parameter `types`: `createIndex` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"

View File

@ -16,6 +16,7 @@ use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::heed::RoTxn;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::locales::Locale;
use meilisearch_types::milli::index::{self, SearchParameters};
use meilisearch_types::milli::score_details::{ScoreDetails, ScoringStrategy};
use meilisearch_types::milli::vector::parsed_vectors::ExplicitVectors;
use meilisearch_types::milli::vector::Embedder;
@ -63,6 +64,8 @@ pub struct SearchQuery {
pub q: Option<String>,
#[deserr(default, error = DeserrJsonError<InvalidSearchVector>)]
pub vector: Option<Vec<f32>>,
#[deserr(default, error = DeserrJsonError<InvalidSearchMedia>)]
pub media: Option<serde_json::Value>,
#[deserr(default, error = DeserrJsonError<InvalidSearchHybridQuery>)]
pub hybrid: Option<HybridQuery>,
#[deserr(default = DEFAULT_SEARCH_OFFSET(), error = DeserrJsonError<InvalidSearchOffset>)]
@ -119,9 +122,59 @@ pub struct SearchQuery {
pub locales: Option<Vec<Locale>>,
}
impl From<SearchParameters> for SearchQuery {
fn from(parameters: SearchParameters) -> Self {
let SearchParameters {
hybrid,
limit,
sort,
distinct,
matching_strategy,
attributes_to_search_on,
ranking_score_threshold,
} = parameters;
SearchQuery {
hybrid: hybrid.map(|index::HybridQuery { semantic_ratio, embedder }| HybridQuery {
semantic_ratio: SemanticRatio::try_from(semantic_ratio)
.ok()
.unwrap_or_else(DEFAULT_SEMANTIC_RATIO),
embedder,
}),
limit: limit.unwrap_or_else(DEFAULT_SEARCH_LIMIT),
sort,
distinct,
matching_strategy: matching_strategy.map(MatchingStrategy::from).unwrap_or_default(),
attributes_to_search_on,
ranking_score_threshold: ranking_score_threshold.map(RankingScoreThreshold::from),
q: None,
vector: None,
media: None,
offset: DEFAULT_SEARCH_OFFSET(),
page: None,
hits_per_page: None,
attributes_to_retrieve: None,
retrieve_vectors: false,
attributes_to_crop: None,
crop_length: DEFAULT_CROP_LENGTH(),
attributes_to_highlight: None,
show_matches_position: false,
show_ranking_score: false,
show_ranking_score_details: false,
filter: None,
facets: None,
highlight_pre_tag: DEFAULT_HIGHLIGHT_PRE_TAG(),
highlight_post_tag: DEFAULT_HIGHLIGHT_POST_TAG(),
crop_marker: DEFAULT_CROP_MARKER(),
locales: None,
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Deserr, ToSchema, Serialize)]
#[deserr(try_from(f64) = TryFrom::try_from -> InvalidSearchRankingScoreThreshold)]
pub struct RankingScoreThreshold(f64);
impl std::convert::TryFrom<f64> for RankingScoreThreshold {
type Error = InvalidSearchRankingScoreThreshold;
@ -136,6 +189,14 @@ impl std::convert::TryFrom<f64> for RankingScoreThreshold {
}
}
impl From<index::RankingScoreThreshold> for RankingScoreThreshold {
fn from(threshold: index::RankingScoreThreshold) -> Self {
let threshold = threshold.as_f64();
assert!((0.0..=1.0).contains(&threshold));
RankingScoreThreshold(threshold)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Deserr)]
#[deserr(try_from(f64) = TryFrom::try_from -> InvalidSimilarRankingScoreThreshold)]
pub struct RankingScoreThresholdSimilar(f64);
@ -162,6 +223,7 @@ impl fmt::Debug for SearchQuery {
let Self {
q,
vector,
media,
hybrid,
offset,
limit,
@ -216,6 +278,9 @@ impl fmt::Debug for SearchQuery {
);
}
}
if let Some(media) = media {
debug.field("media", media);
}
if let Some(hybrid) = hybrid {
debug.field("hybrid", &hybrid);
}
@ -279,8 +344,8 @@ impl fmt::Debug for SearchQuery {
#[deserr(error = DeserrJsonError<InvalidSearchHybridQuery>, rename_all = camelCase, deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
pub struct HybridQuery {
#[deserr(default, error = DeserrJsonError<InvalidSearchSemanticRatio>, default)]
#[schema(value_type = f32, default)]
#[deserr(default, error = DeserrJsonError<InvalidSearchSemanticRatio>)]
#[schema(default, value_type = f32)]
#[serde(default)]
pub semantic_ratio: SemanticRatio,
#[deserr(error = DeserrJsonError<InvalidSearchEmbedder>)]
@ -341,10 +406,10 @@ impl SearchKind {
route: Route,
) -> Result<(String, Arc<Embedder>, bool), ResponseError> {
let rtxn = index.read_txn()?;
let embedder_configs = index.embedding_configs(&rtxn)?;
let embedder_configs = index.embedding_configs().embedding_configs(&rtxn)?;
let embedders = index_scheduler.embedders(index_uid, embedder_configs)?;
let (embedder, _, quantized) = embedders
let (embedder, quantized) = embedders
.get(embedder_name)
.ok_or(match route {
Route::Search | Route::MultiSearch => {
@ -354,6 +419,7 @@ impl SearchKind {
milli::UserError::InvalidSimilarEmbedder(embedder_name.to_owned())
}
})
.map(|runtime| (runtime.embedder.clone(), runtime.is_quantized))
.map_err(milli::Error::from)?;
if let Some(vector_len) = vector_len {
@ -423,8 +489,10 @@ pub struct SearchQueryWithIndex {
pub index_uid: IndexUid,
#[deserr(default, error = DeserrJsonError<InvalidSearchQ>)]
pub q: Option<String>,
#[deserr(default, error = DeserrJsonError<InvalidSearchQ>)]
#[deserr(default, error = DeserrJsonError<InvalidSearchVector>)]
pub vector: Option<Vec<f32>>,
#[deserr(default, error = DeserrJsonError<InvalidSearchMedia>)]
pub media: Option<serde_json::Value>,
#[deserr(default, error = DeserrJsonError<InvalidSearchHybridQuery>)]
pub hybrid: Option<HybridQuery>,
#[deserr(default, error = DeserrJsonError<InvalidSearchOffset>)]
@ -505,6 +573,7 @@ impl SearchQueryWithIndex {
let SearchQuery {
q,
vector,
media,
hybrid,
offset,
limit,
@ -535,6 +604,7 @@ impl SearchQueryWithIndex {
index_uid,
q,
vector,
media,
hybrid,
offset: if offset == DEFAULT_SEARCH_OFFSET() { None } else { Some(offset) },
limit: if limit == DEFAULT_SEARCH_LIMIT() { None } else { Some(limit) },
@ -569,6 +639,7 @@ impl SearchQueryWithIndex {
federation_options,
q,
vector,
media,
offset,
limit,
page,
@ -599,6 +670,7 @@ impl SearchQueryWithIndex {
SearchQuery {
q,
vector,
media,
offset: offset.unwrap_or(DEFAULT_SEARCH_OFFSET()),
limit: limit.unwrap_or(DEFAULT_SEARCH_LIMIT()),
page,
@ -717,6 +789,16 @@ impl From<MatchingStrategy> for TermsMatchingStrategy {
}
}
impl From<index::MatchingStrategy> for MatchingStrategy {
fn from(other: index::MatchingStrategy) -> Self {
match other {
index::MatchingStrategy::Last => Self::Last,
index::MatchingStrategy::All => Self::All,
index::MatchingStrategy::Frequency => Self::Frequency,
}
}
}
#[derive(Debug, Default, Clone, PartialEq, Eq, Deserr)]
#[deserr(rename_all = camelCase)]
pub enum FacetValuesSort {
@ -882,7 +964,7 @@ pub fn add_search_rules(filter: &mut Option<Value>, rules: IndexSearchRules) {
}
}
fn prepare_search<'t>(
pub fn prepare_search<'t>(
index: &'t Index,
rtxn: &'t RoTxn,
query: &'t SearchQuery,
@ -890,6 +972,9 @@ fn prepare_search<'t>(
time_budget: TimeBudget,
features: RoFeatures,
) -> Result<(milli::Search<'t>, bool, usize, usize), ResponseError> {
if query.media.is_some() {
features.check_multimodal("passing `media` in a search query")?;
}
let mut search = index.search(rtxn);
search.time_budget(time_budget);
if let Some(ranking_score_threshold) = query.ranking_score_threshold {
@ -915,14 +1000,27 @@ fn prepare_search<'t>(
let deadline = std::time::Instant::now() + std::time::Duration::from_secs(10);
let q = query.q.as_deref();
let media = query.media.as_ref();
let search_query = match (q, media) {
(Some(text), None) => milli::vector::SearchQuery::Text(text),
(q, media) => milli::vector::SearchQuery::Media { q, media },
};
embedder
.embed_search(query.q.as_ref().unwrap(), Some(deadline))
.embed_search(search_query, Some(deadline))
.map_err(milli::vector::Error::from)
.map_err(milli::Error::from)?
}
};
search.semantic(embedder_name.clone(), embedder.clone(), *quantized, Some(vector));
search.semantic(
embedder_name.clone(),
embedder.clone(),
*quantized,
Some(vector),
query.media.clone(),
);
}
SearchKind::Hybrid { embedder_name, embedder, quantized, semantic_ratio: _ } => {
if let Some(q) = &query.q {
@ -934,6 +1032,7 @@ fn prepare_search<'t>(
embedder.clone(),
*quantized,
query.vector.clone(),
query.media.clone(),
);
}
}
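The `(q, media)` dispatch above is the crux of the multimodal path: a bare text query keeps the old text-embedding route, while any request carrying `media` (with or without `q`) is embedded as a media query. A self-contained sketch, with `EmbedInput` standing in for `milli::vector::SearchQuery`:

use serde_json::Value;

// Illustrative stand-in; variant names mirror the match above.
enum EmbedInput<'a> {
    Text(&'a str),
    Media { q: Option<&'a str>, media: Option<&'a Value> },
}

fn embed_input<'a>(q: Option<&'a str>, media: Option<&'a Value>) -> EmbedInput<'a> {
    match (q, media) {
        // Pure text: the pre-existing, cheaper path.
        (Some(text), None) => EmbedInput::Text(text),
        // Anything else (media present, or neither): the multimodal path.
        (q, media) => EmbedInput::Media { q, media },
    }
}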
@ -1058,6 +1157,7 @@ pub fn perform_search(
locales,
// already used in prepare_search
vector: _,
media: _,
hybrid: _,
offset: _,
ranking_score_threshold: _,
@ -1260,7 +1360,6 @@ struct HitMaker<'a> {
vectors_fid: Option<FieldId>,
retrieve_vectors: RetrieveVectors,
to_retrieve_ids: BTreeSet<FieldId>,
embedding_configs: Vec<milli::index::IndexEmbeddingConfig>,
formatter_builder: MatcherBuilder<'a>,
formatted_options: BTreeMap<FieldId, FormatOptions>,
show_ranking_score: bool,
@ -1375,8 +1474,6 @@ impl<'a> HitMaker<'a> {
&displayed_ids,
);
let embedding_configs = index.embedding_configs(rtxn)?;
Ok(Self {
index,
rtxn,
@ -1385,7 +1482,6 @@ impl<'a> HitMaker<'a> {
vectors_fid,
retrieve_vectors,
to_retrieve_ids,
embedding_configs,
formatter_builder,
formatted_options,
show_ranking_score: format.show_ranking_score,
@ -1431,14 +1527,8 @@ impl<'a> HitMaker<'a> {
Some(Value::Object(map)) => map,
_ => Default::default(),
};
for (name, vector) in self.index.embeddings(self.rtxn, id)? {
let user_provided = self
.embedding_configs
.iter()
.find(|conf| conf.name == name)
.is_some_and(|conf| conf.user_provided.contains(id));
let embeddings =
ExplicitVectors { embeddings: Some(vector.into()), regenerate: !user_provided };
for (name, (vector, regenerate)) in self.index.embeddings(self.rtxn, id)? {
let embeddings = ExplicitVectors { embeddings: Some(vector.into()), regenerate };
vectors.insert(
name,
serde_json::to_value(embeddings).map_err(InternalError::SerdeJson)?,
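With `regenerate` now stored alongside each embedding, the `_vectors` object is assembled in one pass with no lookup into the embedder configs. A sketch assuming `embeddings()` yields `(name, (vectors, regenerate))` pairs as in the loop above:

use std::collections::BTreeMap;
use serde_json::{json, Map, Value};

fn build_vectors_field(
    embeddings: BTreeMap<String, (Vec<Vec<f32>>, bool)>,
) -> Map<String, Value> {
    let mut vectors = Map::new();
    for (name, (vecs, regenerate)) in embeddings {
        // Shape mirrors ExplicitVectors { embeddings, regenerate }.
        vectors.insert(name, json!({ "embeddings": vecs, "regenerate": regenerate }));
    }
    vectors
}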


@ -421,7 +421,7 @@ async fn error_add_api_key_invalid_parameters_actions() {
meili_snap::snapshot!(code, @"400 Bad Request");
meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
{
"message": "Unknown value `doc.add` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `network.get`, `network.update`",
"message": "Unknown value `doc.add` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `export`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`",
"code": "invalid_api_key_actions",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"
@ -849,11 +849,27 @@ async fn list_api_keys() {
"expiresAt": null,
"createdAt": "[ignored]",
"updatedAt": "[ignored]"
},
{
"name": "Default Chat API Key",
"description": "Use it to chat and search from the frontend",
"key": "[ignored]",
"uid": "[ignored]",
"actions": [
"chatCompletions",
"search"
],
"indexes": [
"*"
],
"expiresAt": null,
"createdAt": "[ignored]",
"updatedAt": "[ignored]"
}
],
"offset": 0,
"limit": 20,
"total": 3
"total": 4
}
"###);
meili_snap::snapshot!(code, @"200 OK");


@ -93,7 +93,7 @@ async fn create_api_key_bad_actions() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `network.get`, `network.update`",
"message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `export`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`",
"code": "invalid_api_key_actions",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"


@ -42,7 +42,7 @@ async fn batch_bad_types() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `upgradeDatabase`.",
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"


@ -327,7 +327,7 @@ async fn test_summarized_document_addition_or_update() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "batched all enqueued tasks"
"batchStrategy": "batched all enqueued tasks"
}
"###);
@ -371,7 +371,7 @@ async fn test_summarized_document_addition_or_update() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "batched all enqueued tasks"
"batchStrategy": "batched all enqueued tasks"
}
"###);
}
@ -420,7 +420,7 @@ async fn test_summarized_delete_documents_by_batch() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "batched all enqueued tasks"
"batchStrategy": "batched all enqueued tasks"
}
"###);
@ -463,7 +463,7 @@ async fn test_summarized_delete_documents_by_batch() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "batched all enqueued tasks"
"batchStrategy": "batched all enqueued tasks"
}
"###);
}
@ -512,7 +512,7 @@ async fn test_summarized_delete_documents_by_filter() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "batched all enqueued tasks"
"batchStrategy": "batched all enqueued tasks"
}
"###);
@ -557,7 +557,7 @@ async fn test_summarized_delete_documents_by_filter() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "batched all enqueued tasks"
"batchStrategy": "batched all enqueued tasks"
}
"###);
@ -602,7 +602,7 @@ async fn test_summarized_delete_documents_by_filter() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "batched all enqueued tasks"
"batchStrategy": "batched all enqueued tasks"
}
"###);
}
@ -648,7 +648,7 @@ async fn test_summarized_delete_document_by_id() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "batched all enqueued tasks"
"batchStrategy": "batched all enqueued tasks"
}
"###);
@ -691,7 +691,7 @@ async fn test_summarized_delete_document_by_id() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "batched all enqueued tasks"
"batchStrategy": "batched all enqueued tasks"
}
"###);
}
@ -759,7 +759,7 @@ async fn test_summarized_settings_update() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "batched all enqueued tasks"
"batchStrategy": "batched all enqueued tasks"
}
"###);
}
@ -1315,7 +1315,7 @@ async fn test_summarized_batch_deletion() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "a batch of tasks of type `taskDeletion` cannot be batched with any other type of task"
"batchStrategy": "stopped after the last task of type `taskDeletion` because they cannot be batched with tasks of any other type."
}
"###);
}
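For any client deserializing batch objects, the rename from `batchCreationComplete` to `batchStrategy` is a single-field change. A hedged serde sketch; the struct and field set here are invented for illustration, not Meilisearch's real types:

use serde::Deserialize;

#[derive(Deserialize)]
struct BatchView {
    uid: u64,
    #[serde(rename = "batchStrategy")]
    batch_strategy: String,
}

fn main() {
    let raw = r#"{"uid": 0, "batchStrategy": "batched all enqueued tasks"}"#;
    let batch: BatchView = serde_json::from_str(raw).unwrap();
    assert_eq!(batch.uid, 0);
    assert_eq!(batch.batch_strategy, "batched all enqueued tasks");
}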


@ -278,7 +278,7 @@ pub async fn shared_index_with_score_documents() -> &'static Index<'static, Shar
static INDEX: OnceCell<Index<'static, Shared>> = OnceCell::const_new();
INDEX.get_or_init(|| async {
let server = Server::new_shared();
let index = server._index("SCORE_DOCUMENTS").to_shared();
let index = server._index("SHARED_SCORE_DOCUMENTS").to_shared();
let documents = SCORE_DOCUMENTS.clone();
let (response, _code) = index._add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();


@ -347,6 +347,16 @@ impl<State> Server<State> {
}
}
pub fn unique_index_with_prefix(&self, prefix: &str) -> Index<'_> {
let uuid = Uuid::new_v4();
Index {
uid: format!("{prefix}-{}", uuid),
service: &self.service,
encoder: Encoder::Plain,
marker: PhantomData,
}
}
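A standalone restatement of the uid scheme this helper uses (the `uuid` crate is assumed); the stable prefix keeps test indexes greppable while the v4 UUID guarantees uniqueness on the shared server:

use uuid::Uuid;

fn prefixed_uid(prefix: &str) -> String {
    // Same shape as above: "<prefix>-<uuid-v4>".
    format!("{prefix}-{}", Uuid::new_v4())
}

fn main() {
    let uid = prefixed_uid("movies");
    assert!(uid.starts_with("movies-"));
}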
pub fn unique_index_with_encoder(&self, encoder: Encoder) -> Index<'_> {
let uuid = Uuid::new_v4();
Index { uid: uuid.to_string(), service: &self.service, encoder, marker: PhantomData }
@ -454,6 +464,7 @@ pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
skip_index_budget: true,
// Having 2 threads makes the tests way faster
max_indexing_threads: MaxThreads::from_str("2").unwrap(),
experimental_no_edition_2024_for_settings: false,
},
experimental_enable_metrics: false,
..Parser::parse_from(None as Option<&str>)


@ -293,7 +293,7 @@ async fn add_csv_document() {
"enqueuedAt": "[date]"
}
"#);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@ -358,7 +358,7 @@ async fn add_csv_document_with_types() {
"enqueuedAt": "[date]"
}
"#);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@ -434,7 +434,7 @@ async fn add_csv_document_with_custom_delimiter() {
"enqueuedAt": "[date]"
}
"#);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@ -991,7 +991,7 @@ async fn add_documents_no_index_creation() {
let (response, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(code, @"202 Accepted");
snapshot!(response,
@r###"
@ -1068,7 +1068,7 @@ async fn document_addition_with_primary_key() {
}
"#);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get_task(response.uid()).await;
snapshot!(code, @"200 OK");
@ -1120,7 +1120,7 @@ async fn document_addition_with_huge_int_primary_key() {
let (response, code) = index.add_documents(documents, Some("primary")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(response,
@r###"
{
@ -1178,7 +1178,7 @@ async fn replace_document() {
}
"#);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents = json!([
{
@ -1190,7 +1190,7 @@ async fn replace_document() {
let (task, code) = index.add_documents(documents, None).await;
snapshot!(code,@"202 Accepted");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
@ -1362,7 +1362,7 @@ async fn error_add_documents_bad_document_id() {
}
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -1399,7 +1399,7 @@ async fn error_add_documents_bad_document_id() {
}
]);
let (value, _code) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.failed();
server.wait_task(value.uid()).await.failed();
let (response, code) = index.get_task(value.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -1436,7 +1436,7 @@ async fn error_add_documents_bad_document_id() {
}
]);
let (value, _code) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.failed();
server.wait_task(value.uid()).await.failed();
let (response, code) = index.get_task(value.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -1478,7 +1478,7 @@ async fn error_add_documents_missing_document_id() {
}
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -1527,7 +1527,7 @@ async fn error_document_field_limit_reached_in_one_document() {
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.failed();
let response = server.wait_task(response.uid()).await.failed();
snapshot!(code, @"202 Accepted");
// Documents without a primary key are not accepted.
snapshot!(response,
@ -1576,7 +1576,7 @@ async fn error_document_field_limit_reached_over_multiple_documents() {
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(code, @"202 Accepted");
snapshot!(response,
@r###"
@ -1611,7 +1611,7 @@ async fn error_document_field_limit_reached_over_multiple_documents() {
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.failed();
let response = server.wait_task(response.uid()).await.failed();
snapshot!(code, @"202 Accepted");
snapshot!(response,
@r###"
@ -1660,7 +1660,7 @@ async fn error_document_field_limit_reached_in_one_nested_document() {
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(code, @"202 Accepted");
// Documents without a primary key are not accepted.
snapshot!(response,
@ -1705,7 +1705,7 @@ async fn error_document_field_limit_reached_over_multiple_documents_with_nested_
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(code, @"202 Accepted");
snapshot!(response,
@r###"
@ -1741,7 +1741,7 @@ async fn error_document_field_limit_reached_over_multiple_documents_with_nested_
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(code, @"202 Accepted");
snapshot!(response,
@r###"
@ -1790,7 +1790,7 @@ async fn add_documents_with_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
let response = index.wait_task(task.uid()).await.succeeded();
let response = server.wait_task(task.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r#"
{
@ -1914,7 +1914,7 @@ async fn update_documents_with_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
let response = index.wait_task(task.uid()).await.succeeded();
let response = server.wait_task(task.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r#"
{
@ -1983,7 +1983,7 @@ async fn update_documents_with_geo_field() {
}
]);
let (task, _status_code) = index.update_documents(updated_documents, None).await;
let response = index.wait_task(task.uid()).await.succeeded();
let response = server.wait_task(task.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
@ -2097,7 +2097,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".indexUid" => "[uuid]" }),
@ -2135,7 +2135,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2173,7 +2173,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2211,7 +2211,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2249,7 +2249,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2287,7 +2287,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2325,7 +2325,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2363,7 +2363,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2401,7 +2401,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2439,7 +2439,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2477,7 +2477,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2515,7 +2515,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2556,7 +2556,7 @@ async fn add_documents_invalid_geo_field() {
let (response, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.failed();
let response = server.wait_task(response.uid()).await.failed();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
@ -2593,7 +2593,7 @@ async fn add_documents_invalid_geo_field() {
let (response, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.failed();
let response = server.wait_task(response.uid()).await.failed();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
@ -2630,7 +2630,7 @@ async fn add_documents_invalid_geo_field() {
let (response, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.failed();
let response = server.wait_task(response.uid()).await.failed();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
@ -2674,7 +2674,7 @@ async fn add_invalid_geo_and_then_settings() {
]);
let (ret, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let ret = index.wait_task(ret.uid()).await.succeeded();
let ret = server.wait_task(ret.uid()).await.succeeded();
snapshot!(ret, @r###"
{
"uid": "[uid]",
@ -2697,7 +2697,7 @@ async fn add_invalid_geo_and_then_settings() {
let (ret, code) = index.update_settings(json!({ "sortableAttributes": ["_geo"] })).await;
snapshot!(code, @"202 Accepted");
let ret = index.wait_task(ret.uid()).await.failed();
let ret = server.wait_task(ret.uid()).await.failed();
snapshot!(ret, @r###"
{
"uid": "[uid]",
@ -2765,7 +2765,7 @@ async fn error_primary_key_inference() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
assert_eq!(code, 200);
@ -2806,7 +2806,7 @@ async fn error_primary_key_inference() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
assert_eq!(code, 200);
@ -2845,7 +2845,7 @@ async fn error_primary_key_inference() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_task(task.uid()).await;
assert_eq!(code, 200);
@ -2884,12 +2884,12 @@ async fn add_documents_with_primary_key_twice() {
]);
let (task, _status_code) = index.add_documents(documents.clone(), Some("title")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _code) = index.get_task(task.uid()).await;
assert_eq!(response["status"], "succeeded");
let (task, _status_code) = index.add_documents(documents, Some("title")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _code) = index.get_task(task.uid()).await;
assert_eq!(response["status"], "succeeded");
}
@ -2922,7 +2922,7 @@ async fn batch_several_documents_addition() {
// wait first batch of documents to finish
let finished_tasks = futures::future::join_all(waiter).await;
for (task, _code) in finished_tasks {
index.wait_task(task.uid()).await;
server.wait_task(task.uid()).await;
}
// run a second completely failing batch
@ -2936,7 +2936,7 @@ async fn batch_several_documents_addition() {
// wait second batch of documents to finish
let finished_tasks = futures::future::join_all(waiter).await;
for (task, _code) in finished_tasks {
index.wait_task(task.uid()).await;
server.wait_task(task.uid()).await;
}
let (response, _code) = index.filtered_tasks(&[], &["failed"], &[]).await;
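The blanket swap from `index.wait_task(...)` to `server.wait_task(...)` throughout these tests leans on task uids being server-global, so any server handle can await them; an index handle was never needed. A sketch of the call shape with stand-in types, not the real test harness API:

struct Server;
struct TaskResult { uid: u64 }

impl Server {
    // Task uids are global to the server, not scoped to an index.
    fn wait_task(&self, uid: u64) -> TaskResult {
        TaskResult { uid }
    }
}

impl TaskResult {
    fn succeeded(self) -> Self {
        // The real helper asserts the task's final status here.
        println!("task {} succeeded", self.uid);
        self
    }
}

fn main() {
    let server = Server;
    let task_uid = 42; // e.g. returned by index.add_documents(...)
    let _ = server.wait_task(task_uid).succeeded();
}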


@ -5,11 +5,12 @@ use crate::json;
#[actix_rt::test]
async fn delete_one_document_unexisting_index() {
let server = Server::new_shared();
let index = shared_does_not_exists_index().await;
let (task, code) = index.delete_document_by_filter_fail(json!({"filter": "a = b"})).await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
}
#[actix_rt::test]
@ -19,7 +20,7 @@ async fn delete_one_unexisting_document() {
index.create(None).await;
let (response, code) = index.delete_document(0).await;
assert_eq!(code, 202, "{response}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
}
#[actix_rt::test]
@ -28,10 +29,10 @@ async fn delete_one_document() {
let index = server.unique_index();
let (task, _status_code) =
index.add_documents(json!([{ "id": 0, "content": "foobar" }]), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, status_code) = index.delete_document(0).await;
assert_eq!(status_code, 202);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (_response, code) = index.get_document(0, None).await;
assert_eq!(code, 404);
@ -44,7 +45,7 @@ async fn clear_all_documents_unexisting_index() {
let (task, code) = index.clear_all_documents().await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
}
#[actix_rt::test]
@ -57,11 +58,11 @@ async fn clear_all_documents() {
None,
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.clear_all_documents().await;
assert_eq!(code, 202);
let _update = index.wait_task(task.uid()).await.succeeded();
let _update = server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert!(response["results"].as_array().unwrap().is_empty());
@ -72,11 +73,11 @@ async fn clear_all_documents_empty_index() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.clear_all_documents().await;
assert_eq!(code, 202);
let _update = index.wait_task(task.uid()).await.succeeded();
let _update = server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert!(response["results"].as_array().unwrap().is_empty());
@ -95,7 +96,7 @@ async fn error_delete_batch_unexisting_index() {
});
assert_eq!(code, 202);
let response = index.wait_task(task.uid()).await.failed();
let response = server.wait_task(task.uid()).await.failed();
assert_eq!(response["error"], expected_response);
}
@ -104,11 +105,11 @@ async fn delete_batch() {
let server = Server::new_shared();
let index = server.unique_index();
let (task,_status_code) = index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.delete_batch(vec![1, 0]).await;
assert_eq!(code, 202);
let _update = index.wait_task(task.uid()).await.succeeded();
let _update = server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
@ -120,11 +121,11 @@ async fn delete_no_document_batch() {
let server = Server::new_shared();
let index = server.unique_index();
let (task,_status_code) = index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.delete_batch(vec![]).await;
assert_eq!(code, 202, "{response}");
let _update = index.wait_task(response.uid()).await.succeeded();
let _update = server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 3);
@ -146,7 +147,7 @@ async fn delete_document_by_filter() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (stats, _) = index.stats().await;
snapshot!(json_string!(stats, {
@ -180,7 +181,7 @@ async fn delete_document_by_filter() {
}
"###);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@ -253,7 +254,7 @@ async fn delete_document_by_filter() {
}
"###);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@ -328,7 +329,7 @@ async fn delete_document_by_complex_filter() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index
.delete_document_by_filter(
json!({ "filter": ["color != red", "color != green", "color EXISTS"] }),
@ -345,7 +346,7 @@ async fn delete_document_by_complex_filter() {
}
"###);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@ -404,7 +405,7 @@ async fn delete_document_by_complex_filter() {
}
"###);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",


@ -621,7 +621,7 @@ async fn delete_document_by_filter() {
let (response, code) =
index.delete_document_by_filter_fail(json!({ "filter": "catto = jorts"})).await;
snapshot!(code, @"202 Accepted");
let response = server.wait_task(response.uid()).await;
let response = server.wait_task(response.uid()).await.failed();
snapshot!(response, @r###"
{
"uid": "[uid]",
@ -665,7 +665,7 @@ async fn fetch_document_by_filter() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.fetch_documents(json!(null)).await;
snapshot!(code, @"400 Bad Request");


@ -23,7 +23,7 @@ async fn error_get_unexisting_document() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_document(1, None).await;
@ -43,7 +43,7 @@ async fn get_document() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let documents = json!([
{
"id": 0,
@ -52,7 +52,7 @@ async fn get_document() {
]);
let (task, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_document(0, None).await;
assert_eq!(code, 200);
assert_eq!(
@ -276,7 +276,7 @@ async fn get_document_s_nested_attributes_to_retrieve() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let documents = json!([
{
@ -293,7 +293,7 @@ async fn get_document_s_nested_attributes_to_retrieve() {
]);
let (task, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_document(0, Some(json!({ "fields": ["content"] }))).await;
assert_eq!(code, 200);
@ -369,7 +369,7 @@ async fn get_document_by_filter() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.fetch_documents(json!({})).await;
let (response2, code2) = index.get_all_documents_raw("").await;
@ -525,7 +525,7 @@ async fn get_document_by_ids() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index
.fetch_documents(json!({
@ -651,7 +651,7 @@ async fn get_document_invalid_ids() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.fetch_documents(json!({"ids": ["0", "illegal/docid"] })).await;
let (response2, code2) = index.get_all_documents_raw("?ids=0,illegal/docid").await;
@ -683,7 +683,7 @@ async fn get_document_not_found_ids() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.fetch_documents(json!({"ids": ["0", 3, 42] })).await;
let (response2, code2) = index.get_all_documents_raw("?ids=0,3,42").await;
@ -726,7 +726,7 @@ async fn get_document_by_ids_and_filter() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.fetch_documents(json!({"ids": [2], "filter": "color = blue" })).await;
@ -854,7 +854,7 @@ async fn get_document_with_vectors() {
]);
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
// by default you shouldn't see the `_vectors` object
let (documents, _code) = index.get_all_documents(Default::default()).await;


@ -6,19 +6,18 @@ use crate::json;
#[actix_rt::test]
async fn error_document_update_create_index_bad_uid() {
let server = Server::new().await;
let index = server.index("883 fj!");
let server = Server::new_shared();
let index = server.unique_index_with_prefix("883 fj!");
let (response, code) = index.update_documents(json!([{"id": 1}]), None).await;
let expected_response = json!({
"message": "`883 fj!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_), and can not be more than 512 bytes.",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
});
assert_eq!(code, 400);
assert_eq!(response, expected_response);
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "`883 fj!-[uuid]` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_), and can not be more than 512 bytes.",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
}"###);
}
#[actix_rt::test]
@ -35,7 +34,7 @@ async fn document_update_with_primary_key() {
let (response, code) = index.update_documents(documents, Some("primary")).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get_task(response.uid()).await;
assert_eq!(code, 200);
@ -64,7 +63,7 @@ async fn update_document() {
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents = json!([
{
@ -76,7 +75,7 @@ async fn update_document() {
let (response, code) = index.update_documents(documents, None).await;
assert_eq!(code, 202, "response: {}", response);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get_task(response.uid()).await;
assert_eq!(code, 200);
@ -108,7 +107,7 @@ async fn update_document_gzip_encoded() {
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents = json!([
{
@ -120,7 +119,7 @@ async fn update_document_gzip_encoded() {
let (response, code) = index.update_documents(documents, None).await;
assert_eq!(code, 202, "response: {}", response);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get_task(response.uid()).await;
assert_eq!(code, 200);
@ -143,7 +142,7 @@ async fn update_larger_dataset() {
let index = server.unique_index();
let documents = serde_json::from_str(include_str!("../assets/test_set.json")).unwrap();
let (task, _code) = index.update_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_task(task.uid()).await;
assert_eq!(code, 200);
assert_eq!(response["type"], "documentAdditionOrUpdate");
@ -167,7 +166,7 @@ async fn error_update_documents_bad_document_id() {
}
]);
let (task, _code) = index.update_documents(documents, None).await;
let response = index.wait_task(task.uid()).await;
let response = server.wait_task(task.uid()).await;
assert_eq!(response["status"], json!("failed"));
assert_eq!(
response["error"]["message"],
@ -195,7 +194,7 @@ async fn error_update_documents_missing_document_id() {
}
]);
let (task, _code) = index.update_documents(documents, None).await;
let response = index.wait_task(task.uid()).await;
let response = server.wait_task(task.uid()).await;
assert_eq!(response["status"], "failed");
assert_eq!(
response["error"]["message"],
@ -220,7 +219,7 @@ async fn update_faceted_document() {
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents: Vec<_> = (0..1000)
.map(|id| {
@ -234,7 +233,7 @@ async fn update_faceted_document() {
let (response, code) = index.add_documents(documents.into(), None).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents = json!([
{
@ -246,7 +245,7 @@ async fn update_faceted_document() {
let (response, code) = index.update_documents(documents, None).await;
assert_eq!(code, 202, "response: {}", response);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
index
.search(json!({"limit": 10}), |response, code| {


@ -2187,7 +2187,9 @@ async fn import_dump_v6_containing_experimental_features() {
"containsFilter": false,
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false
"compositeEmbedders": false,
"chatCompletions": false,
"multimodal": false
}
"###);
@ -2312,7 +2314,9 @@ async fn import_dump_v6_containing_batches_and_enqueued_tasks() {
"containsFilter": false,
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false
"compositeEmbedders": false,
"chatCompletions": false,
"multimodal": false
}
"###);
@ -2417,7 +2421,9 @@ async fn generate_and_import_dump_containing_vectors() {
"containsFilter": false,
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false
"compositeEmbedders": false,
"chatCompletions": false,
"multimodal": false
}
"###);


@ -27,7 +27,7 @@ source: crates/meilisearch/tests/dumps/mod.rs
"duration": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "batched all enqueued tasks"
"batchStrategy": "batched all enqueued tasks"
},
{
"uid": 1,
@ -51,7 +51,7 @@ source: crates/meilisearch/tests/dumps/mod.rs
"duration": "PT0.144827890S",
"startedAt": "2025-02-04T10:15:21.275640274Z",
"finishedAt": "2025-02-04T10:15:21.420468164Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 0,
@ -72,7 +72,7 @@ source: crates/meilisearch/tests/dumps/mod.rs
"duration": "PT0.032902186S",
"startedAt": "2025-02-04T10:14:43.559526162Z",
"finishedAt": "2025-02-04T10:14:43.592428348Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
}
],
"total": 3,


@ -24,7 +24,9 @@ async fn experimental_features() {
"containsFilter": false,
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false
"compositeEmbedders": false,
"chatCompletions": false,
"multimodal": false
}
"###);
@ -39,7 +41,9 @@ async fn experimental_features() {
"containsFilter": false,
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false
"compositeEmbedders": false,
"chatCompletions": false,
"multimodal": false
}
"###);
@ -54,7 +58,9 @@ async fn experimental_features() {
"containsFilter": false,
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false
"compositeEmbedders": false,
"chatCompletions": false,
"multimodal": false
}
"###);
@ -70,7 +76,9 @@ async fn experimental_features() {
"containsFilter": false,
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false
"compositeEmbedders": false,
"chatCompletions": false,
"multimodal": false
}
"###);
@ -86,7 +94,9 @@ async fn experimental_features() {
"containsFilter": false,
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false
"compositeEmbedders": false,
"chatCompletions": false,
"multimodal": false
}
"###);
}
@ -109,7 +119,9 @@ async fn experimental_feature_metrics() {
"containsFilter": false,
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false
"compositeEmbedders": false,
"chatCompletions": false,
"multimodal": false
}
"###);
@ -156,7 +168,7 @@ async fn errors() {
meili_snap::snapshot!(code, @"400 Bad Request");
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
{
"message": "Unknown field `NotAFeature`: expected one of `metrics`, `logsRoute`, `editDocumentsByFunction`, `containsFilter`, `network`, `getTaskDocumentsRoute`, `compositeEmbedders`",
"message": "Unknown field `NotAFeature`: expected one of `metrics`, `logsRoute`, `editDocumentsByFunction`, `containsFilter`, `network`, `getTaskDocumentsRoute`, `compositeEmbedders`, `chatCompletions`, `multimodal`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad_request"


@ -17,7 +17,7 @@ async fn create_index_no_primary_key() {
assert_eq!(response["status"], "enqueued");
let response = index.wait_task(response.uid()).await;
let response = server.wait_task(response.uid()).await;
assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
@ -34,7 +34,7 @@ async fn create_index_with_gzip_encoded_request() {
assert_eq!(response["status"], "enqueued");
let response = index.wait_task(response.uid()).await;
let response = server.wait_task(response.uid()).await;
assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
@ -46,8 +46,10 @@ async fn create_index_with_gzip_encoded_request_and_receiving_brotli_encoded_res
let server = Server::new_shared();
let app = server.init_web_app().await;
let index = server.unique_index_with_prefix("test");
let body = serde_json::to_string(&json!({
"uid": "test",
"uid": index.uid.clone(),
"primaryKey": None::<&str>,
}))
.unwrap();
@ -68,7 +70,7 @@ async fn create_index_with_gzip_encoded_request_and_receiving_brotli_encoded_res
let parsed_response =
serde_json::from_slice::<Value>(decoded.into().as_ref()).expect("Expecting valid json");
assert_eq!(parsed_response["indexUid"], "test");
assert_eq!(parsed_response["indexUid"], index.uid);
}
#[actix_rt::test]
@ -81,7 +83,7 @@ async fn create_index_with_zlib_encoded_request() {
assert_eq!(response["status"], "enqueued");
let response = index.wait_task(response.uid()).await;
let response = server.wait_task(response.uid()).await;
assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
@ -98,7 +100,7 @@ async fn create_index_with_brotli_encoded_request() {
assert_eq!(response["status"], "enqueued");
let response = index.wait_task(response.uid()).await;
let response = server.wait_task(response.uid()).await;
assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
@ -115,7 +117,7 @@ async fn create_index_with_primary_key() {
assert_eq!(response["status"], "enqueued");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
@ -130,7 +132,7 @@ async fn create_index_with_invalid_primary_key() {
let index = server.unique_index();
let (response, code) = index.add_documents(documents, Some("title")).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.failed();
server.wait_task(response.uid()).await.failed();
let (response, code) = index.get().await;
assert_eq!(code, 200);
@ -140,7 +142,7 @@ async fn create_index_with_invalid_primary_key() {
let (response, code) = index.add_documents(documents, Some("id")).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.failed();
server.wait_task(response.uid()).await.failed();
let (response, code) = index.get().await;
assert_eq!(code, 200);
@ -179,7 +181,7 @@ async fn error_create_existing_index() {
let (task, _) = index.create(Some("primary")).await;
let response = index.wait_task(task.uid()).await;
let response = server.wait_task(task.uid()).await;
let msg = format!(
"Index `{}` already exists.",
task["indexUid"].as_str().expect("indexUid should exist").trim_matches('"')


@ -9,7 +9,7 @@ async fn create_and_delete_index() {
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
assert_eq!(index.get().await.1, 200);
@ -17,18 +17,19 @@ async fn create_and_delete_index() {
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
assert_eq!(index.get().await.1, 404);
}
#[actix_rt::test]
async fn error_delete_unexisting_index() {
let server = Server::new_shared();
let index = shared_does_not_exists_index().await;
let (task, code) = index.delete_index_fail().await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let expected_response = json!({
"message": "Index `DOES_NOT_EXISTS` not found.",
@ -37,7 +38,7 @@ async fn error_delete_unexisting_index() {
"link": "https://docs.meilisearch.com/errors#index_not_found"
});
let response = index.wait_task(task.uid()).await;
let response = server.wait_task(task.uid()).await;
assert_eq!(response["status"], "failed");
assert_eq!(response["error"], expected_response);
}
@ -58,7 +59,7 @@ async fn loop_delete_add_documents() {
}
for task in tasks {
let response = index.wait_task(task).await.succeeded();
let response = server.wait_task(task).await.succeeded();
assert_eq!(response["status"], "succeeded", "{}", response);
}
}


@ -1,8 +1,8 @@
use crate::json;
use meili_snap::{json_string, snapshot};
use serde_json::Value;
use crate::common::{shared_does_not_exists_index, Server};
use crate::json;
#[actix_rt::test]
async fn create_and_get_index() {
@ -12,7 +12,7 @@ async fn create_and_get_index() {
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get().await;


@ -10,7 +10,7 @@ async fn stats() {
assert_eq!(code, 202);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.stats().await;
@ -33,7 +33,7 @@ async fn stats() {
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.stats().await;


@ -12,10 +12,10 @@ async fn update_primary_key() {
let (task, code) = index.create(None).await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, _status_code) = index.update(Some("primary")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get().await;
@ -42,12 +42,12 @@ async fn create_and_update_with_different_encoding() {
let (create_task, code) = index.create(None).await;
assert_eq!(code, 202);
index.wait_task(create_task.uid()).await.succeeded();
server.wait_task(create_task.uid()).await.succeeded();
let index = index.with_encoder(Encoder::Brotli);
let (task, _status_code) = index.update(Some("primary")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
}
#[actix_rt::test]
@ -58,23 +58,24 @@ async fn update_nothing() {
assert_eq!(code, 202);
index.wait_task(task1.uid()).await.succeeded();
server.wait_task(task1.uid()).await.succeeded();
let (task2, code) = index.update(None).await;
assert_eq!(code, 202);
index.wait_task(task2.uid()).await.succeeded();
server.wait_task(task2.uid()).await.succeeded();
}
#[actix_rt::test]
async fn error_update_existing_primary_key() {
let server = Server::new_shared();
let index = shared_index_with_documents().await;
let (update_task, code) = index.update_index_fail(Some("primary")).await;
assert_eq!(code, 202);
let response = index.wait_task(update_task.uid()).await.failed();
let response = server.wait_task(update_task.uid()).await.failed();
let expected_response = json!({
"message": format!("Index `{}`: Index already has a primary key: `id`.", index.uid),
@ -88,12 +89,13 @@ async fn error_update_existing_primary_key() {
#[actix_rt::test]
async fn error_update_unexisting_index() {
let server = Server::new_shared();
let index = shared_does_not_exists_index().await;
let (task, code) = index.update_index_fail(Some("my-primary-key")).await;
assert_eq!(code, 202);
let response = index.wait_task(task.uid()).await.failed();
let response = server.wait_task(task.uid()).await.failed();
let expected_response = json!({
"message": format!("Index `{}` not found.", index.uid),

View File

@ -152,7 +152,7 @@ async fn distinct_search_with_offset_no_ranking() {
let documents = DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
let (task, _status_code) = index.update_distinct_attribute(json!(DOCUMENT_DISTINCT_KEY)).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
fn get_hits(response: &Value) -> Vec<&str> {
let hits_array = response["hits"].as_array().unwrap();
@ -211,7 +211,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let documents = DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
let (task, _status_code) = index.update_distinct_attribute(json!(DOCUMENT_DISTINCT_KEY)).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
fn get_hits(response: &Value) -> Vec<&str> {
let hits_array = response["hits"].as_array().unwrap();
@ -281,7 +281,7 @@ async fn distinct_at_search_time() {
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
let (task, _) = index.update_settings_filterable_attributes(json!(["color.main"])).await;
let task = index.wait_task(task.uid()).await.succeeded();
let task = server.wait_task(task.uid()).await.succeeded();
snapshot!(task, name: "succeed");
fn get_hits(response: &Value) -> Vec<String> {

View File

@ -1,10 +1,9 @@
use meili_snap::*;
use super::test_settings_documents_indexing_swapping_and_search;
use crate::common::{shared_does_not_exists_index, Server, DOCUMENTS, NESTED_DOCUMENTS};
use crate::json;
use super::test_settings_documents_indexing_swapping_and_search;
#[actix_rt::test]
async fn search_unexisting_index() {
let index = shared_does_not_exists_index().await;
@ -426,7 +425,7 @@ async fn search_non_filterable_facets() {
let index = server.unique_index();
let (response, _code) = index.update_settings(json!({"filterableAttributes": ["title"]})).await;
// Wait for the settings update to complete
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.search_post(json!({"facets": ["doggo"]})).await;
snapshot!(code, @"400 Bad Request");
@ -457,7 +456,7 @@ async fn search_non_filterable_facets_multiple_filterable() {
let index = server.unique_index();
let (response, _code) =
index.update_settings(json!({"filterableAttributes": ["title", "genres"]})).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.search_post(json!({"facets": ["doggo"]})).await;
snapshot!(code, @"400 Bad Request");
@ -487,7 +486,7 @@ async fn search_non_filterable_facets_no_filterable() {
let server = Server::new_shared();
let index = server.unique_index();
let (response, _code) = index.update_settings(json!({"filterableAttributes": []})).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.search_post(json!({"facets": ["doggo"]})).await;
snapshot!(code, @"400 Bad Request");
@ -518,7 +517,7 @@ async fn search_non_filterable_facets_multiple_facets() {
let index = server.unique_index();
let (response, _uid) =
index.update_settings(json!({"filterableAttributes": ["title", "genres"]})).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.search_post(json!({"facets": ["doggo", "neko"]})).await;
snapshot!(code, @"400 Bad Request");
@ -1002,7 +1001,7 @@ async fn sort_geo_reserved_attribute() {
let index = server.unique_index();
let (task, _code) = index.update_settings(json!({"sortableAttributes": ["id"]})).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let expected_response = json!({
"message": "`_geo` is a reserved keyword and thus can't be used as a sort expression. Use the _geoPoint(latitude, longitude) built-in rule to sort on _geo field coordinates.",
@ -1029,7 +1028,7 @@ async fn sort_reserved_attribute() {
let index = server.unique_index();
let (task, _code) = index.update_settings(json!({"sortableAttributes": ["id"]})).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let expected_response = json!({
"message": "`_geoDistance` is a reserved keyword and thus can't be used as a sort expression.",
@ -1055,7 +1054,7 @@ async fn sort_unsortable_attribute() {
let server = Server::new_shared();
let index = server.unique_index();
let (response, _code) = index.update_settings(json!({"sortableAttributes": ["id"]})).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let expected_response = json!({
"message": format!("Index `{}`: Attribute `title` is not sortable. Available sortable attributes are: `id`.", index.uid),
@ -1082,7 +1081,7 @@ async fn sort_invalid_syntax() {
let index = server.unique_index();
let (response, _code) = index.update_settings(json!({"sortableAttributes": ["id"]})).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let expected_response = json!({
"message": "Invalid syntax for the sort parameter: expected expression ending by `:asc` or `:desc`, found `title`.",
@ -1113,7 +1112,7 @@ async fn sort_unset_ranking_rule() {
json!({"sortableAttributes": ["title"], "rankingRules": ["proximity", "exactness"]}),
)
.await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let expected_response = json!({
"message": format!("Index `{}`: You must specify where `sort` is listed in the rankingRules setting to use the sort parameter at search time.", index.uid),
@ -1200,7 +1199,7 @@ async fn distinct_at_search_time() {
let index = server.unique_index();
let (response, _code) =
index.add_documents(json!([{"id": 1, "color": "Doggo", "machin": "Action"}]), None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.search_post(json!({"page": 0, "hitsPerPage": 2, "distinct": "doggo.truc"})).await;
@ -1215,7 +1214,7 @@ async fn distinct_at_search_time() {
"###);
let (task, _) = index.update_settings_filterable_attributes(json!(["color", "machin"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.search_post(json!({"page": 0, "hitsPerPage": 2, "distinct": "doggo.truc"})).await;
@ -1230,7 +1229,7 @@ async fn distinct_at_search_time() {
"###);
let (task, _) = index.update_settings_displayed_attributes(json!(["color"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.search_post(json!({"page": 0, "hitsPerPage": 2, "distinct": "doggo.truc"})).await;

View File

@ -50,11 +50,11 @@ async fn test_settings_documents_indexing_swapping_and_facet_search(
let (task, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(code, 202, "{}", task);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.update_settings(settings.clone()).await;
assert_eq!(code, 202, "{}", task);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.facet_search(query.clone()).await;
insta::allow_duplicates! {
@ -70,11 +70,11 @@ async fn test_settings_documents_indexing_swapping_and_facet_search(
let (task, code) = index.update_settings(settings.clone()).await;
assert_eq!(code, 202, "{}", task);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(code, 202, "{}", task);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.facet_search(query.clone()).await;
insta::allow_duplicates! {
@ -94,7 +94,7 @@ async fn simple_facet_search() {
let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@ -207,10 +207,10 @@ async fn simple_facet_search_on_movies() {
let (response, code) =
index.update_settings_filterable_attributes(json!(["genres", "color"])).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetQuery": "", "facetName": "genres", "q": "" })).await;
@ -228,7 +228,7 @@ async fn advanced_facet_search() {
index.update_settings_filterable_attributes(json!(["genres"])).await;
index.update_settings_typo_tolerance(json!({ "enabled": false })).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "adventre"})).await;
@ -252,7 +252,7 @@ async fn more_advanced_facet_search() {
index.update_settings_filterable_attributes(json!(["genres"])).await;
index.update_settings_typo_tolerance(json!({ "disableOnWords": ["adventre"] })).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "adventre"})).await;
@ -276,7 +276,7 @@ async fn simple_facet_search_with_max_values() {
index.update_settings_faceting(json!({ "maxValuesPerFacet": 1 })).await;
index.update_settings_filterable_attributes(json!(["genres"])).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@ -298,7 +298,7 @@ async fn simple_facet_search_by_count_with_max_values() {
.await;
index.update_settings_filterable_attributes(json!(["genres"])).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@ -314,7 +314,7 @@ async fn non_filterable_facet_search_error() {
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@ -333,7 +333,7 @@ async fn facet_search_dont_support_words() {
let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "words"})).await;
@ -351,7 +351,7 @@ async fn simple_facet_search_with_sort_by_count() {
index.update_settings_faceting(json!({ "sortFacetValuesBy": { "*": "count" } })).await;
index.update_settings_filterable_attributes(json!(["genres"])).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@ -370,7 +370,7 @@ async fn add_documents_and_deactivate_facet_search() {
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
"facetSearch": false,
@ -378,7 +378,7 @@ async fn add_documents_and_deactivate_facet_search() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@ -406,10 +406,10 @@ async fn deactivate_facet_search_and_add_documents() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@ -437,10 +437,10 @@ async fn deactivate_facet_search_add_documents_and_activate_facet_search() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
@ -448,7 +448,7 @@ async fn deactivate_facet_search_add_documents_and_activate_facet_search() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@ -469,10 +469,10 @@ async fn deactivate_facet_search_add_documents_and_reset_facet_search() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
@ -480,7 +480,7 @@ async fn deactivate_facet_search_add_documents_and_reset_facet_search() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@ -920,13 +920,13 @@ async fn distinct_facet_search_on_movies() {
let (response, code) =
index.update_settings_filterable_attributes(json!(["genres", "color"])).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.update_settings_distinct_attribute(json!("color")).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetQuery": "blob", "facetName": "genres", "q": "" })).await;

View File

@ -3,13 +3,11 @@ use meilisearch::Opt;
use tempfile::TempDir;
use super::test_settings_documents_indexing_swapping_and_search;
use crate::{
common::{
default_settings, shared_index_with_documents, shared_index_with_nested_documents, Server,
DOCUMENTS, NESTED_DOCUMENTS,
},
json,
use crate::common::{
default_settings, shared_index_with_documents, shared_index_with_nested_documents, Server,
DOCUMENTS, NESTED_DOCUMENTS,
};
use crate::json;
#[actix_rt::test]
async fn search_with_filter_string_notation() {
@ -92,7 +90,7 @@ async fn search_with_contains_filter() {
let documents = DOCUMENTS.clone();
let (request, _code) = index.add_documents(documents, None).await;
index.wait_task(request.uid()).await.succeeded();
server.wait_task(request.uid()).await.succeeded();
let (response, code) = index
.search_post(json!({
@ -259,7 +257,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
let (task, code) = index.add_documents(NESTED_DOCUMENTS.clone(), None).await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index
.update_settings(json!({"filterableAttributes": [{
@ -271,7 +269,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}]}))
.await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// Check if the Equality filter works
index
@ -336,7 +334,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}]}))
.await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// Check if the Equality filter works
index
@ -447,7 +445,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}]}))
.await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// Check if the Equality filter returns an error
index
@ -546,7 +544,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}]}))
.await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// Check if the Equality filter works
index

View File

@ -26,7 +26,7 @@ async fn search_formatted_from_sdk() {
{ "id": 42, "title": "The Hitchhiker's Guide to the Galaxy" }
]);
let (response, _) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await;
server.wait_task(response.uid()).await;
index
.search(
@ -65,7 +65,7 @@ async fn formatted_contain_wildcard() {
let documents = NESTED_DOCUMENTS.clone();
let (response, _) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
index.search(json!({ "q": "pésti", "attributesToRetrieve": ["father", "mother"], "attributesToHighlight": ["father", "mother", "*"], "attributesToCrop": ["doggos"], "showMatchesPosition": true }),
|response, code|
@ -398,7 +398,7 @@ async fn displayedattr_2_smol() {
let documents = NESTED_DOCUMENTS.clone();
let (response, _) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
index
.search(json!({ "attributesToRetrieve": ["father", "id"], "attributesToHighlight": ["mother"], "attributesToCrop": ["cattos"] }),
@ -596,7 +596,7 @@ async fn test_cjk_highlight() {
{ "id": 1, "title": "大卫到了扫罗那里" },
]);
let (response, _) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
index
.search(json!({"q": "", "attributesToHighlight": ["title"]}), |response, code| {

View File

@ -1,11 +1,10 @@
use meili_snap::{json_string, snapshot};
use meilisearch_types::milli::constants::RESERVED_GEO_FIELD_NAME;
use super::test_settings_documents_indexing_swapping_and_search;
use crate::common::shared_index_with_geo_documents;
use crate::json;
use super::test_settings_documents_indexing_swapping_and_search;
#[actix_rt::test]
async fn geo_sort_with_geo_strings() {
let index = shared_index_with_geo_documents().await;

View File

@ -17,11 +17,11 @@ async fn index_with_documents_user_provided<'a>(
"dimensions": 2}}} ))
.await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
index
}
@ -37,11 +37,11 @@ async fn index_with_documents_hf<'a>(server: &'a Server<Shared>, documents: &Val
}}} ))
.await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
index
}
@ -499,7 +499,7 @@ async fn query_combination() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Invalid request: missing `hybrid` parameter when `vector` is present.",
"message": "Invalid request: missing `hybrid` parameter when `vector` or `media` are present.",
"code": "missing_search_hybrid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#missing_search_hybrid"
@ -543,7 +543,7 @@ async fn distinct_is_applied() {
let (response, code) = index.update_settings(json!({ "distinctAttribute": "distinct" } )).await;
assert_eq!(202, code, "{:?}", response);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
// pure keyword
let (response, code) = index
@ -633,7 +633,7 @@ async fn retrieve_vectors() {
.update_settings(json!({ "displayedAttributes": ["id", "title", "desc", "_vectors"]} ))
.await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.search_post(
@ -683,7 +683,7 @@ async fn retrieve_vectors() {
let (response, code) =
index.update_settings(json!({ "displayedAttributes": ["id", "title", "desc"]} )).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.search_post(

View File

@ -99,7 +99,7 @@ async fn simple_search() {
)
.await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// english
index
@ -147,23 +147,20 @@ async fn simple_search() {
.search(
json!({"q": "進撃", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(response, @r###"
snapshot!(response, @r#"
{
"hits": [
{
"id": 852
},
{
"id": 853
}
],
"query": "進撃",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2
"estimatedTotalHits": 1
}
"###);
"#);
snapshot!(code, @"200 OK");
},
)
@ -172,23 +169,20 @@ async fn simple_search() {
// chinese
index
.search(json!({"q": "进击", "attributesToRetrieve": ["id"]}), |response, code| {
snapshot!(response, @r###"
snapshot!(response, @r#"
{
"hits": [
{
"id": 853
},
{
"id": 852
}
],
"query": "进击",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2
"estimatedTotalHits": 1
}
"###);
"#);
snapshot!(code, @"200 OK");
})
.await;
@ -221,7 +215,7 @@ async fn force_locales() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// chinese detection
index
@ -299,7 +293,7 @@ async fn force_locales_with_pattern() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// chinese detection
index
@ -375,7 +369,7 @@ async fn force_locales_with_pattern_nested() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// chinese
index
@ -450,7 +444,7 @@ async fn force_different_locales_with_pattern() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// force chinese
index
@ -528,7 +522,7 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// auto infer any language
index
@ -602,7 +596,7 @@ async fn auto_infer_locales_at_search() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@ -701,7 +695,7 @@ async fn force_different_locales_with_pattern_nested() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// chinese
index
@ -779,7 +773,7 @@ async fn settings_change() {
let documents = NESTED_DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _) = index
.update_settings(json!({
"searchableAttributes": ["document_en", "document_ja", "document_zh"],
@ -798,7 +792,7 @@ async fn settings_change() {
"enqueuedAt": "[date]"
}
"###);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
// chinese
index
@ -861,7 +855,7 @@ async fn settings_change() {
"enqueuedAt": "[date]"
}
"###);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
// chinese
index
@ -916,7 +910,7 @@ async fn invalid_locales() {
)
.await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.search_post(json!({"q": "Atta", "locales": ["invalid"]})).await;
snapshot!(code, @"400 Bad Request");
@ -1034,7 +1028,7 @@ async fn simple_facet_search() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _) = index
.facet_search(json!({"facetName": "name_zh", "facetQuery": "進撃", "locales": ["cmn"]}))
@ -1096,7 +1090,7 @@ async fn facet_search_with_localized_attributes() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _) = index
.facet_search(json!({"facetName": "name_zh", "facetQuery": "进击", "locales": ["cmn"]}))
@ -1165,7 +1159,7 @@ async fn swedish_search() {
]
}))
.await;
index.wait_task(_response.uid()).await.succeeded();
server.wait_task(_response.uid()).await.succeeded();
// infer swedish
index
@ -1286,7 +1280,7 @@ async fn german_search() {
]
}))
.await;
index.wait_task(_response.uid()).await.succeeded();
server.wait_task(_response.uid()).await.succeeded();
// infer german
index

View File

@ -9,7 +9,7 @@ async fn index_with_documents<'a>(server: &'a Server<Shared>, documents: &Value)
let index = server.unique_index();
let (task, _status_code) = index.add_documents(documents.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
}

View File

@ -38,11 +38,11 @@ async fn test_settings_documents_indexing_swapping_and_search(
let (task, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.update_settings(settings.clone()).await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index.search(query.clone(), test.clone()).await;
@ -51,11 +51,11 @@ async fn test_settings_documents_indexing_swapping_and_search(
let (task, code) = index.update_settings(settings.clone()).await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index.search(query.clone(), test.clone()).await;
}
@ -104,7 +104,7 @@ async fn bug_5547() {
let server = Server::new_shared();
let index = server.unique_index();
let (response, _code) = index.create(None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let mut documents = Vec::new();
for i in 0..65_535 {
@ -112,7 +112,7 @@ async fn bug_5547() {
}
let (response, _code) = index.add_documents(json!(documents), Some("id")).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.search_post(json!({"q": "title"})).await;
assert_eq!(code, 200);
snapshot!(response["hits"], @r###"[{"id":0,"title":"title0"},{"id":1,"title":"title1"},{"id":10,"title":"title10"},{"id":100,"title":"title100"},{"id":101,"title":"title101"},{"id":102,"title":"title102"},{"id":103,"title":"title103"},{"id":104,"title":"title104"},{"id":105,"title":"title105"},{"id":106,"title":"title106"},{"id":107,"title":"title107"},{"id":108,"title":"title108"},{"id":1000,"title":"title1000"},{"id":1001,"title":"title1001"},{"id":1002,"title":"title1002"},{"id":1003,"title":"title1003"},{"id":1004,"title":"title1004"},{"id":1005,"title":"title1005"},{"id":1006,"title":"title1006"},{"id":1007,"title":"title1007"}]"###);
@ -131,7 +131,7 @@ async fn search_with_stop_word() {
let documents = DOCUMENTS.clone();
let (task, _code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// prefix search
index
@ -196,7 +196,7 @@ async fn search_with_typo_settings() {
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "287947" }), |response, code| {
@ -228,7 +228,7 @@ async fn phrase_search_with_stop_word() {
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "how \"to\" train \"the" }), |response, code| {
@ -308,11 +308,11 @@ async fn negative_special_cases_search() {
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, _status_code) =
index.update_settings(json!({"synonyms": { "escape": ["gläss"] }})).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// There is a synonym for escape -> gläss, but we don't want "escape", only the derivative: gläss
index
@ -338,7 +338,7 @@ async fn test_kanji_language_detection() {
{ "id": 2, "title": "הַשּׁוּעָל הַמָּהִיר (״הַחוּם״) לֹא יָכוֹל לִקְפֹּץ 9.94 מֶטְרִים, נָכוֹן? ברר, 1.5°C- בַּחוּץ!" }
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "東京"}), |response, code| {
@ -361,10 +361,10 @@ async fn test_thai_language() {
{ "id": 2, "title": "สบู่สมุนไพรฝางแดงผสมว่านหางจรเข้ 100 กรัม จำนวน 6 ก้อน" }
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, _status_code) = index.update_settings(json!({"rankingRules": ["exactness"]})).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "สบู"}), |response, code| {
@ -586,7 +586,7 @@ async fn displayed_attributes() {
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.search_post(json!({ "attributesToRetrieve": ["title", "id"] })).await;
@ -601,7 +601,7 @@ async fn placeholder_search_is_hard_limited() {
let documents: Vec<_> = (0..1200).map(|i| json!({ "id": i, "text": "I am unique!" })).collect();
let (task, _status_code) = index.add_documents(documents.into(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@ -630,7 +630,7 @@ async fn placeholder_search_is_hard_limited() {
let (task, _status_code) =
index.update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } })).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@ -665,7 +665,7 @@ async fn search_is_hard_limited() {
let documents: Vec<_> = (0..1200).map(|i| json!({ "id": i, "text": "I am unique!" })).collect();
let (task, _status_code) = index.add_documents(documents.into(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@ -696,7 +696,7 @@ async fn search_is_hard_limited() {
let (task, _status_code) =
index.update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } })).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@ -735,7 +735,7 @@ async fn faceting_max_values_per_facet() {
let documents: Vec<_> = (0..10_000).map(|id| json!({ "id": id, "number": id * 10 })).collect();
let (task, _status_code) = index.add_documents(json!(documents), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@ -752,7 +752,7 @@ async fn faceting_max_values_per_facet() {
let (task, _status_code) =
index.update_settings(json!({ "faceting": { "maxValuesPerFacet": 10_000 } })).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@ -1033,7 +1033,7 @@ async fn test_degraded_score_details() {
index.add_documents(json!(documents), None).await;
// We can't really use anything other than 0ms here; otherwise, the test will get flaky.
let (res, _code) = index.update_settings(json!({ "searchCutoffMs": 0 })).await;
index.wait_task(res.uid()).await.succeeded();
server.wait_task(res.uid()).await.succeeded();
index
.search(
@ -1126,7 +1126,7 @@ async fn camelcased_words() {
{ "id": 4, "title": "testab" },
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "deLonghi"}), |response, code| {
@ -1345,12 +1345,12 @@ async fn simple_search_with_strange_synonyms() {
let (task, _status_code) =
index.update_settings(json!({ "synonyms": {"&": ["to"], "to": ["&"]} })).await;
let r = index.wait_task(task.uid()).await.succeeded();
let r = server.wait_task(task.uid()).await.succeeded();
snapshot!(r["status"], @r###""succeeded""###);
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "How to train"}), |response, code| {
@ -1416,11 +1416,11 @@ async fn change_attributes_settings() {
let documents = NESTED_DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(json!(documents), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task,_status_code) =
index.update_settings(json!({ "searchableAttributes": ["father", "mother", "doggos"], "filterableAttributes": ["doggos"] })).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// search
index
@ -1923,7 +1923,7 @@ async fn change_facet_casing() {
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index
.add_documents(
@ -1936,7 +1936,7 @@ async fn change_facet_casing() {
None,
)
.await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index
.add_documents(
@ -1949,7 +1949,7 @@ async fn change_facet_casing() {
None,
)
.await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
index
.search(json!({ "facets": ["dog"] }), |response, code| {
@ -2054,3 +2054,76 @@ async fn test_exact_typos_terms() {
)
.await;
}
#[actix_rt::test]
async fn simple_search_changing_unrelated_settings() {
let server = Server::new_shared();
let index = server.unique_index();
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "Dragon"}), |response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]), @r###"
[
{
"title": "How to Train Your Dragon: The Hidden World",
"id": "166428",
"color": [
"green",
"red"
]
}
]
"###);
})
.await;
let (task, _status_code) =
index.update_settings(json!({ "filterableAttributes": ["title"] })).await;
let r = server.wait_task(task.uid()).await.succeeded();
snapshot!(r["status"], @r###""succeeded""###);
index
.search(json!({"q": "Dragon"}), |response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]), @r###"
[
{
"title": "How to Train Your Dragon: The Hidden World",
"id": "166428",
"color": [
"green",
"red"
]
}
]
"###);
})
.await;
let (task, _status_code) = index.update_settings(json!({ "filterableAttributes": [] })).await;
let r = server.wait_task(task.uid()).await.succeeded();
snapshot!(r["status"], @r###""succeeded""###);
index
.search(json!({"q": "Dragon"}), |response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]), @r###"
[
{
"title": "How to Train Your Dragon: The Hidden World",
"id": "166428",
"color": [
"green",
"red"
]
}
]
"###);
})
.await;
}

File diff suppressed because it is too large

View File

@ -1224,6 +1224,7 @@ async fn error_bad_request_facets_by_index_facet() {
}
#[actix_rt::test]
#[ignore]
async fn error_remote_does_not_answer() {
let ms0 = Server::new().await;
let ms1 = Server::new().await;

View File

@ -114,14 +114,14 @@ async fn ensure_placeholder_search_hit_count_valid() {
}
]);
let (task, _code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _code) = index
.update_settings(
json!({ "rankingRules": ["distinct:asc"], "distinctAttribute": "distinct"}),
)
.await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
for page in 0..=4 {
index

View File

@ -9,7 +9,7 @@ async fn index_with_documents<'a>(server: &'a Server<Shared>, documents: &Value)
let index = server.unique_index();
let (task, _code) = index.add_documents(documents.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
}
@ -65,7 +65,7 @@ async fn search_no_searchable_attribute_set() {
.await;
let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@ -78,7 +78,7 @@ async fn search_no_searchable_attribute_set() {
.await;
let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@ -109,7 +109,7 @@ async fn search_on_all_attributes_restricted_set() {
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
let (task, _status_code) = index.update_settings_searchable_attributes(json!(["title"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "Captain Marvel", "attributesToSearchOn": ["*"]}), |response, code| {
@ -194,7 +194,7 @@ async fn word_ranking_rule_order_exact_words() {
let (task, _status_code) = index
.update_settings_typo_tolerance(json!({"disableOnWords": ["Captain", "Marvel"]}))
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// simple search should return 2 documents (ids: 2 and 3).
index
@ -360,7 +360,7 @@ async fn search_on_exact_field() {
let (response, code) =
index.update_settings_typo_tolerance(json!({ "disableOnAttributes": ["exact"] })).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
// Searching on an exact attribute should only return the document matching without typo.
index
.search(json!({"q": "Marvel", "attributesToSearchOn": ["exact"]}), |response, code| {
@ -557,7 +557,7 @@ async fn nested_search_on_title_restricted_set_with_suffix_wildcard() {
let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
let (task, _status_code) =
index.update_settings_searchable_attributes(json!(["details.title"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@ -595,7 +595,7 @@ async fn nested_search_no_searchable_attribute_set_with_any_wildcard() {
.await;
let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@ -608,7 +608,7 @@ async fn nested_search_no_searchable_attribute_set_with_any_wildcard() {
.await;
let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(

View File

@ -7,7 +7,7 @@ async fn set_and_reset_distinct_attribute() {
let index = server.unique_index();
let (task1, _code) = index.update_settings(json!({ "distinctAttribute": "test"})).await;
index.wait_task(task1.uid()).await.succeeded();
server.wait_task(task1.uid()).await.succeeded();
let (response, _) = index.settings().await;
@ -15,7 +15,7 @@ async fn set_and_reset_distinct_attribute() {
let (task2, _status_code) = index.update_settings(json!({ "distinctAttribute": null })).await;
index.wait_task(task2.uid()).await.succeeded();
server.wait_task(task2.uid()).await.succeeded();
let (response, _) = index.settings().await;
@ -28,7 +28,7 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {
let index = server.unique_index();
let (update_task1, _code) = index.update_distinct_attribute(json!("test")).await;
index.wait_task(update_task1.uid()).await.succeeded();
server.wait_task(update_task1.uid()).await.succeeded();
let (response, _) = index.get_distinct_attribute().await;
@ -36,7 +36,7 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {
let (update_task2, _status_code) = index.update_distinct_attribute(json!(null)).await;
index.wait_task(update_task2.uid()).await.succeeded();
server.wait_task(update_task2.uid()).await.succeeded();
let (response, _) = index.get_distinct_attribute().await;

View File

@ -338,6 +338,47 @@ async fn settings_bad_pagination() {
"###);
}
#[actix_rt::test]
async fn settings_bad_max_total_hits() {
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) =
index.update_settings(json!({ "pagination": { "maxTotalHits": "doggo" } })).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.pagination.maxTotalHits`: expected a positive integer, but found a string: `\"doggo\"`",
"code": "invalid_settings_pagination",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_settings_pagination"
}
"###);
let (response, code) =
index.update_settings_pagination(json!({ "maxTotalHits": "doggo" } )).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value type at `.maxTotalHits`: expected a positive integer, but found a string: `\"doggo\"`",
"code": "invalid_settings_pagination",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_settings_pagination"
}
"#);
let (response, code) = index.update_settings_pagination(json!({ "maxTotalHits": 0 } )).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value at `.maxTotalHits`: a non-zero integer value lower than `18446744073709551615` was expected, but found a zero",
"code": "invalid_settings_pagination",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_settings_pagination"
}
"#);
}
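The zero-value case above is rejected before it ever reaches the index: `18446744073709551615` is `u64::MAX`, which hints that `maxTotalHits` deserializes into a non-zero integer type. A hedged sketch of that validation rule, using a hypothetical `parse_max_total_hits` helper rather than the crate's actual deserr definition:

use std::num::NonZeroUsize;

// Hypothetical stand-in for the setting's deserialization rule:
// zero (and anything that cannot fit in usize) is refused up front.
fn parse_max_total_hits(raw: u64) -> Result<NonZeroUsize, String> {
    usize::try_from(raw)
        .ok()
        .and_then(NonZeroUsize::new)
        .ok_or_else(|| {
            format!(
                "a non-zero integer value lower than `{}` was expected, but found a zero",
                u64::MAX
            )
        })
}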
#[actix_rt::test]
async fn settings_bad_search_cutoff_ms() {
let server = Server::new_shared();

View File

@ -58,7 +58,7 @@ macro_rules! test_setting_routes {
let index = server.unique_index();
let (response, code) = index.create(None).await;
assert_eq!(code, 202, "{response}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let url = format!("/indexes/{}/settings/{}",
index.uid,
stringify!($setting)
@ -184,6 +184,16 @@ test_setting_routes!(
update_verb: patch,
default_value: {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [], "disableOnNumbers": false}
},
{
setting: chat,
update_verb: put,
default_value: {
"description": "",
"documentTemplate": "{% for field in fields %}{% if field.is_searchable and field.value != nil %}{{ field.name }}: {{ field.value }}\n{% endif %}{% endfor %}",
"documentTemplateMaxBytes": 400,
"searchParameters": {}
}
},
);
#[actix_rt::test]
@ -199,7 +209,7 @@ async fn get_settings() {
let server = Server::new_shared();
let index = server.unique_index();
let (response, _code) = index.create(None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200);
let settings = response.as_object().unwrap();
@ -237,6 +247,20 @@ async fn get_settings() {
assert_eq!(settings["prefixSearch"], json!("indexingTime"));
assert_eq!(settings["facetSearch"], json!(true));
assert_eq!(settings["embedders"], json!({}));
assert_eq!(settings["synonyms"], json!({}));
assert_eq!(
settings["typoTolerance"],
json!({
"enabled": true,
"minWordSizeForTypos": {
"oneTypo": 5,
"twoTypos": 9
},
"disableOnWords": [],
"disableOnAttributes": [],
"disableOnNumbers": false
})
);
}
#[actix_rt::test]
@ -244,7 +268,7 @@ async fn secrets_are_hidden_in_settings() {
let server = Server::new_shared();
let index = server.unique_index();
let (response, _code) = index.create(None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
@ -275,7 +299,7 @@ async fn secrets_are_hidden_in_settings() {
let settings_update_uid = response.uid();
index.wait_task(settings_update_uid).await.succeeded();
server.wait_task(settings_update_uid).await.succeeded();
let (response, code) = index.settings().await;
meili_snap::snapshot!(code, @"200 OK");
@ -374,14 +398,14 @@ async fn test_partial_update() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _code) = index.update_settings(json!({"displayedAttributes": ["foo"]})).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200);
assert_eq!(response["displayedAttributes"], json!(["foo"]));
assert_eq!(response["searchableAttributes"], json!(["*"]));
let (task, _) = index.update_settings(json!({"searchableAttributes": ["bar"]})).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200);
@ -396,7 +420,7 @@ async fn error_delete_settings_unexisting_index() {
let (task, code) = index.delete_settings().await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
}
#[actix_rt::test]
@ -414,12 +438,19 @@ async fn reset_all_settings() {
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (update_task,_status_code) = index
.update_settings(json!({"displayedAttributes": ["name", "age"], "searchableAttributes": ["name"], "stopWords": ["the"], "filterableAttributes": ["age"], "synonyms": {"puppy": ["dog", "doggo", "potat"] }}))
let (update_task, _status_code) = index
.update_settings(json!({
"displayedAttributes": ["name", "age"],
"searchableAttributes": ["name"],
"stopWords": ["the"],
"filterableAttributes": ["age"],
"synonyms": {"puppy": ["dog", "doggo", "potat"] },
"typoTolerance": {"disableOnNumbers": true}
}))
.await;
index.wait_task(update_task.uid()).await.succeeded();
server.wait_task(update_task.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200);
assert_eq!(response["displayedAttributes"], json!(["name", "age"]));
@ -427,9 +458,22 @@ async fn reset_all_settings() {
assert_eq!(response["stopWords"], json!(["the"]));
assert_eq!(response["synonyms"], json!({"puppy": ["dog", "doggo", "potat"] }));
assert_eq!(response["filterableAttributes"], json!(["age"]));
assert_eq!(
response["typoTolerance"],
json!({
"enabled": true,
"minWordSizeForTypos": {
"oneTypo": 5,
"twoTypos": 9
},
"disableOnWords": [],
"disableOnAttributes": [],
"disableOnNumbers": true
})
);
let (delete_task, _status_code) = index.delete_settings().await;
index.wait_task(delete_task.uid()).await.succeeded();
server.wait_task(delete_task.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200);
@ -438,6 +482,19 @@ async fn reset_all_settings() {
assert_eq!(response["stopWords"], json!([]));
assert_eq!(response["filterableAttributes"], json!([]));
assert_eq!(response["synonyms"], json!({}));
assert_eq!(
response["typoTolerance"],
json!({
"enabled": true,
"minWordSizeForTypos": {
"oneTypo": 5,
"twoTypos": 9
},
"disableOnWords": [],
"disableOnAttributes": [],
"disableOnNumbers": false
})
);
let (response, code) = index.get_document(1, None).await;
assert_eq!(code, 200);
@ -450,11 +507,11 @@ async fn update_setting_unexisting_index() {
let index = server.unique_index();
let (task, code) = index.update_settings(json!({})).await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (_response, code) = index.get().await;
assert_eq!(code, 200);
let (task, _status_code) = index.delete_settings().await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
}
#[actix_rt::test]
@ -497,7 +554,7 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {
let index = server.unique_index();
let (task, _code) = index.update_distinct_attribute(json!("test")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _) = index.get_distinct_attribute().await;
@ -505,7 +562,7 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {
let (task, _status_code) = index.update_distinct_attribute(json!(null)).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _) = index.get_distinct_attribute().await;
@ -530,7 +587,7 @@ async fn granular_filterable_attributes() {
{ "attributePatterns": ["default-facet-search"], "features": { "filter": {"equality": true, "comparison": true} } },
] })).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200, "{response}");

View File

@ -26,11 +26,11 @@ static DOCUMENTS: Lazy<crate::common::Value> = Lazy::new(|| {
#[actix_rt::test]
async fn add_docs_and_disable() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index_with_prefix("test");
let (response, _code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
@ -38,8 +38,8 @@ async fn add_docs_and_disable() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!("202", code.as_str(), "{response:?}");
server.wait_task(response.uid()).await.succeeded();
// only 1 document should match
index
@ -86,8 +86,8 @@ async fn add_docs_and_disable() {
#[actix_rt::test]
async fn disable_and_add_docs() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index_with_prefix("test");
let (response, code) = index
.update_settings(json!({
@ -95,11 +95,11 @@ async fn disable_and_add_docs() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!("202", code.as_str(), "{response:?}");
server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
// only 1 document should match
index
@ -145,8 +145,8 @@ async fn disable_and_add_docs() {
#[actix_rt::test]
async fn disable_add_docs_and_enable() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index_with_prefix("test");
let (response, code) = index
.update_settings(json!({
@ -154,11 +154,11 @@ async fn disable_add_docs_and_enable() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!("202", code.as_str(), "{response:?}");
server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
@ -166,8 +166,8 @@ async fn disable_add_docs_and_enable() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(2).await;
assert_eq!("202", code.as_str(), "{response:?}");
server.wait_task(response.uid()).await.succeeded();
// all documents should match
index
@ -253,8 +253,8 @@ async fn disable_add_docs_and_enable() {
#[actix_rt::test]
async fn disable_add_docs_and_reset() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index_with_prefix("test");
let (response, code) = index
.update_settings(json!({
@ -262,11 +262,11 @@ async fn disable_add_docs_and_reset() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!("202", code.as_str(), "{response:?}");
server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
@ -274,8 +274,8 @@ async fn disable_add_docs_and_reset() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(2).await;
assert_eq!("202", code.as_str(), "{response:?}");
server.wait_task(response.uid()).await.succeeded();
// all documents should match
index
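
Beyond the handle swap, two of the hunks above fix a latent bug: the old `index.wait_task(2).await` hard-coded a task uid, which only held while each test ran against its own fresh server. On a shared server, task uids are global and non-deterministic, so the wait must target the uid the API just returned. The before/after in miniature:

// Before (fragile on a shared server: uid 2 may belong to another test):
index.wait_task(2).await;
// After (always waits on the task this request actually enqueued):
server.wait_task(response.uid()).await.succeeded();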
@ -361,19 +361,19 @@ async fn disable_add_docs_and_reset() {
#[actix_rt::test]
async fn default_behavior() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index_with_prefix("test");
let (response, code) = index
.update_settings(json!({
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!("202", code.as_str(), "{response:?}");
server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
// all documents should match
index

View File

@ -30,7 +30,7 @@ async fn attribute_scale_search() {
let index = server.unique_index();
let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
@ -39,7 +39,7 @@ async fn attribute_scale_search() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
// the expected order is [1, 3, 2] instead of [3, 1, 2]
// because the attribute scale doesn't differentiate between 1 and 3.
@ -103,7 +103,7 @@ async fn attribute_scale_phrase_search() {
let index = server.unique_index();
let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, _code) = index
.update_settings(json!({
@ -111,7 +111,7 @@ async fn attribute_scale_phrase_search() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// the expected order is [1, 3] instead of [3, 1]
// because the attribute scale doesn't differentiate between 1 and 3.
@ -171,7 +171,7 @@ async fn word_scale_set_and_reset() {
let index = server.unique_index();
let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// Set and then reset the setting, ensuring the swap between the two settings is applied.
let (update_task1, _code) = index
@ -180,7 +180,7 @@ async fn word_scale_set_and_reset() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
index.wait_task(update_task1.uid()).await.succeeded();
server.wait_task(update_task1.uid()).await.succeeded();
let (update_task2, _code) = index
.update_settings(json!({
@ -188,7 +188,7 @@ async fn word_scale_set_and_reset() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
index.wait_task(update_task2.uid()).await.succeeded();
server.wait_task(update_task2.uid()).await.succeeded();
// [3, 1, 2]
index
@ -286,7 +286,7 @@ async fn attribute_scale_default_ranking_rules() {
let index = server.unique_index();
let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
@ -294,7 +294,7 @@ async fn attribute_scale_default_ranking_rules() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
// the expected order is [3, 1, 2]
index

View File

@ -15,7 +15,7 @@ async fn set_and_reset() {
"dictionary": ["J.R.R.", "J. R. R."],
}))
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _) = index.settings().await;
snapshot!(json_string!(response["nonSeparatorTokens"]), @r###"
@ -45,7 +45,7 @@ async fn set_and_reset() {
}))
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _) = index.settings().await;
snapshot!(json_string!(response["nonSeparatorTokens"]), @"[]");
@ -74,7 +74,7 @@ async fn set_and_search() {
let index = server.unique_index();
let (add_task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(add_task.uid()).await.succeeded();
server.wait_task(add_task.uid()).await.succeeded();
let (update_task, _code) = index
.update_settings(json!({
@ -83,7 +83,7 @@ async fn set_and_search() {
"dictionary": ["#", "A#", "B#", "C#", "D#", "E#", "F#", "G#"],
}))
.await;
index.wait_task(update_task.uid()).await.succeeded();
server.wait_task(update_task.uid()).await.succeeded();
index
.search(json!({"q": "&", "attributesToHighlight": ["content"]}), |response, code| {
@ -228,7 +228,7 @@ async fn advanced_synergies() {
let index = server.unique_index();
let (add_task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(add_task.uid()).await.succeeded();
server.wait_task(add_task.uid()).await.succeeded();
let (update_task, _code) = index
.update_settings(json!({
@ -243,7 +243,7 @@ async fn advanced_synergies() {
}
}))
.await;
index.wait_task(update_task.uid()).await.succeeded();
server.wait_task(update_task.uid()).await.succeeded();
index
.search(json!({"q": "J.R.R.", "attributesToHighlight": ["content"]}), |response, code| {
@ -353,7 +353,7 @@ async fn advanced_synergies() {
"dictionary": ["J.R.R.", "J. R. R.", "J.K.", "J. K."],
}))
.await;
index.wait_task(_response.uid()).await.succeeded();
server.wait_task(_response.uid()).await.succeeded();
index
.search(json!({"q": "jk", "attributesToHighlight": ["content"]}), |response, code| {

View File

@ -47,8 +47,8 @@ static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
#[actix_rt::test]
async fn basic() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index_with_prefix("test");
let (response, code) = index
.update_settings(json!({
@ -61,12 +61,12 @@ async fn basic() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
index
.similar(
@ -233,8 +233,8 @@ async fn basic() {
#[actix_rt::test]
async fn ranking_score_threshold() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index_with_prefix("test");
let (response, code) = index
.update_settings(json!({
@ -247,12 +247,12 @@ async fn ranking_score_threshold() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
index
.similar(
@ -503,8 +503,8 @@ async fn ranking_score_threshold() {
#[actix_rt::test]
async fn filter() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index_with_prefix("test");
let (response, code) = index
.update_settings(json!({
@ -517,12 +517,12 @@ async fn filter() {
"filterableAttributes": ["title", "release_year"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
index
.similar(
@ -621,8 +621,8 @@ async fn filter() {
#[actix_rt::test]
async fn limit_and_offset() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index_with_prefix("test");
let (response, code) = index
.update_settings(json!({
@ -635,12 +635,12 @@ async fn limit_and_offset() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
index
.similar(
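
The `similar` tests adopt the same shared-server setup but keep a readable index name through `unique_index_with_prefix("test")`. A hedged sketch of the assumed behavior (not the actual helper implementation): the helper presumably derives a collision-free uid that still starts with the given prefix, so concurrent tests on one server never clash while snapshots stay recognizable.

let server = Server::new_shared();
// Assumed to yield a unique uid such as "test-<suffix>" per test run.
let index = server.unique_index_with_prefix("test");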

View File

@ -97,7 +97,7 @@ async fn task_bad_types() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `upgradeDatabase`.",
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"
@ -108,7 +108,7 @@ async fn task_bad_types() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `upgradeDatabase`.",
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"
@ -119,7 +119,7 @@ async fn task_bad_types() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `upgradeDatabase`.",
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"

View File

@ -43,7 +43,7 @@ async fn version_too_old() {
std::fs::write(db_path.join("VERSION"), "1.11.9999").unwrap();
let options = Opt { experimental_dumpless_upgrade: true, ..default_settings };
let err = Server::new_with_options(options).await.map(|_| ()).unwrap_err();
snapshot!(err, @"Database version 1.11.9999 is too old for the experimental dumpless upgrade feature. Please generate a dump using the v1.11.9999 and import it in the v1.15.0");
snapshot!(err, @"Database version 1.11.9999 is too old for the experimental dumpless upgrade feature. Please generate a dump using the v1.11.9999 and import it in the v1.16.0");
}
#[actix_rt::test]
@ -58,7 +58,7 @@ async fn version_requires_downgrade() {
std::fs::write(db_path.join("VERSION"), format!("{major}.{minor}.{patch}")).unwrap();
let options = Opt { experimental_dumpless_upgrade: true, ..default_settings };
let err = Server::new_with_options(options).await.map(|_| ()).unwrap_err();
snapshot!(err, @"Database version 1.15.1 is higher than the Meilisearch version 1.15.0. Downgrade is not supported");
snapshot!(err, @"Database version 1.16.1 is higher than the Meilisearch version 1.16.0. Downgrade is not supported");
}
#[actix_rt::test]
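
Both failure messages above are version-pinned, so they move from v1.15.0 to v1.16.0 with the release. A minimal sketch, in plain Rust and explicitly not the actual Meilisearch implementation, of the two gates these tests exercise (tuples compare lexicographically, which stands in for semver ordering):

// Hedged paraphrase of the dumpless-upgrade version checks:
fn upgrade_gate(db: (u64, u64, u64), bin: (u64, u64, u64), oldest: (u64, u64, u64)) -> Result<(), String> {
    if db < oldest {
        // mirrors "Database version ... is too old for the experimental
        // dumpless upgrade feature" above
        return Err(format!("{db:?} is too old; generate a dump and re-import it"));
    }
    if db > bin {
        // mirrors "Downgrade is not supported" above
        return Err(format!("{db:?} is higher than the binary version {bin:?}"));
    }
    Ok(())
}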

View File

@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.0"
"upgradeTo": "v1.16.0"
},
"stats": {
"totalNbTasks": 1,
@ -24,7 +24,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "a batch of tasks of type `upgradeDatabase` cannot be batched with any other type of task"
"batchStrategy": "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type."
},
{
"uid": 23,
@ -47,7 +47,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.004146631S",
"startedAt": "2025-01-23T11:38:57.012591321Z",
"finishedAt": "2025-01-23T11:38:57.016737952Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 22,
@ -71,7 +71,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.102738497S",
"startedAt": "2025-01-23T11:36:22.551906856Z",
"finishedAt": "2025-01-23T11:36:22.654645353Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 21,
@ -95,7 +95,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.005108474S",
"startedAt": "2025-01-23T11:36:04.132670526Z",
"finishedAt": "2025-01-23T11:36:04.137779Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 20,
@ -119,7 +119,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.027954894S",
"startedAt": "2025-01-23T11:35:53.631082795Z",
"finishedAt": "2025-01-23T11:35:53.659037689Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 19,
@ -142,7 +142,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.006903297S",
"startedAt": "2025-01-20T11:50:52.874106134Z",
"finishedAt": "2025-01-20T11:50:52.881009431Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 18,
@ -171,7 +171,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000481257S",
"startedAt": "2025-01-20T11:48:04.92820416Z",
"finishedAt": "2025-01-20T11:48:04.928685417Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 17,
@ -194,7 +194,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000407005S",
"startedAt": "2025-01-20T11:47:53.509403957Z",
"finishedAt": "2025-01-20T11:47:53.509810962Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 16,
@ -217,7 +217,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000403716S",
"startedAt": "2025-01-20T11:47:48.430653005Z",
"finishedAt": "2025-01-20T11:47:48.431056721Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 15,
@ -240,7 +240,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000417016S",
"startedAt": "2025-01-20T11:47:42.429678617Z",
"finishedAt": "2025-01-20T11:47:42.430095633Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 14,
@ -264,7 +264,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT12.086284842S",
"startedAt": "2025-01-20T11:47:03.092181576Z",
"finishedAt": "2025-01-20T11:47:15.178466418Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 13,
@ -296,7 +296,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.011506614S",
"startedAt": "2025-01-16T17:18:43.29334923Z",
"finishedAt": "2025-01-16T17:18:43.304855844Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 12,
@ -324,7 +324,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007640163S",
"startedAt": "2025-01-16T17:02:52.539749853Z",
"finishedAt": "2025-01-16T17:02:52.547390016Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 11,
@ -347,7 +347,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007307840S",
"startedAt": "2025-01-16T17:01:14.112756687Z",
"finishedAt": "2025-01-16T17:01:14.120064527Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 10,
@ -375,7 +375,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007391353S",
"startedAt": "2025-01-16T17:00:29.201180268Z",
"finishedAt": "2025-01-16T17:00:29.208571621Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 9,
@ -403,7 +403,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007445825S",
"startedAt": "2025-01-16T17:00:15.77629445Z",
"finishedAt": "2025-01-16T17:00:15.783740275Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 8,
@ -436,7 +436,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.012020083S",
"startedAt": "2025-01-16T16:59:42.744086671Z",
"finishedAt": "2025-01-16T16:59:42.756106754Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 7,
@ -463,7 +463,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007440092S",
"startedAt": "2025-01-16T16:58:41.2155771Z",
"finishedAt": "2025-01-16T16:58:41.223017192Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 6,
@ -490,7 +490,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007565161S",
"startedAt": "2025-01-16T16:54:51.940332781Z",
"finishedAt": "2025-01-16T16:54:51.947897942Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 5,
@ -516,7 +516,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.016307263S",
"startedAt": "2025-01-16T16:53:19.913351957Z",
"finishedAt": "2025-01-16T16:53:19.92965922Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
}
],
"total": 23,

View File

@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.0"
"upgradeTo": "v1.16.0"
},
"stats": {
"totalNbTasks": 1,
@ -24,7 +24,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "a batch of tasks of type `upgradeDatabase` cannot be batched with any other type of task"
"batchStrategy": "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type."
},
{
"uid": 23,
@ -47,7 +47,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.004146631S",
"startedAt": "2025-01-23T11:38:57.012591321Z",
"finishedAt": "2025-01-23T11:38:57.016737952Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 22,
@ -71,7 +71,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.102738497S",
"startedAt": "2025-01-23T11:36:22.551906856Z",
"finishedAt": "2025-01-23T11:36:22.654645353Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 21,
@ -95,7 +95,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.005108474S",
"startedAt": "2025-01-23T11:36:04.132670526Z",
"finishedAt": "2025-01-23T11:36:04.137779Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 20,
@ -119,7 +119,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.027954894S",
"startedAt": "2025-01-23T11:35:53.631082795Z",
"finishedAt": "2025-01-23T11:35:53.659037689Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 19,
@ -142,7 +142,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.006903297S",
"startedAt": "2025-01-20T11:50:52.874106134Z",
"finishedAt": "2025-01-20T11:50:52.881009431Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 18,
@ -171,7 +171,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000481257S",
"startedAt": "2025-01-20T11:48:04.92820416Z",
"finishedAt": "2025-01-20T11:48:04.928685417Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 17,
@ -194,7 +194,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000407005S",
"startedAt": "2025-01-20T11:47:53.509403957Z",
"finishedAt": "2025-01-20T11:47:53.509810962Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 16,
@ -217,7 +217,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000403716S",
"startedAt": "2025-01-20T11:47:48.430653005Z",
"finishedAt": "2025-01-20T11:47:48.431056721Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 15,
@ -240,7 +240,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000417016S",
"startedAt": "2025-01-20T11:47:42.429678617Z",
"finishedAt": "2025-01-20T11:47:42.430095633Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 14,
@ -264,7 +264,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT12.086284842S",
"startedAt": "2025-01-20T11:47:03.092181576Z",
"finishedAt": "2025-01-20T11:47:15.178466418Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 13,
@ -296,7 +296,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.011506614S",
"startedAt": "2025-01-16T17:18:43.29334923Z",
"finishedAt": "2025-01-16T17:18:43.304855844Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 12,
@ -324,7 +324,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007640163S",
"startedAt": "2025-01-16T17:02:52.539749853Z",
"finishedAt": "2025-01-16T17:02:52.547390016Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 11,
@ -347,7 +347,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007307840S",
"startedAt": "2025-01-16T17:01:14.112756687Z",
"finishedAt": "2025-01-16T17:01:14.120064527Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 10,
@ -375,7 +375,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007391353S",
"startedAt": "2025-01-16T17:00:29.201180268Z",
"finishedAt": "2025-01-16T17:00:29.208571621Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 9,
@ -403,7 +403,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007445825S",
"startedAt": "2025-01-16T17:00:15.77629445Z",
"finishedAt": "2025-01-16T17:00:15.783740275Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 8,
@ -436,7 +436,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.012020083S",
"startedAt": "2025-01-16T16:59:42.744086671Z",
"finishedAt": "2025-01-16T16:59:42.756106754Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 7,
@ -463,7 +463,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007440092S",
"startedAt": "2025-01-16T16:58:41.2155771Z",
"finishedAt": "2025-01-16T16:58:41.223017192Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 6,
@ -490,7 +490,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007565161S",
"startedAt": "2025-01-16T16:54:51.940332781Z",
"finishedAt": "2025-01-16T16:54:51.947897942Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 5,
@ -516,7 +516,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.016307263S",
"startedAt": "2025-01-16T16:53:19.913351957Z",
"finishedAt": "2025-01-16T16:53:19.92965922Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
}
],
"total": 23,

View File

@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.0"
"upgradeTo": "v1.16.0"
},
"stats": {
"totalNbTasks": 1,
@ -24,7 +24,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "a batch of tasks of type `upgradeDatabase` cannot be batched with any other type of task"
"batchStrategy": "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type."
},
{
"uid": 23,
@ -47,7 +47,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.004146631S",
"startedAt": "2025-01-23T11:38:57.012591321Z",
"finishedAt": "2025-01-23T11:38:57.016737952Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 22,
@ -71,7 +71,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.102738497S",
"startedAt": "2025-01-23T11:36:22.551906856Z",
"finishedAt": "2025-01-23T11:36:22.654645353Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 21,
@ -95,7 +95,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.005108474S",
"startedAt": "2025-01-23T11:36:04.132670526Z",
"finishedAt": "2025-01-23T11:36:04.137779Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 20,
@ -119,7 +119,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.027954894S",
"startedAt": "2025-01-23T11:35:53.631082795Z",
"finishedAt": "2025-01-23T11:35:53.659037689Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 19,
@ -142,7 +142,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.006903297S",
"startedAt": "2025-01-20T11:50:52.874106134Z",
"finishedAt": "2025-01-20T11:50:52.881009431Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 18,
@ -171,7 +171,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000481257S",
"startedAt": "2025-01-20T11:48:04.92820416Z",
"finishedAt": "2025-01-20T11:48:04.928685417Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 17,
@ -194,7 +194,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000407005S",
"startedAt": "2025-01-20T11:47:53.509403957Z",
"finishedAt": "2025-01-20T11:47:53.509810962Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 16,
@ -217,7 +217,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000403716S",
"startedAt": "2025-01-20T11:47:48.430653005Z",
"finishedAt": "2025-01-20T11:47:48.431056721Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 15,
@ -240,7 +240,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000417016S",
"startedAt": "2025-01-20T11:47:42.429678617Z",
"finishedAt": "2025-01-20T11:47:42.430095633Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 14,
@ -264,7 +264,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT12.086284842S",
"startedAt": "2025-01-20T11:47:03.092181576Z",
"finishedAt": "2025-01-20T11:47:15.178466418Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 13,
@ -296,7 +296,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.011506614S",
"startedAt": "2025-01-16T17:18:43.29334923Z",
"finishedAt": "2025-01-16T17:18:43.304855844Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 12,
@ -324,7 +324,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007640163S",
"startedAt": "2025-01-16T17:02:52.539749853Z",
"finishedAt": "2025-01-16T17:02:52.547390016Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 11,
@ -347,7 +347,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007307840S",
"startedAt": "2025-01-16T17:01:14.112756687Z",
"finishedAt": "2025-01-16T17:01:14.120064527Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 10,
@ -375,7 +375,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007391353S",
"startedAt": "2025-01-16T17:00:29.201180268Z",
"finishedAt": "2025-01-16T17:00:29.208571621Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 9,
@ -403,7 +403,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007445825S",
"startedAt": "2025-01-16T17:00:15.77629445Z",
"finishedAt": "2025-01-16T17:00:15.783740275Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 8,
@ -436,7 +436,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.012020083S",
"startedAt": "2025-01-16T16:59:42.744086671Z",
"finishedAt": "2025-01-16T16:59:42.756106754Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 7,
@ -463,7 +463,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007440092S",
"startedAt": "2025-01-16T16:58:41.2155771Z",
"finishedAt": "2025-01-16T16:58:41.223017192Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 6,
@ -490,7 +490,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007565161S",
"startedAt": "2025-01-16T16:54:51.940332781Z",
"finishedAt": "2025-01-16T16:54:51.947897942Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 5,
@ -516,7 +516,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.016307263S",
"startedAt": "2025-01-16T16:53:19.913351957Z",
"finishedAt": "2025-01-16T16:53:19.92965922Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
}
],
"total": 23,

View File

@ -29,7 +29,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
}
],
"total": 1,

View File

@ -25,7 +25,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 0,
@ -49,7 +49,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.111055654S",
"startedAt": "2025-01-16T16:45:16.020248085Z",
"finishedAt": "2025-01-16T16:45:16.131303739Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
}
],
"total": 2,

View File

@ -25,7 +25,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 0,
@ -49,7 +49,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.111055654S",
"startedAt": "2025-01-16T16:45:16.020248085Z",
"finishedAt": "2025-01-16T16:45:16.131303739Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
}
],
"total": 2,

View File

@ -25,7 +25,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 0,
@ -49,7 +49,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.111055654S",
"startedAt": "2025-01-16T16:45:16.020248085Z",
"finishedAt": "2025-01-16T16:45:16.131303739Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
}
],
"total": 2,

View File

@ -30,7 +30,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
}
],
"total": 1,

View File

@ -30,7 +30,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
}
],
"total": 1,

View File

@ -29,7 +29,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
}
],
"total": 1,

View File

@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.0"
"upgradeTo": "v1.16.0"
},
"error": null,
"duration": "[duration]",

View File

@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.0"
"upgradeTo": "v1.16.0"
},
"error": null,
"duration": "[duration]",

View File

@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.0"
"upgradeTo": "v1.16.0"
},
"error": null,
"duration": "[duration]",

View File

@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.0"
"upgradeTo": "v1.16.0"
},
"stats": {
"totalNbTasks": 1,
@ -24,7 +24,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "a batch of tasks of type `upgradeDatabase` cannot be batched with any other type of task"
"batchStrategy": "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type."
},
{
"uid": 23,
@ -47,7 +47,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.004146631S",
"startedAt": "2025-01-23T11:38:57.012591321Z",
"finishedAt": "2025-01-23T11:38:57.016737952Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 22,
@ -71,7 +71,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.102738497S",
"startedAt": "2025-01-23T11:36:22.551906856Z",
"finishedAt": "2025-01-23T11:36:22.654645353Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 21,
@ -95,7 +95,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.005108474S",
"startedAt": "2025-01-23T11:36:04.132670526Z",
"finishedAt": "2025-01-23T11:36:04.137779Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 20,
@ -119,7 +119,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.027954894S",
"startedAt": "2025-01-23T11:35:53.631082795Z",
"finishedAt": "2025-01-23T11:35:53.659037689Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 19,
@ -142,7 +142,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.006903297S",
"startedAt": "2025-01-20T11:50:52.874106134Z",
"finishedAt": "2025-01-20T11:50:52.881009431Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 18,
@ -171,7 +171,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000481257S",
"startedAt": "2025-01-20T11:48:04.92820416Z",
"finishedAt": "2025-01-20T11:48:04.928685417Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 17,
@ -194,7 +194,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000407005S",
"startedAt": "2025-01-20T11:47:53.509403957Z",
"finishedAt": "2025-01-20T11:47:53.509810962Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 16,
@ -217,7 +217,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000403716S",
"startedAt": "2025-01-20T11:47:48.430653005Z",
"finishedAt": "2025-01-20T11:47:48.431056721Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 15,
@ -240,7 +240,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000417016S",
"startedAt": "2025-01-20T11:47:42.429678617Z",
"finishedAt": "2025-01-20T11:47:42.430095633Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 14,
@ -264,7 +264,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT12.086284842S",
"startedAt": "2025-01-20T11:47:03.092181576Z",
"finishedAt": "2025-01-20T11:47:15.178466418Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 13,
@ -296,7 +296,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.011506614S",
"startedAt": "2025-01-16T17:18:43.29334923Z",
"finishedAt": "2025-01-16T17:18:43.304855844Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 12,
@ -324,7 +324,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007640163S",
"startedAt": "2025-01-16T17:02:52.539749853Z",
"finishedAt": "2025-01-16T17:02:52.547390016Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 11,
@ -347,7 +347,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007307840S",
"startedAt": "2025-01-16T17:01:14.112756687Z",
"finishedAt": "2025-01-16T17:01:14.120064527Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 10,
@ -375,7 +375,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007391353S",
"startedAt": "2025-01-16T17:00:29.201180268Z",
"finishedAt": "2025-01-16T17:00:29.208571621Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 9,
@ -403,7 +403,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007445825S",
"startedAt": "2025-01-16T17:00:15.77629445Z",
"finishedAt": "2025-01-16T17:00:15.783740275Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 8,
@ -436,7 +436,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.012020083S",
"startedAt": "2025-01-16T16:59:42.744086671Z",
"finishedAt": "2025-01-16T16:59:42.756106754Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 7,
@ -463,7 +463,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007440092S",
"startedAt": "2025-01-16T16:58:41.2155771Z",
"finishedAt": "2025-01-16T16:58:41.223017192Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 6,
@ -490,7 +490,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007565161S",
"startedAt": "2025-01-16T16:54:51.940332781Z",
"finishedAt": "2025-01-16T16:54:51.947897942Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 5,
@ -516,7 +516,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.016307263S",
"startedAt": "2025-01-16T16:53:19.913351957Z",
"finishedAt": "2025-01-16T16:53:19.92965922Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 4,
@ -540,7 +540,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.087655941S",
"startedAt": "2025-01-16T16:52:32.631145531Z",
"finishedAt": "2025-01-16T16:52:32.718801472Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 3,
@ -565,7 +565,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007593573S",
"startedAt": "2025-01-16T16:47:53.677901409Z",
"finishedAt": "2025-01-16T16:47:53.685494982Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 2,
@ -591,7 +591,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.017769760S",
"startedAt": "2025-01-16T16:47:41.211587682Z",
"finishedAt": "2025-01-16T16:47:41.229357442Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 1,
@ -615,7 +615,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.066095506S",
"startedAt": "2025-01-16T16:47:10.217299609Z",
"finishedAt": "2025-01-16T16:47:10.283395115Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 0,
@ -639,7 +639,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.111055654S",
"startedAt": "2025-01-16T16:45:16.020248085Z",
"finishedAt": "2025-01-16T16:45:16.131303739Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
}
],
"total": 25,

View File

@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.0"
"upgradeTo": "v1.16.0"
},
"error": null,
"duration": "[duration]",

View File

@ -1,7 +1,10 @@
use std::collections::BTreeMap;
use std::sync::atomic::AtomicUsize;
use std::time::Duration;
use meili_snap::{json_string, snapshot};
use reqwest::IntoUrl;
use tokio::sync::mpsc;
use wiremock::matchers::{method, path};
use wiremock::{Mock, MockServer, Request, ResponseTemplate};
@ -334,6 +337,41 @@ async fn create_mock_raw() -> (MockServer, Value) {
(mock_server, embedder_settings)
}
async fn create_faulty_mock_raw(sender: mpsc::Sender<()>) -> (MockServer, Value) {
let mock_server = MockServer::start().await;
let count = AtomicUsize::new(0);
Mock::given(method("POST"))
.and(path("/"))
.respond_with(move |_req: &Request| {
let count = count.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
if count >= 5 {
let _ = sender.try_send(());
ResponseTemplate::new(500)
.set_delay(Duration::from_secs(u64::MAX)) // Make the response hang forever
.set_body_string("Service Unavailable")
} else {
ResponseTemplate::new(500).set_body_string("Service Unavailable")
}
})
.mount(&mock_server)
.await;
let url = mock_server.uri();
let embedder_settings = json!({
"source": "rest",
"url": url,
"dimensions": 3,
"request": "{{text}}",
"response": "{{embedding}}",
"documentTemplate": "{{doc.name}}"
});
(mock_server, embedder_settings)
}
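
How the faulty mock above drives the new test: the first five requests fail immediately with HTTP 500, so the batch accumulates failed embedder requests; the sixth request signals the test through the mpsc channel and then never answers, freezing the batch in `processing` so its live stats can be inspected. Condensed usage, mirroring `last_error_stats` below:

let (sender, mut receiver) = mpsc::channel(10);
let (_mock, settings) = create_faulty_mock_raw(sender).await;
// ...configure an embedder from `settings` and add documents...
// Wait for the mock's signal rather than the task: the task never finishes.
receiver.recv().await;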
pub async fn post<T: IntoUrl>(url: T, text: &str) -> reqwest::Result<reqwest::Response> {
reqwest::Client::builder().build()?.post(url).json(&json!(text)).send().await
}
@ -370,13 +408,13 @@ async fn bad_request() {
.await;
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request`: \"{{text}}\" not found",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
}
"###);
{
"message": "Error while generating embeddings: user error: in `request`: \"{{text}}\" not found\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
}
"###);
// A repeat string appears inside a repeated value
let (response, code) = index
@ -399,7 +437,7 @@ async fn bad_request() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request.input.input`: \"{{..}}\" appears nested inside of a value that is itself repeated",
"message": "Error while generating embeddings: user error: in `request.input.input`: \"{{..}}\" appears nested inside of a value that is itself repeated\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -422,7 +460,7 @@ async fn bad_request() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request.input.repeat`: \"{{..}}\" appears outside of an array",
"message": "Error while generating embeddings: user error: in `request.input.repeat`: \"{{..}}\" appears outside of an array\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -445,7 +483,7 @@ async fn bad_request() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request.input`: \"{{..}}\" expected at position #1, but found at position #0",
"message": "Error while generating embeddings: user error: in `request.input`: \"{{..}}\" expected at position #1, but found at position #0\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -468,7 +506,7 @@ async fn bad_request() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request.input`: \"{{..}}\" expected at position #1, but found at position #2",
"message": "Error while generating embeddings: user error: in `request.input`: \"{{..}}\" expected at position #1, but found at position #2\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -491,7 +529,7 @@ async fn bad_request() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request.input[0]`: Expected \"{{text}}\" inside of the repeated value",
"message": "Error while generating embeddings: user error: in `request.input[0]`: Expected \"{{text}}\" inside of the repeated value\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -518,7 +556,7 @@ async fn bad_request() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request.data`: Found \"{{..}}\", but it was already present in `request.input`",
"message": "Error while generating embeddings: user error: in `request.data`: Found \"{{..}}\", but it was already present in `request.input`\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -539,7 +577,7 @@ async fn bad_request() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request.data`: Found \"{{text}}\", but it was already present in `request.input`",
"message": "Error while generating embeddings: user error: in `request.data`: Found \"{{text}}\", but it was already present in `request.input`\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -560,7 +598,7 @@ async fn bad_request() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request.repeated.data[1]`: Found \"{{text}}\", but it was already present in `request.repeated.input`",
"message": "Error while generating embeddings: user error: in `request.repeated.data[1]`: Found \"{{text}}\", but it was already present in `request.repeated.input`\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -581,7 +619,7 @@ async fn bad_request() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request.data`: Found \"{{text}}\", but it was already present in `request.input[0]` (repeated)",
"message": "Error while generating embeddings: user error: in `request.data`: Found \"{{text}}\", but it was already present in `request.input[0]` (repeated)\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -882,7 +920,7 @@ async fn bad_settings() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request`: \"{{text}}\" not found",
"message": "Error while generating embeddings: user error: in `request`: \"{{text}}\" not found\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -2111,3 +2149,71 @@ async fn searchable_reindex() {
}
"###);
}
#[actix_rt::test]
async fn last_error_stats() {
let (sender, mut receiver) = mpsc::channel(10);
let (_mock, setting) = create_faulty_mock_raw(sender).await;
let server = get_server_vector().await;
let index = server.index("doggo");
let (response, code) = index
.update_settings(json!({
"embedders": {
"rest": setting,
},
}))
.await;
snapshot!(code, @"202 Accepted");
let task = server.wait_task(response.uid()).await;
snapshot!(task["status"], @r###""succeeded""###);
let documents = json!([
{"id": 0, "name": "will_return_500"},
{"id": 1, "name": "will_error"},
{"id": 2, "name": "must_error"},
]);
let (_value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
// The task will eventually fail, so let's not wait for it.
// Let's just wait for the server's signal.
receiver.recv().await;
let (response, _code) = index.filtered_batches(&[], &[], &[]).await;
snapshot!(json_string!(response["results"][0], {
".progress" => "[ignored]",
".stats.embedderRequests.total" => "[ignored]",
".stats.embedderRequests.failed" => "[ignored]",
".startedAt" => "[ignored]"
}), @r#"
{
"uid": 1,
"progress": "[ignored]",
"details": {
"receivedDocuments": 3,
"indexedDocuments": null
},
"stats": {
"totalNbTasks": 1,
"status": {
"processing": 1
},
"types": {
"documentAdditionOrUpdate": 1
},
"indexUids": {
"doggo": 1
},
"embedderRequests": {
"total": "[ignored]",
"failed": "[ignored]",
"lastError": "runtime error: received internal error HTTP 500 from embedding server\n - server replied with `Service Unavailable`"
}
},
"duration": null,
"startedAt": "[ignored]",
"finishedAt": null,
"batchStrategy": "batched all enqueued tasks"
}
"#);
}