Merge branch 'main' into default-key
@@ -28,7 +28,6 @@ actix-web = { version = "4.11.0", default-features = false, features = [
     "rustls-0_23",
 ] }
 anyhow = { version = "1.0.98", features = ["backtrace"] }
 async-trait = "0.1.88"
 bstr = "1.12.0"
 byte-unit = { version = "5.1.6", features = ["serde"] }
 bytes = "1.10.1"
@@ -170,5 +169,5 @@ german = ["meilisearch-types/german"]
 turkish = ["meilisearch-types/turkish"]

 [package.metadata.mini-dashboard]
-assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.19/build.zip"
-sha1 = "7974430d5277c97f67cf6e95eec6faaac2788834"
+assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.20/build.zip"
+sha1 = "82a7ddd7bf14bb5323c3d235d2b62892a98b6a59"
@@ -197,11 +197,13 @@ struct Infos {
     experimental_max_number_of_batched_tasks: usize,
     experimental_limit_batched_tasks_total_size: u64,
     experimental_network: bool,
+    experimental_multimodal: bool,
     experimental_chat_completions: bool,
     experimental_get_task_documents_route: bool,
     experimental_composite_embedders: bool,
     experimental_embedding_cache_entries: usize,
     experimental_no_snapshot_compaction: bool,
+    experimental_no_edition_2024_for_settings: bool,
     gpu_enabled: bool,
     db_path: bool,
     import_dump: bool,
@@ -286,8 +288,12 @@ impl Infos {
             ScheduleSnapshot::Enabled(interval) => Some(interval),
         };

-        let IndexerOpts { max_indexing_memory, max_indexing_threads, skip_index_budget: _ } =
-            indexer_options;
+        let IndexerOpts {
+            max_indexing_memory,
+            max_indexing_threads,
+            skip_index_budget: _,
+            experimental_no_edition_2024_for_settings,
+        } = indexer_options;

         let RuntimeTogglableFeatures {
             metrics,
@@ -298,6 +304,7 @@ impl Infos {
             get_task_documents_route,
             composite_embedders,
             chat_completions,
+            multimodal,
         } = features;

         // We're going to override every sensible information.
@@ -317,6 +324,7 @@ impl Infos {
             experimental_reduce_indexing_memory_usage,
             experimental_network: network,
             experimental_chat_completions: chat_completions,
+            experimental_multimodal: multimodal,
             experimental_get_task_documents_route: get_task_documents_route,
             experimental_composite_embedders: composite_embedders,
             experimental_embedding_cache_entries,
@@ -350,6 +358,7 @@ impl Infos {
             ssl_require_auth,
             ssl_resumption,
             ssl_tickets,
+            experimental_no_edition_2024_for_settings,
         }
     }
}
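Note the exhaustive destructures above: the analytics code deliberately avoids a `..` rest pattern, so adding a field to `IndexerOpts` or `RuntimeTogglableFeatures` without reporting it becomes a compile error. A minimal sketch of the idiom (names hypothetical):

```rust
// Illustrative only: no `..` rest pattern, so a new field added to `Opts`
// breaks this destructure until the reporting code names it too.
struct Opts {
    max_memory: Option<usize>,
    new_flag: bool,
}

fn report(opts: Opts) -> String {
    let Opts { max_memory, new_flag } = opts; // exhaustive on purpose
    format!("max_memory={max_memory:?} new_flag={new_flag}")
}

fn main() {
    println!("{}", report(Opts { max_memory: Some(512), new_flag: true }));
}
```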
@@ -76,8 +76,10 @@ pub enum MeilisearchHttpError {
     DocumentFormat(#[from] DocumentFormatError),
     #[error(transparent)]
     Join(#[from] JoinError),
-    #[error("Invalid request: missing `hybrid` parameter when `vector` is present.")]
+    #[error("Invalid request: missing `hybrid` parameter when `vector` or `media` are present.")]
     MissingSearchHybrid,
+    #[error("Invalid request: both `media` and `vector` parameters are present.")]
+    MediaAndVector,
 }

 impl MeilisearchHttpError {
@@ -111,6 +113,7 @@ impl ErrorCode for MeilisearchHttpError {
             MeilisearchHttpError::DocumentFormat(e) => e.error_code(),
             MeilisearchHttpError::Join(_) => Code::Internal,
             MeilisearchHttpError::MissingSearchHybrid => Code::MissingSearchHybrid,
+            MeilisearchHttpError::MediaAndVector => Code::InvalidSearchMediaAndVector,
             MeilisearchHttpError::FederationOptionsInNonFederatedRequest(_) => {
                 Code::InvalidMultiSearchFederationOptions
             }
@@ -37,6 +37,7 @@ use index_scheduler::{IndexScheduler, IndexSchedulerOptions};
 use meilisearch_auth::{open_auth_store_env, AuthController};
 use meilisearch_types::milli::constants::VERSION_MAJOR;
 use meilisearch_types::milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader};
+use meilisearch_types::milli::progress::{EmbedderStats, Progress};
 use meilisearch_types::milli::update::{
     default_thread_pool_and_threads, IndexDocumentsConfig, IndexDocumentsMethod, IndexerConfig,
 };
@@ -463,6 +464,7 @@ fn import_dump(
     index_scheduler: &mut IndexScheduler,
     auth: &mut AuthController,
 ) -> Result<(), anyhow::Error> {
+    let progress = Progress::default();
     let reader = File::open(dump_path)?;
     let mut dump_reader = dump::DumpReader::open(reader)?;

@@ -496,14 +498,20 @@ fn import_dump(
         keys.push(key);
     }

-    // 3. Import the runtime features and network
+    // 3. Import the `ChatCompletionSettings`s.
+    for result in dump_reader.chat_completions_settings()? {
+        let (name, settings) = result?;
+        index_scheduler.put_chat_settings(&name, &settings)?;
+    }
+
+    // 4. Import the runtime features and network
     let features = dump_reader.features()?.unwrap_or_default();
     index_scheduler.put_runtime_features(features)?;

     let network = dump_reader.network()?.cloned().unwrap_or_default();
     index_scheduler.put_network(network)?;

-    // 3.1 Use all cpus to process dump if `max_indexing_threads` not configured
+    // 4.1 Use all cpus to process dump if `max_indexing_threads` not configured
     let backup_config;
     let base_config = index_scheduler.indexer_config();

@@ -520,7 +528,7 @@ fn import_dump(
     // /!\ The tasks must be imported AFTER importing the indexes or else the scheduler might
     // try to process tasks while we're trying to import the indexes.

-    // 4. Import the indexes.
+    // 5. Import the indexes.
     for index_reader in dump_reader.indexes()? {
         let mut index_reader = index_reader?;
         let metadata = index_reader.metadata();
@@ -533,20 +541,20 @@ fn import_dump(
         let mut wtxn = index.write_txn()?;

         let mut builder = milli::update::Settings::new(&mut wtxn, &index, indexer_config);
-        // 4.1 Import the primary key if there is one.
+        // 5.1 Import the primary key if there is one.
         if let Some(ref primary_key) = metadata.primary_key {
             builder.set_primary_key(primary_key.to_string());
         }

-        // 4.2 Import the settings.
+        // 5.2 Import the settings.
         tracing::info!("Importing the settings.");
         let settings = index_reader.settings()?;
         apply_settings_to_builder(&settings, &mut builder);
-        builder
-            .execute(|indexing_step| tracing::debug!("update: {:?}", indexing_step), || false)?;
+        let embedder_stats: Arc<EmbedderStats> = Default::default();
+        builder.execute(&|| false, &progress, embedder_stats.clone())?;

-        // 4.3 Import the documents.
-        // 4.3.1 We need to recreate the grenad+obkv format accepted by the index.
+        // 5.3 Import the documents.
+        // 5.3.1 We need to recreate the grenad+obkv format accepted by the index.
         tracing::info!("Importing the documents.");
         let file = tempfile::tempfile()?;
         let mut builder = DocumentsBatchBuilder::new(BufWriter::new(file));
@@ -557,11 +565,11 @@ fn import_dump(
         // This flush the content of the batch builder.
         let file = builder.into_inner()?.into_inner()?;

-        // 4.3.2 We feed it to the milli index.
+        // 5.3.2 We feed it to the milli index.
         let reader = BufReader::new(file);
         let reader = DocumentsBatchReader::from_reader(reader)?;

-        let embedder_configs = index.embedding_configs(&wtxn)?;
+        let embedder_configs = index.embedding_configs().embedding_configs(&wtxn)?;
         let embedders = index_scheduler.embedders(uid.to_string(), embedder_configs)?;

         let builder = milli::update::IndexDocuments::new(
@@ -574,6 +582,7 @@ fn import_dump(
             },
             |indexing_step| tracing::trace!("update: {:?}", indexing_step),
             || false,
+            &embedder_stats,
         )?;

         let builder = builder.with_embedders(embedders);
@@ -588,15 +597,15 @@ fn import_dump(
         index_scheduler.refresh_index_stats(&uid)?;
     }

-    // 5. Import the queue
+    // 6. Import the queue
     let mut index_scheduler_dump = index_scheduler.register_dumped_task()?;
-    // 5.1. Import the batches
+    // 6.1. Import the batches
     for ret in dump_reader.batches()? {
         let batch = ret?;
         index_scheduler_dump.register_dumped_batch(batch)?;
     }

-    // 5.2. Import the tasks
+    // 6.2. Import the tasks
     for ret in dump_reader.tasks()? {
         let (task, file) = ret?;
         index_scheduler_dump.register_dumped_task(task, file)?;
@@ -15,6 +15,33 @@ lazy_static! {
         "Meilisearch number of degraded search requests"
     ))
     .expect("Can't create a metric");
+    pub static ref MEILISEARCH_CHAT_SEARCH_REQUESTS: IntCounterVec = register_int_counter_vec!(
+        opts!(
+            "meilisearch_chat_search_requests",
+            "Meilisearch number of search requests performed by the chat route itself"
+        ),
+        &["type"]
+    )
+    .expect("Can't create a metric");
+    pub static ref MEILISEARCH_CHAT_PROMPT_TOKENS_USAGE: IntCounterVec = register_int_counter_vec!(
+        opts!("meilisearch_chat_prompt_tokens_usage", "Meilisearch Chat Prompt Tokens Usage"),
+        &["workspace", "model"]
+    )
+    .expect("Can't create a metric");
+    pub static ref MEILISEARCH_CHAT_COMPLETION_TOKENS_USAGE: IntCounterVec =
+        register_int_counter_vec!(
+            opts!(
+                "meilisearch_chat_completion_tokens_usage",
+                "Meilisearch Chat Completion Tokens Usage"
+            ),
+            &["workspace", "model"]
+        )
+        .expect("Can't create a metric");
+    pub static ref MEILISEARCH_CHAT_TOTAL_TOKENS_USAGE: IntCounterVec = register_int_counter_vec!(
+        opts!("meilisearch_chat_total_tokens_usage", "Meilisearch Chat Total Tokens Usage"),
+        &["workspace", "model"]
+    )
+    .expect("Can't create a metric");
     pub static ref MEILISEARCH_DB_SIZE_BYTES: IntGauge =
         register_int_gauge!(opts!("meilisearch_db_size_bytes", "Meilisearch DB Size In Bytes"))
             .expect("Can't create a metric");
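For context, labeled counters from the `prometheus` crate keep one time series per label combination, which is how the token metrics above break down by workspace and model. A minimal standalone sketch (metric and label values hypothetical):

```rust
use lazy_static::lazy_static;
use prometheus::{opts, register_int_counter_vec, IntCounterVec};

lazy_static! {
    // Hypothetical counter, labeled the same way as the chat token metrics.
    static ref EXAMPLE_TOKENS: IntCounterVec = register_int_counter_vec!(
        opts!("example_tokens_usage", "Example tokens usage"),
        &["workspace", "model"]
    )
    .expect("Can't create a metric");
}

fn main() {
    // Each distinct (workspace, model) pair gets its own counter.
    EXAMPLE_TOKENS.with_label_values(&["default", "gpt-4o-mini"]).inc_by(42);
}
```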
@@ -53,6 +53,8 @@ const MEILI_EXPERIMENTAL_DUMPLESS_UPGRADE: &str = "MEILI_EXPERIMENTAL_DUMPLESS_U
 const MEILI_EXPERIMENTAL_REPLICATION_PARAMETERS: &str = "MEILI_EXPERIMENTAL_REPLICATION_PARAMETERS";
 const MEILI_EXPERIMENTAL_ENABLE_LOGS_ROUTE: &str = "MEILI_EXPERIMENTAL_ENABLE_LOGS_ROUTE";
 const MEILI_EXPERIMENTAL_CONTAINS_FILTER: &str = "MEILI_EXPERIMENTAL_CONTAINS_FILTER";
+const MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_SETTINGS: &str =
+    "MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_SETTINGS";
 const MEILI_EXPERIMENTAL_ENABLE_METRICS: &str = "MEILI_EXPERIMENTAL_ENABLE_METRICS";
 const MEILI_EXPERIMENTAL_SEARCH_QUEUE_SIZE: &str = "MEILI_EXPERIMENTAL_SEARCH_QUEUE_SIZE";
 const MEILI_EXPERIMENTAL_DROP_SEARCH_AFTER: &str = "MEILI_EXPERIMENTAL_DROP_SEARCH_AFTER";
@@ -62,7 +64,7 @@ const MEILI_EXPERIMENTAL_REDUCE_INDEXING_MEMORY_USAGE: &str =
 const MEILI_EXPERIMENTAL_MAX_NUMBER_OF_BATCHED_TASKS: &str =
     "MEILI_EXPERIMENTAL_MAX_NUMBER_OF_BATCHED_TASKS";
 const MEILI_EXPERIMENTAL_LIMIT_BATCHED_TASKS_TOTAL_SIZE: &str =
-    "MEILI_EXPERIMENTAL_LIMIT_BATCHED_TASKS_SIZE";
+    "MEILI_EXPERIMENTAL_LIMIT_BATCHED_TASKS_TOTAL_SIZE";
 const MEILI_EXPERIMENTAL_EMBEDDING_CACHE_ENTRIES: &str =
     "MEILI_EXPERIMENTAL_EMBEDDING_CACHE_ENTRIES";
 const MEILI_EXPERIMENTAL_NO_SNAPSHOT_COMPACTION: &str = "MEILI_EXPERIMENTAL_NO_SNAPSHOT_COMPACTION";
@@ -749,12 +751,25 @@ pub struct IndexerOpts {
     #[clap(skip)]
     #[serde(skip)]
     pub skip_index_budget: bool,

+    /// Experimental no edition 2024 for settings feature. For more information,
+    /// see: <https://github.com/orgs/meilisearch/discussions/847>
+    ///
+    /// Enables the experimental no edition 2024 for settings feature.
+    #[clap(long, env = MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_SETTINGS)]
+    #[serde(default)]
+    pub experimental_no_edition_2024_for_settings: bool,
 }

 impl IndexerOpts {
     /// Exports the values to their corresponding env vars if they are not set.
     pub fn export_to_env(self) {
-        let IndexerOpts { max_indexing_memory, max_indexing_threads, skip_index_budget: _ } = self;
+        let IndexerOpts {
+            max_indexing_memory,
+            max_indexing_threads,
+            skip_index_budget: _,
+            experimental_no_edition_2024_for_settings,
+        } = self;
         if let Some(max_indexing_memory) = max_indexing_memory.0 {
             export_to_env_if_not_present(
                 MEILI_MAX_INDEXING_MEMORY,
@@ -767,6 +782,12 @@ impl IndexerOpts {
                 max_indexing_threads.to_string(),
             );
         }
+        if experimental_no_edition_2024_for_settings {
+            export_to_env_if_not_present(
+                MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_SETTINGS,
+                experimental_no_edition_2024_for_settings.to_string(),
+            );
+        }
     }
 }

@@ -785,7 +806,12 @@ impl TryFrom<&IndexerOpts> for IndexerConfig {
             max_threads: *other.max_indexing_threads,
             max_positions_per_attributes: None,
             skip_index_budget: other.skip_index_budget,
-            ..Default::default()
+            experimental_no_edition_2024_for_settings: other
+                .experimental_no_edition_2024_for_settings,
+            chunk_compression_type: Default::default(),
+            chunk_compression_level: Default::default(),
+            documents_chunk_size: Default::default(),
+            max_nb_chunks: Default::default(),
         })
     }
 }
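Judging by its name, `export_to_env_if_not_present` seeds an environment variable from a CLI flag without clobbering a value the user already exported; a minimal sketch of that assumed behavior:

```rust
use std::env;

// Sketch of the assumed helper semantics: set only if absent.
fn export_to_env_if_not_present(key: &str, value: String) {
    if env::var_os(key).is_none() {
        // Caveat: mutating the environment is only safe early in startup,
        // before other threads read it (and is `unsafe` in edition 2024).
        env::set_var(key, value);
    }
}

fn main() {
    export_to_env_if_not_present("MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_SETTINGS", true.to_string());
    assert_eq!(
        env::var("MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_SETTINGS").as_deref(),
        Ok("true")
    );
}
```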
crates/meilisearch/src/routes/chats/chat_completion_analytics.rs (new file)
@@ -0,0 +1,135 @@
use std::collections::BinaryHeap;

use serde_json::{json, Value};

use crate::analytics::Aggregate;

#[derive(Default)]
pub struct ChatCompletionAggregator {
    // requests
    total_received: usize,
    total_succeeded: usize,
    time_spent: BinaryHeap<usize>,

    // chat completion specific metrics
    total_messages: usize,
    total_streamed_requests: usize,
    total_non_streamed_requests: usize,

    // model usage tracking
    models_used: std::collections::HashMap<String, usize>,
}

impl ChatCompletionAggregator {
    pub fn from_request(model: &str, message_count: usize, is_stream: bool) -> Self {
        let mut models_used = std::collections::HashMap::new();
        models_used.insert(model.to_string(), 1);

        Self {
            total_received: 1,
            total_succeeded: 0,
            time_spent: BinaryHeap::new(),

            total_messages: message_count,
            total_streamed_requests: if is_stream { 1 } else { 0 },
            total_non_streamed_requests: if is_stream { 0 } else { 1 },

            models_used,
        }
    }

    pub fn succeed(&mut self, time_spent: std::time::Duration) {
        self.total_succeeded += 1;
        self.time_spent.push(time_spent.as_millis() as usize);
    }
}

impl Aggregate for ChatCompletionAggregator {
    fn event_name(&self) -> &'static str {
        "Chat Completion POST"
    }

    fn aggregate(mut self: Box<Self>, new: Box<Self>) -> Box<Self> {
        let Self {
            total_received,
            total_succeeded,
            mut time_spent,
            total_messages,
            total_streamed_requests,
            total_non_streamed_requests,
            models_used,
            ..
        } = *new;

        // Aggregate time spent
        self.time_spent.append(&mut time_spent);

        // Aggregate counters
        self.total_received = self.total_received.saturating_add(total_received);
        self.total_succeeded = self.total_succeeded.saturating_add(total_succeeded);
        self.total_messages = self.total_messages.saturating_add(total_messages);
        self.total_streamed_requests =
            self.total_streamed_requests.saturating_add(total_streamed_requests);
        self.total_non_streamed_requests =
            self.total_non_streamed_requests.saturating_add(total_non_streamed_requests);

        // Aggregate model usage
        for (model, count) in models_used {
            *self.models_used.entry(model).or_insert(0) += count;
        }

        self
    }

    fn into_event(self: Box<Self>) -> Value {
        let Self {
            total_received,
            total_succeeded,
            time_spent,
            total_messages,
            total_streamed_requests,
            total_non_streamed_requests,
            models_used,
            ..
        } = *self;

        // Compute time statistics
        let time_spent: Vec<usize> = time_spent.into_sorted_vec();
        let (max_time, min_time, avg_time) = if time_spent.is_empty() {
            (0, 0, 0)
        } else {
            let max_time = time_spent.last().unwrap_or(&0);
            let min_time = time_spent.first().unwrap_or(&0);
            let sum: usize = time_spent.iter().sum();
            let avg_time = sum / time_spent.len();
            (*max_time, *min_time, avg_time)
        };

        // Compute average messages per request
        let avg_messages_per_request =
            if total_received > 0 { total_messages as f64 / total_received as f64 } else { 0.0 };

        // Compute streaming vs non-streaming proportions
        let streaming_ratio = if total_received > 0 {
            total_streamed_requests as f64 / total_received as f64
        } else {
            0.0
        };

        json!({
            "total_received": total_received,
            "total_succeeded": total_succeeded,
            "time_spent": {
                "max": max_time,
                "min": min_time,
                "avg": avg_time
            },
            "total_messages": total_messages,
            "avg_messages_per_request": avg_messages_per_request,
            "total_streamed_requests": total_streamed_requests,
            "total_non_streamed_requests": total_non_streamed_requests,
            "streaming_ratio": streaming_ratio,
            "models_used": models_used,
        })
    }
}
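A quick sketch of how this aggregator is expected to flow through the analytics pipeline; the trait calls are taken from the file above, but the harness and model name are hypothetical (in production the analytics runtime drives these calls):

```rust
#[cfg(test)]
mod sketch {
    use super::*;

    #[test]
    fn merges_two_requests() {
        let mut a = ChatCompletionAggregator::from_request("gpt-4o-mini", 3, true);
        a.succeed(std::time::Duration::from_millis(120));
        let b = ChatCompletionAggregator::from_request("gpt-4o-mini", 1, false);

        // Counters add up; `time_spent` samples pool across events.
        let merged = Box::new(a).aggregate(Box::new(b));
        let event = merged.into_event();
        assert_eq!(event["total_received"], 2);
        assert_eq!(event["streaming_ratio"], 0.5); // 1 streamed of 2 received
    }
}
```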
@ -13,9 +13,9 @@ use async_openai::types::{
|
||||
ChatCompletionRequestDeveloperMessageContent, ChatCompletionRequestMessage,
|
||||
ChatCompletionRequestSystemMessage, ChatCompletionRequestSystemMessageContent,
|
||||
ChatCompletionRequestToolMessage, ChatCompletionRequestToolMessageContent,
|
||||
ChatCompletionStreamResponseDelta, ChatCompletionToolArgs, ChatCompletionToolType,
|
||||
CreateChatCompletionRequest, CreateChatCompletionStreamResponse, FinishReason, FunctionCall,
|
||||
FunctionCallStream, FunctionObjectArgs,
|
||||
ChatCompletionStreamOptions, ChatCompletionStreamResponseDelta, ChatCompletionToolArgs,
|
||||
ChatCompletionToolType, CreateChatCompletionRequest, CreateChatCompletionStreamResponse,
|
||||
FinishReason, FunctionCall, FunctionCallStream, FunctionObjectArgs,
|
||||
};
|
||||
use async_openai::Client;
|
||||
use bumpalo::Bump;
|
||||
@ -36,6 +36,7 @@ use serde_json::json;
|
||||
use tokio::runtime::Handle;
|
||||
use tokio::sync::mpsc::error::SendError;
|
||||
|
||||
use super::chat_completion_analytics::ChatCompletionAggregator;
|
||||
use super::config::Config;
|
||||
use super::errors::{MistralError, OpenAiOutsideError, StreamErrorEvent};
|
||||
use super::utils::format_documents;
|
||||
@ -43,10 +44,15 @@ use super::{
|
||||
ChatsParam, MEILI_APPEND_CONVERSATION_MESSAGE_NAME, MEILI_SEARCH_IN_INDEX_FUNCTION_NAME,
|
||||
MEILI_SEARCH_PROGRESS_NAME, MEILI_SEARCH_SOURCES_NAME,
|
||||
};
|
||||
use crate::analytics::Analytics;
|
||||
use crate::error::MeilisearchHttpError;
|
||||
use crate::extractors::authentication::policies::ActionPolicy;
|
||||
use crate::extractors::authentication::{extract_token_from_request, GuardedData, Policy as _};
|
||||
use crate::metrics::MEILISEARCH_DEGRADED_SEARCH_REQUESTS;
|
||||
use crate::metrics::{
|
||||
MEILISEARCH_CHAT_COMPLETION_TOKENS_USAGE, MEILISEARCH_CHAT_PROMPT_TOKENS_USAGE,
|
||||
MEILISEARCH_CHAT_SEARCH_REQUESTS, MEILISEARCH_CHAT_TOTAL_TOKENS_USAGE,
|
||||
MEILISEARCH_DEGRADED_SEARCH_REQUESTS,
|
||||
};
|
||||
use crate::routes::chats::utils::SseEventSender;
|
||||
use crate::routes::indexes::search::search_kind;
|
||||
use crate::search::{add_search_rules, prepare_search, search_from_kind, SearchQuery};
|
||||
@ -64,6 +70,7 @@ async fn chat(
|
||||
req: HttpRequest,
|
||||
search_queue: web::Data<SearchQueue>,
|
||||
web::Json(chat_completion): web::Json<CreateChatCompletionRequest>,
|
||||
analytics: web::Data<Analytics>,
|
||||
) -> impl Responder {
|
||||
let ChatsParam { workspace_uid } = chats_param.into_inner();
|
||||
|
||||
@ -76,6 +83,7 @@ async fn chat(
|
||||
&workspace_uid,
|
||||
req,
|
||||
chat_completion,
|
||||
analytics,
|
||||
)
|
||||
.await,
|
||||
)
|
||||
@ -88,6 +96,7 @@ async fn chat(
|
||||
&workspace_uid,
|
||||
req,
|
||||
chat_completion,
|
||||
analytics,
|
||||
)
|
||||
.await,
|
||||
)
|
||||
@ -281,7 +290,7 @@ async fn process_search_request(
|
||||
let output = output?;
|
||||
let mut documents = Vec::new();
|
||||
if let Ok((ref rtxn, ref search_result)) = output {
|
||||
// aggregate.succeed(search_result);
|
||||
MEILISEARCH_CHAT_SEARCH_REQUESTS.with_label_values(&["internal"]).inc();
|
||||
if search_result.degraded {
|
||||
MEILISEARCH_DEGRADED_SEARCH_REQUESTS.inc();
|
||||
}
|
||||
@ -315,9 +324,18 @@ async fn non_streamed_chat(
|
||||
workspace_uid: &str,
|
||||
req: HttpRequest,
|
||||
chat_completion: CreateChatCompletionRequest,
|
||||
analytics: web::Data<Analytics>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
index_scheduler.features().check_chat_completions("using the /chats chat completions route")?;
|
||||
|
||||
// Create analytics aggregator
|
||||
let aggregate = ChatCompletionAggregator::from_request(
|
||||
&chat_completion.model,
|
||||
chat_completion.messages.len(),
|
||||
false, // non_streamed_chat is not streaming
|
||||
);
|
||||
let start_time = std::time::Instant::now();
|
||||
|
||||
if let Some(n) = chat_completion.n.filter(|&n| n != 1) {
|
||||
return Err(ResponseError::from_msg(
|
||||
format!("You tried to specify n = {n} but only single choices are supported (n = 1)."),
|
||||
@ -414,6 +432,11 @@ async fn non_streamed_chat(
|
||||
}
|
||||
}
|
||||
|
||||
// Record success in analytics
|
||||
let mut aggregate = aggregate;
|
||||
aggregate.succeed(start_time.elapsed());
|
||||
analytics.publish(aggregate, &req);
|
||||
|
||||
Ok(HttpResponse::Ok().json(response))
|
||||
}
|
||||
|
||||
@ -424,6 +447,7 @@ async fn streamed_chat(
|
||||
workspace_uid: &str,
|
||||
req: HttpRequest,
|
||||
mut chat_completion: CreateChatCompletionRequest,
|
||||
analytics: web::Data<Analytics>,
|
||||
) -> Result<impl Responder, ResponseError> {
|
||||
index_scheduler.features().check_chat_completions("using the /chats chat completions route")?;
|
||||
let filters = index_scheduler.filters();
|
||||
@ -445,6 +469,14 @@ async fn streamed_chat(
|
||||
}
|
||||
};
|
||||
|
||||
// Create analytics aggregator
|
||||
let mut aggregate = ChatCompletionAggregator::from_request(
|
||||
&chat_completion.model,
|
||||
chat_completion.messages.len(),
|
||||
true, // streamed_chat is always streaming
|
||||
);
|
||||
let start_time = std::time::Instant::now();
|
||||
|
||||
let config = Config::new(&chat_settings);
|
||||
let auth_token = extract_token_from_request(&req)?.unwrap().to_string();
|
||||
let system_role = chat_settings.source.system_role(&chat_completion.model);
|
||||
@ -460,6 +492,7 @@ async fn streamed_chat(
|
||||
|
||||
let (tx, rx) = tokio::sync::mpsc::channel(10);
|
||||
let tx = SseEventSender::new(tx);
|
||||
let workspace_uid = workspace_uid.to_string();
|
||||
let _join_handle = Handle::current().spawn(async move {
|
||||
let client = Client::with_config(config.clone());
|
||||
let mut global_tool_calls = HashMap::<u32, Call>::new();
|
||||
@ -469,6 +502,7 @@ async fn streamed_chat(
|
||||
let output = run_conversation(
|
||||
&index_scheduler,
|
||||
&auth_ctrl,
|
||||
&workspace_uid,
|
||||
&search_queue,
|
||||
&auth_token,
|
||||
&client,
|
||||
@ -490,6 +524,10 @@ async fn streamed_chat(
|
||||
let _ = tx.stop().await;
|
||||
});
|
||||
|
||||
// Record success in analytics after the stream is set up
|
||||
aggregate.succeed(start_time.elapsed());
|
||||
analytics.publish(aggregate, &req);
|
||||
|
||||
Ok(Sse::from_infallible_receiver(rx).with_retry_duration(Duration::from_secs(10)))
|
||||
}
|
||||
|
||||
@ -502,6 +540,7 @@ async fn run_conversation<C: async_openai::config::Config>(
|
||||
Data<IndexScheduler>,
|
||||
>,
|
||||
auth_ctrl: &web::Data<AuthController>,
|
||||
workspace_uid: &str,
|
||||
search_queue: &web::Data<SearchQueue>,
|
||||
auth_token: &str,
|
||||
client: &Client<C>,
|
||||
@ -511,13 +550,34 @@ async fn run_conversation<C: async_openai::config::Config>(
|
||||
global_tool_calls: &mut HashMap<u32, Call>,
|
||||
function_support: FunctionSupport,
|
||||
) -> Result<ControlFlow<Option<FinishReason>, ()>, SendError<Event>> {
|
||||
use DbChatCompletionSource::*;
|
||||
|
||||
let mut finish_reason = None;
|
||||
chat_completion.stream_options = match source {
|
||||
OpenAi | AzureOpenAi => Some(ChatCompletionStreamOptions { include_usage: true }),
|
||||
Mistral | VLlm => None,
|
||||
};
|
||||
|
||||
// safety: unwrap: can only happens if `stream` was set to `false`
|
||||
let mut response = client.chat().create_stream(chat_completion.clone()).await.unwrap();
|
||||
while let Some(result) = response.next().await {
|
||||
match result {
|
||||
Ok(resp) => {
|
||||
let choice = &resp.choices[0];
|
||||
if let Some(usage) = resp.usage.as_ref() {
|
||||
MEILISEARCH_CHAT_PROMPT_TOKENS_USAGE
|
||||
.with_label_values(&[workspace_uid, &chat_completion.model])
|
||||
.inc_by(usage.prompt_tokens as u64);
|
||||
MEILISEARCH_CHAT_COMPLETION_TOKENS_USAGE
|
||||
.with_label_values(&[workspace_uid, &chat_completion.model])
|
||||
.inc_by(usage.completion_tokens as u64);
|
||||
MEILISEARCH_CHAT_TOTAL_TOKENS_USAGE
|
||||
.with_label_values(&[workspace_uid, &chat_completion.model])
|
||||
.inc_by(usage.total_tokens as u64);
|
||||
}
|
||||
let choice = match resp.choices.first() {
|
||||
Some(choice) => choice,
|
||||
None => break,
|
||||
};
|
||||
finish_reason = choice.finish_reason;
|
||||
|
||||
let ChatCompletionStreamResponseDelta { ref tool_calls, .. } = &choice.delta;
|
||||
|
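With `include_usage` set, OpenAI-compatible streams emit one extra final chunk whose `usage` field carries the token totals and whose `choices` array is empty, which is why the loop above records usage before consulting `choices.first()` and breaks when no choice remains. A reduced sketch of that consumption order (struct shapes are simplified stand-ins, not the real `async_openai` types):

```rust
// Simplified stand-ins for the streaming response types.
struct Usage { total_tokens: u32 }
struct Chunk { choices: Vec<String>, usage: Option<Usage> }

fn consume(stream: Vec<Chunk>) -> (String, u64) {
    let (mut text, mut tokens) = (String::new(), 0u64);
    for chunk in stream {
        // Usage first: the final chunk carries totals but no choices.
        if let Some(usage) = &chunk.usage {
            tokens += usage.total_tokens as u64;
        }
        match chunk.choices.first() {
            Some(delta) => text.push_str(delta),
            None => break, // usage-only chunk ends the stream
        }
    }
    (text, tokens)
}

fn main() {
    let stream = vec![
        Chunk { choices: vec!["Hello".into()], usage: None },
        Chunk { choices: vec![], usage: Some(Usage { total_tokens: 42 }) },
    ];
    assert_eq!(consume(stream), ("Hello".to_string(), 42));
}
```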
@@ -13,7 +13,7 @@ impl Config {
     pub fn new(chat_settings: &DbChatSettings) -> Self {
         use meilisearch_types::features::ChatCompletionSource::*;
         match chat_settings.source {
-            OpenAi | Mistral | Gemini | VLlm => {
+            OpenAi | Mistral | VLlm => {
                 let mut config = OpenAIConfig::default();
                 if let Some(org_id) = chat_settings.org_id.as_ref() {
                     config = config.with_org_id(org_id);
@@ -19,6 +19,7 @@ use crate::extractors::authentication::policies::ActionPolicy;
 use crate::extractors::authentication::GuardedData;
 use crate::routes::PAGINATION_DEFAULT_LIMIT;

+mod chat_completion_analytics;
 pub mod chat_completions;
 mod config;
 mod errors;
@@ -218,7 +218,6 @@ pub enum ChatCompletionSource {
     #[default]
     OpenAi,
     Mistral,
-    Gemini,
     AzureOpenAi,
     VLlm,
 }
@@ -229,7 +228,6 @@ impl From<ChatCompletionSource> for DbChatCompletionSource {
         match source {
             OpenAi => DbChatCompletionSource::OpenAi,
             Mistral => DbChatCompletionSource::Mistral,
-            Gemini => DbChatCompletionSource::Gemini,
             AzureOpenAi => DbChatCompletionSource::AzureOpenAi,
             VLlm => DbChatCompletionSource::VLlm,
         }
crates/meilisearch/src/routes/export.rs (new file)
@@ -0,0 +1,183 @@
use std::collections::BTreeMap;
use std::convert::Infallible;
use std::str::FromStr as _;

use actix_web::web::{self, Data};
use actix_web::{HttpRequest, HttpResponse};
use byte_unit::Byte;
use deserr::actix_web::AwebJson;
use deserr::Deserr;
use index_scheduler::IndexScheduler;
use meilisearch_types::deserr::DeserrJsonError;
use meilisearch_types::error::deserr_codes::*;
use meilisearch_types::error::ResponseError;
use meilisearch_types::index_uid_pattern::IndexUidPattern;
use meilisearch_types::keys::actions;
use meilisearch_types::tasks::{ExportIndexSettings as DbExportIndexSettings, KindWithContent};
use serde::Serialize;
use serde_json::Value;
use tracing::debug;
use utoipa::{OpenApi, ToSchema};

use crate::analytics::Analytics;
use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::GuardedData;
use crate::routes::export_analytics::ExportAnalytics;
use crate::routes::{get_task_id, is_dry_run, SummarizedTaskView};
use crate::Opt;

#[derive(OpenApi)]
#[openapi(
    paths(export),
    tags((
        name = "Export",
        description = "The `/export` route allows you to trigger an export process to a remote Meilisearch instance.",
        external_docs(url = "https://www.meilisearch.com/docs/reference/api/export"),
    )),
)]
pub struct ExportApi;

pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(web::resource("").route(web::post().to(export)));
}

#[utoipa::path(
    post,
    path = "",
    tag = "Export",
    security(("Bearer" = ["export", "*"])),
    responses(
        (status = 202, description = "Export successfully enqueued", body = SummarizedTaskView, content_type = "application/json", example = json!(
            {
                "taskUid": 1,
                "status": "enqueued",
                "type": "export",
                "enqueuedAt": "2021-08-11T09:25:53.000000Z"
            })),
        (status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
            {
                "message": "The Authorization header is missing. It must use the bearer authorization method.",
                "code": "missing_authorization_header",
                "type": "auth",
                "link": "https://docs.meilisearch.com/errors#missing_authorization_header"
            }
        )),
    )
)]
async fn export(
    index_scheduler: GuardedData<ActionPolicy<{ actions::EXPORT }>, Data<IndexScheduler>>,
    export: AwebJson<Export, DeserrJsonError>,
    req: HttpRequest,
    opt: web::Data<Opt>,
    analytics: Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
    let export = export.into_inner();
    debug!(returns = ?export, "Trigger export");

    let analytics_aggregate = ExportAnalytics::from_export(&export);

    let Export { url, api_key, payload_size, indexes } = export;

    let indexes = match indexes {
        Some(indexes) => indexes
            .into_iter()
            .map(|(pattern, ExportIndexSettings { filter, override_settings })| {
                (pattern, DbExportIndexSettings { filter, override_settings })
            })
            .collect(),
        None => BTreeMap::from([(
            IndexUidPattern::new_unchecked("*"),
            DbExportIndexSettings::default(),
        )]),
    };

    let task = KindWithContent::Export {
        url,
        api_key,
        payload_size: payload_size.map(|ByteWithDeserr(bytes)| bytes),
        indexes,
    };
    let uid = get_task_id(&req, &opt)?;
    let dry_run = is_dry_run(&req, &opt)?;
    let task: SummarizedTaskView =
        tokio::task::spawn_blocking(move || index_scheduler.register(task, uid, dry_run))
            .await??
            .into();

    analytics.publish(analytics_aggregate, &req);

    Ok(HttpResponse::Ok().json(task))
}

#[derive(Debug, Deserr, ToSchema, Serialize)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
pub struct Export {
    #[schema(value_type = Option<String>, example = json!("https://ms-1234.heaven.meilisearch.com"))]
    #[serde(default)]
    #[deserr(default, error = DeserrJsonError<InvalidExportUrl>)]
    pub url: String,
    #[schema(value_type = Option<String>, example = json!("1234abcd"))]
    #[serde(default)]
    #[deserr(default, error = DeserrJsonError<InvalidExportApiKey>)]
    pub api_key: Option<String>,
    #[schema(value_type = Option<String>, example = json!("24MiB"))]
    #[serde(default)]
    #[deserr(default, error = DeserrJsonError<InvalidExportPayloadSize>)]
    pub payload_size: Option<ByteWithDeserr>,
    #[schema(value_type = Option<BTreeMap<String, ExportIndexSettings>>, example = json!({ "*": { "filter": null } }))]
    #[deserr(default)]
    #[serde(default)]
    pub indexes: Option<BTreeMap<IndexUidPattern, ExportIndexSettings>>,
}

/// A wrapper around the `Byte` type that implements `Deserr`.
#[derive(Debug, Serialize)]
#[serde(transparent)]
pub struct ByteWithDeserr(pub Byte);

impl<E> deserr::Deserr<E> for ByteWithDeserr
where
    E: deserr::DeserializeError,
{
    fn deserialize_from_value<V: deserr::IntoValue>(
        value: deserr::Value<V>,
        location: deserr::ValuePointerRef,
    ) -> Result<Self, E> {
        use deserr::{ErrorKind, Value, ValueKind};
        match value {
            Value::Integer(integer) => Ok(ByteWithDeserr(Byte::from_u64(integer))),
            Value::String(string) => Byte::from_str(&string).map(ByteWithDeserr).map_err(|e| {
                deserr::take_cf_content(E::error::<Infallible>(
                    None,
                    ErrorKind::Unexpected { msg: e.to_string() },
                    location,
                ))
            }),
            actual => Err(deserr::take_cf_content(E::error(
                None,
                ErrorKind::IncorrectValueKind {
                    actual,
                    accepted: &[ValueKind::Integer, ValueKind::String],
                },
                location,
            ))),
        }
    }
}

#[derive(Debug, Deserr, ToSchema, Serialize)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
pub struct ExportIndexSettings {
    #[schema(value_type = Option<String>, example = json!("genres = action"))]
    #[serde(default)]
    #[deserr(default, error = DeserrJsonError<InvalidExportIndexFilter>)]
    pub filter: Option<Value>,
    #[schema(value_type = Option<bool>, example = json!(true))]
    #[serde(default)]
    #[deserr(default, error = DeserrJsonError<InvalidExportIndexOverrideSettings>)]
    pub override_settings: bool,
}
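Putting the route together, a request body the `Export` deserializer above would accept might look like this; values are illustrative, and `payloadSize` takes either a byte string or a raw integer thanks to `ByteWithDeserr`:

```rust
use serde_json::json;

fn main() {
    // Illustrative POST /export body; URL, key, and index pattern are placeholders.
    let body = json!({
        "url": "https://ms-1234.heaven.meilisearch.com",
        "apiKey": "1234abcd",
        "payloadSize": "24MiB", // "24MiB" and 25165824 both parse
        "indexes": {
            "movies-*": { "filter": "genres = action", "overrideSettings": true }
        }
    });
    println!("{body}");
}
```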
crates/meilisearch/src/routes/export_analytics.rs (new file)
@@ -0,0 +1,111 @@
use url::Url;

use crate::analytics::Aggregate;
use crate::routes::export::Export;

#[derive(Default)]
pub struct ExportAnalytics {
    total_received: usize,
    has_api_key: bool,
    sum_exports_meilisearch_cloud: usize,
    sum_index_patterns: usize,
    sum_patterns_with_filter: usize,
    sum_patterns_with_override_settings: usize,
    payload_sizes: Vec<u64>,
}

impl ExportAnalytics {
    pub fn from_export(export: &Export) -> Self {
        let Export { url, api_key, payload_size, indexes } = export;

        let url = Url::parse(url).ok();
        let is_meilisearch_cloud = url.as_ref().and_then(Url::host_str).is_some_and(|host| {
            host.ends_with("meilisearch.dev")
                || host.ends_with("meilisearch.com")
                || host.ends_with("meilisearch.io")
        });
        let has_api_key = api_key.is_some();
        let index_patterns_count = indexes.as_ref().map_or(0, |indexes| indexes.len());
        let patterns_with_filter_count = indexes.as_ref().map_or(0, |indexes| {
            indexes.values().filter(|settings| settings.filter.is_some()).count()
        });
        let patterns_with_override_settings_count = indexes.as_ref().map_or(0, |indexes| {
            indexes.values().filter(|settings| settings.override_settings).count()
        });
        let payload_sizes =
            if let Some(crate::routes::export::ByteWithDeserr(byte_size)) = payload_size {
                vec![byte_size.as_u64()]
            } else {
                vec![]
            };

        Self {
            total_received: 1,
            has_api_key,
            sum_exports_meilisearch_cloud: is_meilisearch_cloud as usize,
            sum_index_patterns: index_patterns_count,
            sum_patterns_with_filter: patterns_with_filter_count,
            sum_patterns_with_override_settings: patterns_with_override_settings_count,
            payload_sizes,
        }
    }
}

impl Aggregate for ExportAnalytics {
    fn event_name(&self) -> &'static str {
        "Export Triggered"
    }

    fn aggregate(mut self: Box<Self>, other: Box<Self>) -> Box<Self> {
        self.total_received += other.total_received;
        self.has_api_key |= other.has_api_key;
        self.sum_exports_meilisearch_cloud += other.sum_exports_meilisearch_cloud;
        self.sum_index_patterns += other.sum_index_patterns;
        self.sum_patterns_with_filter += other.sum_patterns_with_filter;
        self.sum_patterns_with_override_settings += other.sum_patterns_with_override_settings;
        self.payload_sizes.extend(other.payload_sizes);
        self
    }

    fn into_event(self: Box<Self>) -> serde_json::Value {
        let avg_payload_size = if self.payload_sizes.is_empty() {
            None
        } else {
            Some(self.payload_sizes.iter().sum::<u64>() / self.payload_sizes.len() as u64)
        };

        let avg_exports_meilisearch_cloud = if self.total_received == 0 {
            None
        } else {
            Some(self.sum_exports_meilisearch_cloud as f64 / self.total_received as f64)
        };

        let avg_index_patterns = if self.total_received == 0 {
            None
        } else {
            Some(self.sum_index_patterns as f64 / self.total_received as f64)
        };

        let avg_patterns_with_filter = if self.total_received == 0 {
            None
        } else {
            Some(self.sum_patterns_with_filter as f64 / self.total_received as f64)
        };

        let avg_patterns_with_override_settings = if self.total_received == 0 {
            None
        } else {
            Some(self.sum_patterns_with_override_settings as f64 / self.total_received as f64)
        };

        serde_json::json!({
            "total_received": self.total_received,
            "has_api_key": self.has_api_key,
            "avg_exports_meilisearch_cloud": avg_exports_meilisearch_cloud,
            "avg_index_patterns": avg_index_patterns,
            "avg_patterns_with_filter": avg_patterns_with_filter,
            "avg_patterns_with_override_settings": avg_patterns_with_override_settings,
            "avg_payload_size": avg_payload_size,
        })
    }
}
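This aggregator stores sums plus a `total_received` count and only divides when the event is flushed, which keeps `aggregate` a cheap O(1) merge. A reduced sketch of the sum-then-divide pattern:

```rust
// Reduced sketch of the pattern used above.
#[derive(Default)]
struct Avg { sum: usize, count: usize }

impl Avg {
    fn record(&mut self, value: usize) {
        self.sum += value;
        self.count += 1;
    }
    fn merge(&mut self, other: Avg) {
        // Merging two partial aggregates is just adding sums and counts...
        self.sum += other.sum;
        self.count += other.count;
    }
    fn finish(&self) -> Option<f64> {
        // ...and the average is computed once, at flush time.
        (self.count > 0).then(|| self.sum as f64 / self.count as f64)
    }
}

fn main() {
    let (mut a, mut b) = (Avg::default(), Avg::default());
    a.record(3);
    b.record(5);
    a.merge(b);
    assert_eq!(a.finish(), Some(4.0));
}
```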
@@ -54,6 +54,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
             get_task_documents_route: Some(false),
             composite_embedders: Some(false),
             chat_completions: Some(false),
+            multimodal: Some(false),
         })),
         (status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
             {
@@ -100,6 +101,8 @@ pub struct RuntimeTogglableFeatures {
     pub composite_embedders: Option<bool>,
     #[deserr(default)]
     pub chat_completions: Option<bool>,
+    #[deserr(default)]
+    pub multimodal: Option<bool>,
 }

 impl From<meilisearch_types::features::RuntimeTogglableFeatures> for RuntimeTogglableFeatures {
@@ -113,6 +116,7 @@ impl From<meilisearch_types::features::RuntimeTogglableFeatures> for RuntimeTogg
             get_task_documents_route,
             composite_embedders,
             chat_completions,
+            multimodal,
         } = value;

         Self {
@@ -124,6 +128,7 @@ impl From<meilisearch_types::features::RuntimeTogglableFeatures> for RuntimeTogg
             get_task_documents_route: Some(get_task_documents_route),
             composite_embedders: Some(composite_embedders),
             chat_completions: Some(chat_completions),
+            multimodal: Some(multimodal),
         }
     }
 }
@@ -138,6 +143,7 @@ pub struct PatchExperimentalFeatureAnalytics {
     get_task_documents_route: bool,
     composite_embedders: bool,
     chat_completions: bool,
+    multimodal: bool,
 }

 impl Aggregate for PatchExperimentalFeatureAnalytics {
@@ -155,6 +161,7 @@ impl Aggregate for PatchExperimentalFeatureAnalytics {
             get_task_documents_route: new.get_task_documents_route,
             composite_embedders: new.composite_embedders,
             chat_completions: new.chat_completions,
+            multimodal: new.multimodal,
         })
     }

@@ -181,6 +188,7 @@ impl Aggregate for PatchExperimentalFeatureAnalytics {
             get_task_documents_route: Some(false),
             composite_embedders: Some(false),
             chat_completions: Some(false),
+            multimodal: Some(false),
         })),
         (status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
             {
@@ -223,6 +231,7 @@ async fn patch_features(
             .composite_embedders
             .unwrap_or(old_features.composite_embedders),
         chat_completions: new_features.0.chat_completions.unwrap_or(old_features.chat_completions),
+        multimodal: new_features.0.multimodal.unwrap_or(old_features.multimodal),
     };

     // explicitly destructure for analytics rather than using the `Serialize` implementation, because
@@ -237,6 +246,7 @@ async fn patch_features(
         get_task_documents_route,
         composite_embedders,
         chat_completions,
+        multimodal,
     } = new_features;

     analytics.publish(
@@ -249,6 +259,7 @@ async fn patch_features(
             get_task_documents_route,
             composite_embedders,
             chat_completions,
+            multimodal,
         },
         &req,
     );
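With the new flag wired through, enabling the feature is a single PATCH; an illustrative request body built with `serde_json` (the `unwrap_or(old_features...)` fallbacks above mean absent fields keep their stored values):

```rust
use serde_json::json;

fn main() {
    // Illustrative PATCH /experimental-features body: only the fields you
    // send are changed; `multimodal` here, everything else keeps its value.
    let body = json!({ "multimodal": true });
    println!("{body}");
}
```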
@@ -1452,7 +1452,6 @@ fn some_documents<'a, 't: 'a>(
 ) -> Result<impl Iterator<Item = Result<Document, ResponseError>> + 'a, ResponseError> {
     let fields_ids_map = index.fields_ids_map(rtxn)?;
     let all_fields: Vec<_> = fields_ids_map.iter().map(|(id, _)| id).collect();
-    let embedding_configs = index.embedding_configs(rtxn)?;

     Ok(index.iter_documents(rtxn, doc_ids)?.map(move |ret| {
         ret.map_err(ResponseError::from).and_then(|(key, document)| -> Result<_, ResponseError> {
@@ -1468,15 +1467,9 @@ fn some_documents<'a, 't: 'a>(
                 Some(Value::Object(map)) => map,
                 _ => Default::default(),
             };
-            for (name, vector) in index.embeddings(rtxn, key)? {
-                let user_provided = embedding_configs
-                    .iter()
-                    .find(|conf| conf.name == name)
-                    .is_some_and(|conf| conf.user_provided.contains(key));
-                let embeddings = ExplicitVectors {
-                    embeddings: Some(vector.into()),
-                    regenerate: !user_provided,
-                };
+            for (name, (vector, regenerate)) in index.embeddings(rtxn, key)? {
+                let embeddings =
+                    ExplicitVectors { embeddings: Some(vector.into()), regenerate };
                 vectors.insert(
                     name,
                     serde_json::to_value(embeddings).map_err(MeilisearchHttpError::from)?,
@@ -56,6 +56,8 @@ pub struct FacetSearchQuery {
     pub q: Option<String>,
     #[deserr(default, error = DeserrJsonError<InvalidSearchVector>)]
     pub vector: Option<Vec<f32>>,
+    #[deserr(default, error = DeserrJsonError<InvalidSearchMedia>)]
+    pub media: Option<Value>,
     #[deserr(default, error = DeserrJsonError<InvalidSearchHybridQuery>)]
     pub hybrid: Option<HybridQuery>,
     #[deserr(default, error = DeserrJsonError<InvalidSearchFilter>)]
@@ -94,6 +96,7 @@ impl FacetSearchAggregator {
             facet_name,
             vector,
             q,
+            media,
             filter,
             matching_strategy,
             attributes_to_search_on,
@@ -108,6 +111,7 @@ impl FacetSearchAggregator {
             facet_names: Some(facet_name.clone()).into_iter().collect(),
             additional_search_parameters_provided: q.is_some()
                 || vector.is_some()
+                || media.is_some()
                 || filter.is_some()
                 || *matching_strategy != MatchingStrategy::default()
                 || attributes_to_search_on.is_some()
@@ -291,6 +295,7 @@ impl From<FacetSearchQuery> for SearchQuery {
             facet_name: _,
             q,
             vector,
+            media,
             filter,
             matching_strategy,
             attributes_to_search_on,
@@ -312,6 +317,7 @@ impl From<FacetSearchQuery> for SearchQuery {

         SearchQuery {
             q,
+            media,
             offset: DEFAULT_SEARCH_OFFSET(),
             limit: DEFAULT_SEARCH_LIMIT(),
             page,
@@ -205,6 +205,8 @@ impl TryFrom<SearchQueryGet> for SearchQuery {

         Ok(Self {
             q: other.q,
+            // `media` not supported for `GET`
+            media: None,
             vector: other.vector.map(CS::into_inner),
             offset: other.offset.0,
             limit: other.limit.0,
@@ -481,28 +483,30 @@ pub fn search_kind(
     index_uid: String,
     index: &milli::Index,
 ) -> Result<SearchKind, ResponseError> {
     let is_placeholder_query =
         if let Some(q) = query.q.as_deref() { q.trim().is_empty() } else { true };
+    let non_placeholder_query = !is_placeholder_query;
+    let is_media = query.media.is_some();
     // handle with care, the order of cases matters, the semantics is subtle
-    match (query.q.as_deref(), &query.hybrid, query.vector.as_deref()) {
-        // empty query, no vector => placeholder search
-        (Some(q), _, None) if q.trim().is_empty() => Ok(SearchKind::KeywordOnly),
-        // no query, no vector => placeholder search
-        (None, _, None) => Ok(SearchKind::KeywordOnly),
-        // hybrid.semantic_ratio == 1.0 => vector
-        (_, Some(HybridQuery { semantic_ratio, embedder }), v) if **semantic_ratio == 1.0 => {
-            SearchKind::semantic(index_scheduler, index_uid, index, embedder, v.map(|v| v.len()))
-        }
-        // hybrid.semantic_ratio == 0.0 => keyword
-        (_, Some(HybridQuery { semantic_ratio, embedder: _ }), _) if **semantic_ratio == 0.0 => {
+    match (is_media, non_placeholder_query, &query.hybrid, query.vector.as_deref()) {
+        // media + vector => error
+        (true, _, _, Some(_)) => Err(MeilisearchHttpError::MediaAndVector.into()),
+        // media + !hybrid => error
+        (true, _, None, _) => Err(MeilisearchHttpError::MissingSearchHybrid.into()),
+        // vector + !hybrid => error
+        (_, _, None, Some(_)) => Err(MeilisearchHttpError::MissingSearchHybrid.into()),
+        // hybrid S0 => keyword
+        (_, _, Some(HybridQuery { semantic_ratio, embedder: _ }), _) if **semantic_ratio == 0.0 => {
             Ok(SearchKind::KeywordOnly)
         }
-        // no query, hybrid, vector => semantic
-        (None, Some(HybridQuery { semantic_ratio: _, embedder }), Some(v)) => {
-            SearchKind::semantic(index_scheduler, index_uid, index, embedder, Some(v.len()))
+        // !q + !vector => placeholder search
+        (false, false, _, None) => Ok(SearchKind::KeywordOnly),
+        // hybrid S100 => semantic
+        (_, _, Some(HybridQuery { semantic_ratio, embedder }), v) if **semantic_ratio == 1.0 => {
+            SearchKind::semantic(index_scheduler, index_uid, index, embedder, v.map(|v| v.len()))
         }
-        // query, no hybrid, no vector => keyword
-        (Some(_), None, None) => Ok(SearchKind::KeywordOnly),
-        // query, hybrid, maybe vector => hybrid
-        (Some(_), Some(HybridQuery { semantic_ratio, embedder }), v) => SearchKind::hybrid(
+        // q + hybrid => hybrid
+        (_, true, Some(HybridQuery { semantic_ratio, embedder }), v) => SearchKind::hybrid(
             index_scheduler,
             index_uid,
             index,
@@ -510,7 +514,11 @@ pub fn search_kind(
             **semantic_ratio,
             v.map(|v| v.len()),
         ),
-
-        (_, None, Some(_)) => Err(MeilisearchHttpError::MissingSearchHybrid.into()),
+        // !q + hybrid => semantic
+        (_, false, Some(HybridQuery { semantic_ratio: _, embedder }), v) => {
+            SearchKind::semantic(index_scheduler, index_uid, index, embedder, v.map(|v| v.len()))
+        }
+        // q => keyword
+        (false, true, None, None) => Ok(SearchKind::KeywordOnly),
     }
 }
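The rewritten match is easiest to audit as a truth table over `(media, q, hybrid, vector)`: misuse errors first, then the pure-ratio shortcuts, then placeholder, semantic, and hybrid dispatch. A self-contained sketch of the same decision order, with the types reduced to booleans and a ratio so it compiles standalone:

```rust
#[derive(Debug, PartialEq)]
enum Kind { Error(&'static str), Keyword, Semantic, Hybrid }

// Mirror of the dispatch order above, in plain booleans.
fn kind(media: bool, q: bool, hybrid: Option<f32>, vector: bool) -> Kind {
    match (media, q, hybrid, vector) {
        (true, _, _, true) => Kind::Error("media and vector are exclusive"),
        (true, _, None, _) => Kind::Error("media requires hybrid"),
        (_, _, None, true) => Kind::Error("vector requires hybrid"),
        (_, _, Some(r), _) if r == 0.0 => Kind::Keyword, // hybrid S0
        (false, false, _, false) => Kind::Keyword,       // placeholder search
        (_, _, Some(r), _) if r == 1.0 => Kind::Semantic, // hybrid S100
        (_, true, Some(_), _) => Kind::Hybrid,            // q + hybrid
        (_, false, Some(_), _) => Kind::Semantic,         // !q + hybrid
        (false, true, None, false) => Kind::Keyword,      // q alone
    }
}

fn main() {
    assert_eq!(kind(false, true, Some(0.5), false), Kind::Hybrid);
    assert_eq!(kind(true, false, None, false), Kind::Error("media requires hybrid"));
}
```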
@@ -61,6 +61,8 @@ pub struct SearchAggregator<Method: AggregateMethod> {
     semantic_ratio: bool,
     hybrid: bool,
     retrieve_vectors: bool,
+    // Number of requests containing `media`
+    total_media: usize,

     // every time a search is done, we increment the counter linked to the used settings
     matching_strategy: HashMap<String, usize>,
@@ -101,6 +103,7 @@ impl<Method: AggregateMethod> SearchAggregator<Method> {
         let SearchQuery {
             q,
             vector,
+            media,
             offset,
             limit,
             page,
@@ -175,6 +178,11 @@ impl<Method: AggregateMethod> SearchAggregator<Method> {
         if let Some(ref vector) = vector {
             ret.max_vector_size = vector.len();
         }

+        if media.is_some() {
+            ret.total_media = 1;
+        }
+
         ret.retrieve_vectors |= retrieve_vectors;

         if query.is_finite_pagination() {
@@ -277,6 +285,7 @@ impl<Method: AggregateMethod> Aggregate for SearchAggregator<Method> {
             show_ranking_score_details,
             semantic_ratio,
             hybrid,
+            total_media,
             total_degraded,
             total_used_negative_operator,
             ranking_score_threshold,
@@ -327,6 +336,7 @@ impl<Method: AggregateMethod> Aggregate for SearchAggregator<Method> {
         self.retrieve_vectors |= retrieve_vectors;
         self.semantic_ratio |= semantic_ratio;
         self.hybrid |= hybrid;
+        self.total_media += total_media;

         // pagination
         self.max_limit = self.max_limit.max(max_limit);
@@ -403,6 +413,7 @@ impl<Method: AggregateMethod> Aggregate for SearchAggregator<Method> {
             show_ranking_score_details,
             semantic_ratio,
             hybrid,
+            total_media,
             total_degraded,
             total_used_negative_operator,
             ranking_score_threshold,
@@ -450,6 +461,7 @@ impl<Method: AggregateMethod> Aggregate for SearchAggregator<Method> {
             "hybrid": {
                 "enabled": hybrid,
                 "semantic_ratio": semantic_ratio,
+                "total_media": total_media,
             },
             "pagination": {
                 "max_limit": max_limit,
@@ -755,6 +755,14 @@ fn validate_settings(
         if matches!(embedder.indexing_embedder, Setting::Set(_)) {
             features.check_composite_embedders("setting `indexingEmbedder`")?;
         }

+        if matches!(embedder.indexing_fragments, Setting::Set(_)) {
+            features.check_multimodal("setting `indexingFragments`")?;
+        }
+
+        if matches!(embedder.search_fragments, Setting::Set(_)) {
+            features.check_multimodal("setting `searchFragments`")?;
+        }
     }
 }
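These checks reject fragment settings unless the `multimodal` experimental feature is enabled. An illustrative embedder-settings payload that would trip them; the field names come from the checks above, but the fragment contents are hypothetical:

```rust
use serde_json::json;

fn main() {
    // Hypothetical embedder settings; `indexingFragments`/`searchFragments`
    // are only accepted once the `multimodal` feature flag is on.
    let embedder_settings = json!({
        "source": "rest",
        "indexingFragments": { "text": { "value": "A document titled {{doc.title}}" } },
        "searchFragments": { "text": { "value": "{{q}}" } }
    });
    println!("{embedder_settings}");
}
```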
@@ -2,6 +2,7 @@ use std::collections::BTreeMap;

 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
+use export::Export;
 use index_scheduler::IndexScheduler;
 use meilisearch_auth::AuthController;
 use meilisearch_types::batch_view::BatchView;
@@ -54,6 +55,8 @@ mod api_key;
 pub mod batches;
 pub mod chats;
 mod dump;
+mod export;
+mod export_analytics;
 pub mod features;
 pub mod indexes;
 mod logs;
@@ -84,6 +87,7 @@ mod tasks_test;
         (path = "/multi-search", api = multi_search::MultiSearchApi),
         (path = "/swap-indexes", api = swap_indexes::SwapIndexesApi),
         (path = "/experimental-features", api = features::ExperimentalFeaturesApi),
+        (path = "/export", api = export::ExportApi),
         (path = "/network", api = network::NetworkApi),
     ),
     paths(get_health, get_version, get_stats),
@@ -95,7 +99,7 @@ mod tasks_test;
         url = "/",
         description = "Local server",
     )),
-    components(schemas(PaginationView<KeyView>, PaginationView<IndexView>, IndexView, DocumentDeletionByFilter, AllBatches, BatchStats, ProgressStepView, ProgressView, BatchView, RuntimeTogglableFeatures, SwapIndexesPayload, DocumentEditionByFunction, MergeFacets, FederationOptions, SearchQueryWithIndex, Federation, FederatedSearch, FederatedSearchResult, SearchResults, SearchResultWithIndex, SimilarQuery, SimilarResult, PaginationView<serde_json::Value>, BrowseQuery, UpdateIndexRequest, IndexUid, IndexCreateRequest, KeyView, Action, CreateApiKey, UpdateStderrLogs, LogMode, GetLogs, IndexStats, Stats, HealthStatus, HealthResponse, VersionResponse, Code, ErrorType, AllTasks, TaskView, Status, DetailsView, ResponseError, Settings<Unchecked>, Settings<Checked>, TypoSettings, MinWordSizeTyposSetting, FacetingSettings, PaginationSettings, SummarizedTaskView, Kind, Network, Remote, FilterableAttributesRule, FilterableAttributesPatterns, AttributePatterns, FilterableAttributesFeatures, FilterFeatures))
+    components(schemas(PaginationView<KeyView>, PaginationView<IndexView>, IndexView, DocumentDeletionByFilter, AllBatches, BatchStats, ProgressStepView, ProgressView, BatchView, RuntimeTogglableFeatures, SwapIndexesPayload, DocumentEditionByFunction, MergeFacets, FederationOptions, SearchQueryWithIndex, Federation, FederatedSearch, FederatedSearchResult, SearchResults, SearchResultWithIndex, SimilarQuery, SimilarResult, PaginationView<serde_json::Value>, BrowseQuery, UpdateIndexRequest, IndexUid, IndexCreateRequest, KeyView, Action, CreateApiKey, UpdateStderrLogs, LogMode, GetLogs, IndexStats, Stats, HealthStatus, HealthResponse, VersionResponse, Code, ErrorType, AllTasks, TaskView, Status, DetailsView, ResponseError, Settings<Unchecked>, Settings<Checked>, TypoSettings, MinWordSizeTyposSetting, FacetingSettings, PaginationSettings, SummarizedTaskView, Kind, Network, Remote, FilterableAttributesRule, FilterableAttributesPatterns, AttributePatterns, FilterableAttributesFeatures, FilterFeatures, Export))
 )]
 pub struct MeilisearchApi;

@@ -115,6 +119,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
         .service(web::scope("/metrics").configure(metrics::configure))
         .service(web::scope("/experimental-features").configure(features::configure))
         .service(web::scope("/network").configure(network::configure))
+        .service(web::scope("/export").configure(export::configure))
         .service(web::scope("/chats").configure(chats::configure));

     #[cfg(feature = "swagger")]
@@ -42,6 +42,7 @@ impl MultiSearchAggregator {
             federation_options,
             q: _,
             vector: _,
+            media: _,
             offset: _,
             limit: _,
             page: _,
@@ -228,7 +228,7 @@ mod tests {
         let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
         snapshot!(meili_snap::json_string!(err), @r#"
         {
-          "message": "Invalid value in parameter `types`: `createIndex` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `upgradeDatabase`.",
+          "message": "Invalid value in parameter `types`: `createIndex` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`.",
           "code": "invalid_task_types",
           "type": "invalid_request",
           "link": "https://docs.meilisearch.com/errors#invalid_task_types"
@ -64,6 +64,8 @@ pub struct SearchQuery {
|
||||
pub q: Option<String>,
|
||||
#[deserr(default, error = DeserrJsonError<InvalidSearchVector>)]
|
||||
pub vector: Option<Vec<f32>>,
|
||||
#[deserr(default, error = DeserrJsonError<InvalidSearchMedia>)]
|
||||
pub media: Option<serde_json::Value>,
|
||||
#[deserr(default, error = DeserrJsonError<InvalidSearchHybridQuery>)]
|
||||
pub hybrid: Option<HybridQuery>,
|
||||
#[deserr(default = DEFAULT_SEARCH_OFFSET(), error = DeserrJsonError<InvalidSearchOffset>)]
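Taken together with the `MediaAndVector` error added earlier in this commit, the two new fields above mean a request may carry `media` only alongside `hybrid`, and never together with `vector`. A sketch of a payload exercising them, in the test suite's `json!` style — the embedder name and the media shape here are illustrative assumptions, not taken from this diff:

    // Hypothetical multimodal search payload: `media` is an arbitrary JSON value,
    // and `hybrid` must be present whenever `media` (or `vector`) is.
    let payload = json!({
        "q": "white shirt",
        "media": { "imageUrl": "https://example.com/shirt.png" }, // assumed shape
        "hybrid": { "embedder": "default", "semanticRatio": 0.5 }
    });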
@ -147,6 +149,7 @@ impl From<SearchParameters> for SearchQuery {
ranking_score_threshold: ranking_score_threshold.map(RankingScoreThreshold::from),
q: None,
vector: None,
media: None,
offset: DEFAULT_SEARCH_OFFSET(),
page: None,
hits_per_page: None,
@ -220,6 +223,7 @@ impl fmt::Debug for SearchQuery {
let Self {
q,
vector,
media,
hybrid,
offset,
limit,
@ -274,6 +278,9 @@ impl fmt::Debug for SearchQuery {
);
}
}
if let Some(media) = media {
debug.field("media", media);
}
if let Some(hybrid) = hybrid {
debug.field("hybrid", &hybrid);
}
@ -399,10 +406,10 @@ impl SearchKind {
route: Route,
) -> Result<(String, Arc<Embedder>, bool), ResponseError> {
let rtxn = index.read_txn()?;
let embedder_configs = index.embedding_configs(&rtxn)?;
let embedder_configs = index.embedding_configs().embedding_configs(&rtxn)?;
let embedders = index_scheduler.embedders(index_uid, embedder_configs)?;

let (embedder, _, quantized) = embedders
let (embedder, quantized) = embedders
.get(embedder_name)
.ok_or(match route {
Route::Search | Route::MultiSearch => {
@ -412,6 +419,7 @@ impl SearchKind {
milli::UserError::InvalidSimilarEmbedder(embedder_name.to_owned())
}
})
.map(|runtime| (runtime.embedder.clone(), runtime.is_quantized))
.map_err(milli::Error::from)?;

if let Some(vector_len) = vector_len {
@ -481,8 +489,10 @@ pub struct SearchQueryWithIndex {
pub index_uid: IndexUid,
#[deserr(default, error = DeserrJsonError<InvalidSearchQ>)]
pub q: Option<String>,
#[deserr(default, error = DeserrJsonError<InvalidSearchQ>)]
#[deserr(default, error = DeserrJsonError<InvalidSearchVector>)]
pub vector: Option<Vec<f32>>,
#[deserr(default, error = DeserrJsonError<InvalidSearchMedia>)]
pub media: Option<serde_json::Value>,
#[deserr(default, error = DeserrJsonError<InvalidSearchHybridQuery>)]
pub hybrid: Option<HybridQuery>,
#[deserr(default, error = DeserrJsonError<InvalidSearchOffset>)]
@ -563,6 +573,7 @@ impl SearchQueryWithIndex {
let SearchQuery {
q,
vector,
media,
hybrid,
offset,
limit,
@ -593,6 +604,7 @@ impl SearchQueryWithIndex {
index_uid,
q,
vector,
media,
hybrid,
offset: if offset == DEFAULT_SEARCH_OFFSET() { None } else { Some(offset) },
limit: if limit == DEFAULT_SEARCH_LIMIT() { None } else { Some(limit) },
@ -627,6 +639,7 @@ impl SearchQueryWithIndex {
federation_options,
q,
vector,
media,
offset,
limit,
page,
@ -657,6 +670,7 @@ impl SearchQueryWithIndex {
SearchQuery {
q,
vector,
media,
offset: offset.unwrap_or(DEFAULT_SEARCH_OFFSET()),
limit: limit.unwrap_or(DEFAULT_SEARCH_LIMIT()),
page,
@ -958,6 +972,9 @@ pub fn prepare_search<'t>(
time_budget: TimeBudget,
features: RoFeatures,
) -> Result<(milli::Search<'t>, bool, usize, usize), ResponseError> {
if query.media.is_some() {
features.check_multimodal("passing `media` in a search query")?;
}
let mut search = index.search(rtxn);
search.time_budget(time_budget);
if let Some(ranking_score_threshold) = query.ranking_score_threshold {
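The `check_multimodal` call above gates the new parameter behind the `multimodal` runtime flag, mirroring the other experimental-feature guards. A rough sketch of that guard's shape — the error fields are assumptions modeled on the existing feature checks, not code from this diff:

    // Illustrative feature guard: succeed when the flag is on, otherwise
    // return an error naming the action that was rejected.
    pub fn check_multimodal(&self, disabled_action: &'static str) -> Result<(), ResponseError> {
        if self.runtime.multimodal {
            Ok(())
        } else {
            // assumed error type, following the pattern of the other checks
            Err(FeatureNotEnabledError { disabled_action, feature: "multimodal" }.into())
        }
    }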
@ -983,14 +1000,27 @@ pub fn prepare_search<'t>(

let deadline = std::time::Instant::now() + std::time::Duration::from_secs(10);

let q = query.q.as_deref();
let media = query.media.as_ref();

let search_query = match (q, media) {
(Some(text), None) => milli::vector::SearchQuery::Text(text),
(q, media) => milli::vector::SearchQuery::Media { q, media },
};

embedder
.embed_search(query.q.as_ref().unwrap(), Some(deadline))
.embed_search(search_query, Some(deadline))
.map_err(milli::vector::Error::from)
.map_err(milli::Error::from)?
}
};

search.semantic(embedder_name.clone(), embedder.clone(), *quantized, Some(vector));
search.semantic(
embedder_name.clone(),
embedder.clone(),
*quantized,
Some(vector),
query.media.clone(),
);
}
SearchKind::Hybrid { embedder_name, embedder, quantized, semantic_ratio: _ } => {
if let Some(q) = &query.q {
@ -1002,6 +1032,7 @@ pub fn prepare_search<'t>(
embedder.clone(),
*quantized,
query.vector.clone(),
query.media.clone(),
);
}
}
@ -1126,6 +1157,7 @@ pub fn perform_search(
locales,
// already used in prepare_search
vector: _,
media: _,
hybrid: _,
offset: _,
ranking_score_threshold: _,
@ -1328,7 +1360,6 @@ struct HitMaker<'a> {
vectors_fid: Option<FieldId>,
retrieve_vectors: RetrieveVectors,
to_retrieve_ids: BTreeSet<FieldId>,
embedding_configs: Vec<index::IndexEmbeddingConfig>,
formatter_builder: MatcherBuilder<'a>,
formatted_options: BTreeMap<FieldId, FormatOptions>,
show_ranking_score: bool,
@ -1443,8 +1474,6 @@ impl<'a> HitMaker<'a> {
&displayed_ids,
);

let embedding_configs = index.embedding_configs(rtxn)?;

Ok(Self {
index,
rtxn,
@ -1453,7 +1482,6 @@ impl<'a> HitMaker<'a> {
vectors_fid,
retrieve_vectors,
to_retrieve_ids,
embedding_configs,
formatter_builder,
formatted_options,
show_ranking_score: format.show_ranking_score,
@ -1499,14 +1527,8 @@ impl<'a> HitMaker<'a> {
Some(Value::Object(map)) => map,
_ => Default::default(),
};
for (name, vector) in self.index.embeddings(self.rtxn, id)? {
let user_provided = self
.embedding_configs
.iter()
.find(|conf| conf.name == name)
.is_some_and(|conf| conf.user_provided.contains(id));
let embeddings =
ExplicitVectors { embeddings: Some(vector.into()), regenerate: !user_provided };
for (name, (vector, regenerate)) in self.index.embeddings(self.rtxn, id)? {
let embeddings = ExplicitVectors { embeddings: Some(vector.into()), regenerate };
vectors.insert(
name,
serde_json::to_value(embeddings).map_err(InternalError::SerdeJson)?,
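The rewritten loop above works because `Index::embeddings` now returns the regenerate flag next to each embedder's vectors, so the per-document scan of `embedding_configs` (and the struct field that carried it) can be dropped. The assumed shape of the new accessor, sketched as a comment:

    // Assumed signature after this change (one (vectors, regenerate) pair per embedder):
    // fn embeddings(&self, rtxn: &RoTxn, docid: DocumentId)
    //     -> Result<BTreeMap<String, (Vec<Embedding>, bool)>>
    // `regenerate` is false exactly when the embedding was user-provided,
    // which is why the old `!user_provided` computation disappears.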

@ -93,7 +93,7 @@ async fn create_api_key_bad_actions() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `*.get`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`",
"message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `export`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`",
"code": "invalid_api_key_actions",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"

@ -42,7 +42,7 @@ async fn batch_bad_types() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `upgradeDatabase`.",
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"

@ -465,6 +465,7 @@ pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
skip_index_budget: true,
// Having 2 threads makes the tests way faster
max_indexing_threads: MaxThreads::from_str("2").unwrap(),
experimental_no_edition_2024_for_settings: false,
},
experimental_enable_metrics: false,
..Parser::parse_from(None as Option<&str>)
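From here on, the test hunks apply one mechanical migration: task-waiting moves from the index wrapper to the server handle, which is the safe choice for shared-server tests since task uids are global to the instance rather than scoped to an index. The pattern, sketched with the suite's helpers (`documents` is any JSON payload from a test):

    // Before: index.wait_task(task.uid()).await.succeeded();
    // After:
    let (task, _status_code) = index.add_documents(documents, None).await;
    server.wait_task(task.uid()).await.succeeded();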

@ -293,7 +293,7 @@ async fn add_csv_document() {
"enqueuedAt": "[date]"
}
"#);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@ -358,7 +358,7 @@ async fn add_csv_document_with_types() {
"enqueuedAt": "[date]"
}
"#);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@ -434,7 +434,7 @@ async fn add_csv_document_with_custom_delimiter() {
"enqueuedAt": "[date]"
}
"#);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@ -991,7 +991,7 @@ async fn add_documents_no_index_creation() {
let (response, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");

let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(code, @"202 Accepted");
snapshot!(response,
@r###"
@ -1068,7 +1068,7 @@ async fn document_addition_with_primary_key() {
}
"#);

index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();

let (response, code) = index.get_task(response.uid()).await;
snapshot!(code, @"200 OK");
@ -1120,7 +1120,7 @@ async fn document_addition_with_huge_int_primary_key() {
let (response, code) = index.add_documents(documents, Some("primary")).await;
snapshot!(code, @"202 Accepted");

let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(response,
@r###"
{
@ -1178,7 +1178,7 @@ async fn replace_document() {
}
"#);

index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();

let documents = json!([
{
@ -1190,7 +1190,7 @@ async fn replace_document() {
let (task, code) = index.add_documents(documents, None).await;
snapshot!(code,@"202 Accepted");

index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
@ -1362,7 +1362,7 @@ async fn error_add_documents_bad_document_id() {
}
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -1399,7 +1399,7 @@ async fn error_add_documents_bad_document_id() {
}
]);
let (value, _code) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.failed();
server.wait_task(value.uid()).await.failed();
let (response, code) = index.get_task(value.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -1436,7 +1436,7 @@ async fn error_add_documents_bad_document_id() {
}
]);
let (value, _code) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.failed();
server.wait_task(value.uid()).await.failed();
let (response, code) = index.get_task(value.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -1478,7 +1478,7 @@ async fn error_add_documents_missing_document_id() {
}
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -1527,7 +1527,7 @@ async fn error_document_field_limit_reached_in_one_document() {
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");

let response = index.wait_task(response.uid()).await.failed();
let response = server.wait_task(response.uid()).await.failed();
snapshot!(code, @"202 Accepted");
// Documents without a primary key are not accepted.
snapshot!(response,
@ -1576,7 +1576,7 @@ async fn error_document_field_limit_reached_over_multiple_documents() {
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");

let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(code, @"202 Accepted");
snapshot!(response,
@r###"
@ -1611,7 +1611,7 @@ async fn error_document_field_limit_reached_over_multiple_documents() {
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");

let response = index.wait_task(response.uid()).await.failed();
let response = server.wait_task(response.uid()).await.failed();
snapshot!(code, @"202 Accepted");
snapshot!(response,
@r###"
@ -1660,7 +1660,7 @@ async fn error_document_field_limit_reached_in_one_nested_document() {
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");

let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(code, @"202 Accepted");
// Documents without a primary key are not accepted.
snapshot!(response,
@ -1705,7 +1705,7 @@ async fn error_document_field_limit_reached_over_multiple_documents_with_nested_
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");

let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(code, @"202 Accepted");
snapshot!(response,
@r###"
@ -1741,7 +1741,7 @@ async fn error_document_field_limit_reached_over_multiple_documents_with_nested_
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");

let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(code, @"202 Accepted");
snapshot!(response,
@r###"
@ -1790,7 +1790,7 @@ async fn add_documents_with_geo_field() {
]);

let (task, _status_code) = index.add_documents(documents, None).await;
let response = index.wait_task(task.uid()).await.succeeded();
let response = server.wait_task(task.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r#"
{
@ -1914,7 +1914,7 @@ async fn update_documents_with_geo_field() {
]);

let (task, _status_code) = index.add_documents(documents, None).await;
let response = index.wait_task(task.uid()).await.succeeded();
let response = server.wait_task(task.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r#"
{
@ -1983,7 +1983,7 @@ async fn update_documents_with_geo_field() {
}
]);
let (task, _status_code) = index.update_documents(updated_documents, None).await;
let response = index.wait_task(task.uid()).await.succeeded();
let response = server.wait_task(task.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
@ -2097,7 +2097,7 @@ async fn add_documents_invalid_geo_field() {
]);

let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".indexUid" => "[uuid]" }),
@ -2135,7 +2135,7 @@ async fn add_documents_invalid_geo_field() {
]);

let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2173,7 +2173,7 @@ async fn add_documents_invalid_geo_field() {
]);

let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2211,7 +2211,7 @@ async fn add_documents_invalid_geo_field() {
]);

let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2249,7 +2249,7 @@ async fn add_documents_invalid_geo_field() {
]);

let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2287,7 +2287,7 @@ async fn add_documents_invalid_geo_field() {
]);

let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2325,7 +2325,7 @@ async fn add_documents_invalid_geo_field() {
]);

let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2363,7 +2363,7 @@ async fn add_documents_invalid_geo_field() {
]);

let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2401,7 +2401,7 @@ async fn add_documents_invalid_geo_field() {
]);

let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2439,7 +2439,7 @@ async fn add_documents_invalid_geo_field() {
]);

let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2477,7 +2477,7 @@ async fn add_documents_invalid_geo_field() {
]);

let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2515,7 +2515,7 @@ async fn add_documents_invalid_geo_field() {
]);

let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2556,7 +2556,7 @@ async fn add_documents_invalid_geo_field() {

let (response, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.failed();
let response = server.wait_task(response.uid()).await.failed();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
@ -2593,7 +2593,7 @@ async fn add_documents_invalid_geo_field() {

let (response, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.failed();
let response = server.wait_task(response.uid()).await.failed();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
@ -2630,7 +2630,7 @@ async fn add_documents_invalid_geo_field() {

let (response, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.failed();
let response = server.wait_task(response.uid()).await.failed();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
@ -2674,7 +2674,7 @@ async fn add_invalid_geo_and_then_settings() {
]);
let (ret, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let ret = index.wait_task(ret.uid()).await.succeeded();
let ret = server.wait_task(ret.uid()).await.succeeded();
snapshot!(ret, @r###"
{
"uid": "[uid]",
@ -2697,7 +2697,7 @@

let (ret, code) = index.update_settings(json!({ "sortableAttributes": ["_geo"] })).await;
snapshot!(code, @"202 Accepted");
let ret = index.wait_task(ret.uid()).await.failed();
let ret = server.wait_task(ret.uid()).await.failed();
snapshot!(ret, @r###"
{
"uid": "[uid]",
@ -2765,7 +2765,7 @@ async fn error_primary_key_inference() {
]);

let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
assert_eq!(code, 200);

@ -2806,7 +2806,7 @@ async fn error_primary_key_inference() {
]);

let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
assert_eq!(code, 200);

@ -2845,7 +2845,7 @@ async fn error_primary_key_inference() {
]);

let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_task(task.uid()).await;
assert_eq!(code, 200);

@ -2884,12 +2884,12 @@ async fn add_documents_with_primary_key_twice() {
]);

let (task, _status_code) = index.add_documents(documents.clone(), Some("title")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _code) = index.get_task(task.uid()).await;
assert_eq!(response["status"], "succeeded");

let (task, _status_code) = index.add_documents(documents, Some("title")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _code) = index.get_task(task.uid()).await;
assert_eq!(response["status"], "succeeded");
}
@ -2922,7 +2922,7 @@ async fn batch_several_documents_addition() {
// wait first batch of documents to finish
let finished_tasks = futures::future::join_all(waiter).await;
for (task, _code) in finished_tasks {
index.wait_task(task.uid()).await;
server.wait_task(task.uid()).await;
}

// run a second completely failing batch
@ -2936,7 +2936,7 @@ async fn batch_several_documents_addition() {
// wait second batch of documents to finish
let finished_tasks = futures::future::join_all(waiter).await;
for (task, _code) in finished_tasks {
index.wait_task(task.uid()).await;
server.wait_task(task.uid()).await;
}

let (response, _code) = index.filtered_tasks(&[], &["failed"], &[]).await;

@ -5,11 +5,12 @@ use crate::json;

#[actix_rt::test]
async fn delete_one_document_unexisting_index() {
let server = Server::new_shared();
let index = shared_does_not_exists_index().await;
let (task, code) = index.delete_document_by_filter_fail(json!({"filter": "a = b"})).await;
assert_eq!(code, 202);

index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
}

#[actix_rt::test]
@ -19,7 +20,7 @@ async fn delete_one_unexisting_document() {
index.create(None).await;
let (response, code) = index.delete_document(0).await;
assert_eq!(code, 202, "{response}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
}

#[actix_rt::test]
@ -28,10 +29,10 @@ async fn delete_one_document() {
let index = server.unique_index();
let (task, _status_code) =
index.add_documents(json!([{ "id": 0, "content": "foobar" }]), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, status_code) = index.delete_document(0).await;
assert_eq!(status_code, 202);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

let (_response, code) = index.get_document(0, None).await;
assert_eq!(code, 404);
@ -44,7 +45,7 @@ async fn clear_all_documents_unexisting_index() {
let (task, code) = index.clear_all_documents().await;
assert_eq!(code, 202);

index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
}

#[actix_rt::test]
@ -57,11 +58,11 @@ async fn clear_all_documents() {
None,
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.clear_all_documents().await;
assert_eq!(code, 202);

let _update = index.wait_task(task.uid()).await.succeeded();
let _update = server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert!(response["results"].as_array().unwrap().is_empty());
@ -72,11 +73,11 @@ async fn clear_all_documents_empty_index() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.clear_all_documents().await;
assert_eq!(code, 202);

let _update = index.wait_task(task.uid()).await.succeeded();
let _update = server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert!(response["results"].as_array().unwrap().is_empty());
@ -95,7 +96,7 @@ async fn error_delete_batch_unexisting_index() {
});
assert_eq!(code, 202);

let response = index.wait_task(task.uid()).await.failed();
let response = server.wait_task(task.uid()).await.failed();
assert_eq!(response["error"], expected_response);
}

@ -104,11 +105,11 @@ async fn delete_batch() {
let server = Server::new_shared();
let index = server.unique_index();
let (task,_status_code) = index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.delete_batch(vec![1, 0]).await;
assert_eq!(code, 202);

let _update = index.wait_task(task.uid()).await.succeeded();
let _update = server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
@ -120,11 +121,11 @@ async fn delete_no_document_batch() {
let server = Server::new_shared();
let index = server.unique_index();
let (task,_status_code) = index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.delete_batch(vec![]).await;
assert_eq!(code, 202, "{response}");

let _update = index.wait_task(response.uid()).await.succeeded();
let _update = server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 3);
@ -146,7 +147,7 @@ async fn delete_document_by_filter() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

let (stats, _) = index.stats().await;
snapshot!(json_string!(stats, {
@ -180,7 +181,7 @@ async fn delete_document_by_filter() {
}
"###);

let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@ -253,7 +254,7 @@ async fn delete_document_by_filter() {
}
"###);

let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@ -328,7 +329,7 @@ async fn delete_document_by_complex_filter() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index
.delete_document_by_filter(
json!({ "filter": ["color != red", "color != green", "color EXISTS"] }),
@ -345,7 +346,7 @@ async fn delete_document_by_complex_filter() {
}
"###);

let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@ -404,7 +405,7 @@ async fn delete_document_by_complex_filter() {
}
"###);

let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@ -23,7 +23,7 @@ async fn error_get_unexisting_document() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

let (response, code) = index.get_document(1, None).await;

@ -43,7 +43,7 @@ async fn get_document() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let documents = json!([
{
"id": 0,
@ -52,7 +52,7 @@ async fn get_document() {
]);
let (task, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_document(0, None).await;
assert_eq!(code, 200);
assert_eq!(
@ -276,7 +276,7 @@ async fn get_document_s_nested_attributes_to_retrieve() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

let documents = json!([
{
@ -293,7 +293,7 @@ async fn get_document_s_nested_attributes_to_retrieve() {
]);
let (task, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

let (response, code) = index.get_document(0, Some(json!({ "fields": ["content"] }))).await;
assert_eq!(code, 200);
@ -369,7 +369,7 @@ async fn get_document_by_filter() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

let (response, code) = index.fetch_documents(json!({})).await;
let (response2, code2) = index.get_all_documents_raw("").await;
@ -525,7 +525,7 @@ async fn get_document_by_ids() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

let (response, code) = index
.fetch_documents(json!({
@ -651,7 +651,7 @@ async fn get_document_invalid_ids() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

let (response, code) = index.fetch_documents(json!({"ids": ["0", "illegal/docid"] })).await;
let (response2, code2) = index.get_all_documents_raw("?ids=0,illegal/docid").await;
@ -683,7 +683,7 @@ async fn get_document_not_found_ids() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

let (response, code) = index.fetch_documents(json!({"ids": ["0", 3, 42] })).await;
let (response2, code2) = index.get_all_documents_raw("?ids=0,3,42").await;
@ -726,7 +726,7 @@ async fn get_document_by_ids_and_filter() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

let (response, code) =
index.fetch_documents(json!({"ids": [2], "filter": "color = blue" })).await;
@ -854,7 +854,7 @@ async fn get_document_with_vectors() {
]);
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();

// by default you shouldn't see the `_vectors` object
let (documents, _code) = index.get_all_documents(Default::default()).await;
@ -34,7 +34,7 @@ async fn document_update_with_primary_key() {
let (response, code) = index.update_documents(documents, Some("primary")).await;
assert_eq!(code, 202);

index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();

let (response, code) = index.get_task(response.uid()).await;
assert_eq!(code, 200);
@ -63,7 +63,7 @@ async fn update_document() {
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);

index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();

let documents = json!([
{
@ -75,7 +75,7 @@ async fn update_document() {
let (response, code) = index.update_documents(documents, None).await;
assert_eq!(code, 202, "response: {}", response);

index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();

let (response, code) = index.get_task(response.uid()).await;
assert_eq!(code, 200);
@ -107,7 +107,7 @@ async fn update_document_gzip_encoded() {
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);

index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();

let documents = json!([
{
@ -119,7 +119,7 @@ async fn update_document_gzip_encoded() {
let (response, code) = index.update_documents(documents, None).await;
assert_eq!(code, 202, "response: {}", response);

index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();

let (response, code) = index.get_task(response.uid()).await;
assert_eq!(code, 200);
@ -142,7 +142,7 @@ async fn update_larger_dataset() {
let index = server.unique_index();
let documents = serde_json::from_str(include_str!("../assets/test_set.json")).unwrap();
let (task, _code) = index.update_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_task(task.uid()).await;
assert_eq!(code, 200);
assert_eq!(response["type"], "documentAdditionOrUpdate");
@ -166,7 +166,7 @@ async fn error_update_documents_bad_document_id() {
}
]);
let (task, _code) = index.update_documents(documents, None).await;
let response = index.wait_task(task.uid()).await;
let response = server.wait_task(task.uid()).await;
assert_eq!(response["status"], json!("failed"));
assert_eq!(
response["error"]["message"],
@ -194,7 +194,7 @@ async fn error_update_documents_missing_document_id() {
}
]);
let (task, _code) = index.update_documents(documents, None).await;
let response = index.wait_task(task.uid()).await;
let response = server.wait_task(task.uid()).await;
assert_eq!(response["status"], "failed");
assert_eq!(
response["error"]["message"],
@ -219,7 +219,7 @@ async fn update_faceted_document() {
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();

let documents: Vec<_> = (0..1000)
.map(|id| {
@ -233,7 +233,7 @@ async fn update_faceted_document() {
let (response, code) = index.add_documents(documents.into(), None).await;
assert_eq!(code, 202);

index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();

let documents = json!([
{
@ -245,7 +245,7 @@ async fn update_faceted_document() {
let (response, code) = index.update_documents(documents, None).await;
assert_eq!(code, 202, "response: {}", response);

index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();

index
.search(json!({"limit": 10}), |response, code| {

@ -2188,7 +2188,8 @@ async fn import_dump_v6_containing_experimental_features() {
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false,
"chatCompletions": false
"chatCompletions": false,
"multimodal": false
}
"###);

@ -2314,7 +2315,8 @@ async fn import_dump_v6_containing_batches_and_enqueued_tasks() {
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false,
"chatCompletions": false
"chatCompletions": false,
"multimodal": false
}
"###);

@ -2420,7 +2422,8 @@ async fn generate_and_import_dump_containing_vectors() {
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false,
"chatCompletions": false
"chatCompletions": false,
"multimodal": false
}
"###);

@ -25,7 +25,8 @@ async fn experimental_features() {
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false,
"chatCompletions": false
"chatCompletions": false,
"multimodal": false
}
"###);

@ -41,7 +42,8 @@ async fn experimental_features() {
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false,
"chatCompletions": false
"chatCompletions": false,
"multimodal": false
}
"###);

@ -57,7 +59,8 @@ async fn experimental_features() {
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false,
"chatCompletions": false
"chatCompletions": false,
"multimodal": false
}
"###);

@ -74,7 +77,8 @@ async fn experimental_features() {
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false,
"chatCompletions": false
"chatCompletions": false,
"multimodal": false
}
"###);

@ -91,7 +95,8 @@ async fn experimental_features() {
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false,
"chatCompletions": false
"chatCompletions": false,
"multimodal": false
}
"###);
}
@ -115,7 +120,8 @@ async fn experimental_feature_metrics() {
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false,
"chatCompletions": false
"chatCompletions": false,
"multimodal": false
}
"###);

@ -162,7 +168,7 @@ async fn errors() {
meili_snap::snapshot!(code, @"400 Bad Request");
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
{
"message": "Unknown field `NotAFeature`: expected one of `metrics`, `logsRoute`, `editDocumentsByFunction`, `containsFilter`, `network`, `getTaskDocumentsRoute`, `compositeEmbedders`, `chatCompletions`",
"message": "Unknown field `NotAFeature`: expected one of `metrics`, `logsRoute`, `editDocumentsByFunction`, `containsFilter`, `network`, `getTaskDocumentsRoute`, `compositeEmbedders`, `chatCompletions`, `multimodal`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad_request"
|
||||
|
@ -17,7 +17,7 @@ async fn create_index_no_primary_key() {
|
||||
|
||||
assert_eq!(response["status"], "enqueued");
|
||||
|
||||
let response = index.wait_task(response.uid()).await;
|
||||
let response = server.wait_task(response.uid()).await;
|
||||
|
||||
assert_eq!(response["status"], "succeeded");
|
||||
assert_eq!(response["type"], "indexCreation");
|
||||
@ -34,7 +34,7 @@ async fn create_index_with_gzip_encoded_request() {
|
||||
|
||||
assert_eq!(response["status"], "enqueued");
|
||||
|
||||
let response = index.wait_task(response.uid()).await;
|
||||
let response = server.wait_task(response.uid()).await;
|
||||
|
||||
assert_eq!(response["status"], "succeeded");
|
||||
assert_eq!(response["type"], "indexCreation");
|
||||
@ -83,7 +83,7 @@ async fn create_index_with_zlib_encoded_request() {
|
||||
|
||||
assert_eq!(response["status"], "enqueued");
|
||||
|
||||
let response = index.wait_task(response.uid()).await;
|
||||
let response = server.wait_task(response.uid()).await;
|
||||
|
||||
assert_eq!(response["status"], "succeeded");
|
||||
assert_eq!(response["type"], "indexCreation");
|
||||
@ -100,7 +100,7 @@ async fn create_index_with_brotli_encoded_request() {
|
||||
|
||||
assert_eq!(response["status"], "enqueued");
|
||||
|
||||
let response = index.wait_task(response.uid()).await;
|
||||
let response = server.wait_task(response.uid()).await;
|
||||
|
||||
assert_eq!(response["status"], "succeeded");
|
||||
assert_eq!(response["type"], "indexCreation");
|
||||
@ -117,7 +117,7 @@ async fn create_index_with_primary_key() {
|
||||
|
||||
assert_eq!(response["status"], "enqueued");
|
||||
|
||||
let response = index.wait_task(response.uid()).await.succeeded();
|
||||
let response = server.wait_task(response.uid()).await.succeeded();
|
||||
|
||||
assert_eq!(response["status"], "succeeded");
|
||||
assert_eq!(response["type"], "indexCreation");
|
||||
@ -132,7 +132,7 @@ async fn create_index_with_invalid_primary_key() {
|
||||
let index = server.unique_index();
|
||||
let (response, code) = index.add_documents(documents, Some("title")).await;
|
||||
assert_eq!(code, 202);
|
||||
index.wait_task(response.uid()).await.failed();
|
||||
server.wait_task(response.uid()).await.failed();
|
||||
|
||||
let (response, code) = index.get().await;
|
||||
assert_eq!(code, 200);
|
||||
@ -142,7 +142,7 @@ async fn create_index_with_invalid_primary_key() {
|
||||
|
||||
let (response, code) = index.add_documents(documents, Some("id")).await;
|
||||
assert_eq!(code, 202);
|
||||
index.wait_task(response.uid()).await.failed();
|
||||
server.wait_task(response.uid()).await.failed();
|
||||
|
||||
let (response, code) = index.get().await;
|
||||
assert_eq!(code, 200);
|
||||
@ -181,7 +181,7 @@ async fn error_create_existing_index() {
|
||||
|
||||
let (task, _) = index.create(Some("primary")).await;
|
||||
|
||||
let response = index.wait_task(task.uid()).await;
|
||||
let response = server.wait_task(task.uid()).await;
|
||||
let msg = format!(
|
||||
"Index `{}` already exists.",
|
||||
task["indexUid"].as_str().expect("indexUid should exist").trim_matches('"')
|
||||
|
@ -9,7 +9,7 @@ async fn create_and_delete_index() {
assert_eq!(code, 202);
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
assert_eq!(index.get().await.1, 200);

@ -17,18 +17,19 @@ async fn create_and_delete_index() {
assert_eq!(code, 202);
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
assert_eq!(index.get().await.1, 404);
}

#[actix_rt::test]
async fn error_delete_unexisting_index() {
+ let server = Server::new_shared();
let index = shared_does_not_exists_index().await;
let (task, code) = index.delete_index_fail().await;
assert_eq!(code, 202);
- index.wait_task(task.uid()).await.failed();
+ server.wait_task(task.uid()).await.failed();
let expected_response = json!({
"message": "Index `DOES_NOT_EXISTS` not found.",

@ -37,7 +38,7 @@ async fn error_delete_unexisting_index() {
"link": "https://docs.meilisearch.com/errors#index_not_found"
});
- let response = index.wait_task(task.uid()).await;
+ let response = server.wait_task(task.uid()).await;
assert_eq!(response["status"], "failed");
assert_eq!(response["error"], expected_response);
}

@ -58,7 +59,7 @@ async fn loop_delete_add_documents() {
}
for task in tasks {
- let response = index.wait_task(task).await.succeeded();
+ let response = server.wait_task(task).await.succeeded();
assert_eq!(response["status"], "succeeded", "{}", response);
}
}
@ -12,7 +12,7 @@ async fn create_and_get_index() {
assert_eq!(code, 202);
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get().await;
@ -9,7 +9,7 @@ async fn stats() {
assert_eq!(code, 202);
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.stats().await;

@ -32,7 +32,7 @@ async fn stats() {
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.stats().await;
@ -12,10 +12,10 @@ async fn update_primary_key() {
let (task, code) = index.create(None).await;
assert_eq!(code, 202);
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (task, _status_code) = index.update(Some("primary")).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get().await;

@ -42,12 +42,12 @@ async fn create_and_update_with_different_encoding() {
let (create_task, code) = index.create(None).await;
assert_eq!(code, 202);
- index.wait_task(create_task.uid()).await.succeeded();
+ server.wait_task(create_task.uid()).await.succeeded();
let index = index.with_encoder(Encoder::Brotli);
let (task, _status_code) = index.update(Some("primary")).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
}

#[actix_rt::test]

@ -58,23 +58,24 @@ async fn update_nothing() {
assert_eq!(code, 202);
- index.wait_task(task1.uid()).await.succeeded();
+ server.wait_task(task1.uid()).await.succeeded();
let (task2, code) = index.update(None).await;
assert_eq!(code, 202);
- index.wait_task(task2.uid()).await.succeeded();
+ server.wait_task(task2.uid()).await.succeeded();
}

#[actix_rt::test]
async fn error_update_existing_primary_key() {
+ let server = Server::new_shared();
let index = shared_index_with_documents().await;
let (update_task, code) = index.update_index_fail(Some("primary")).await;
assert_eq!(code, 202);
- let response = index.wait_task(update_task.uid()).await.failed();
+ let response = server.wait_task(update_task.uid()).await.failed();
let expected_response = json!({
"message": format!("Index `{}`: Index already has a primary key: `id`.", index.uid),

@ -88,12 +89,13 @@ async fn error_update_existing_primary_key() {
#[actix_rt::test]
async fn error_update_unexisting_index() {
+ let server = Server::new_shared();
let index = shared_does_not_exists_index().await;
let (task, code) = index.update_index_fail(Some("my-primary-key")).await;
assert_eq!(code, 202);
- let response = index.wait_task(task.uid()).await.failed();
+ let response = server.wait_task(task.uid()).await.failed();
let expected_response = json!({
"message": format!("Index `{}` not found.", index.uid),
@ -152,7 +152,7 @@ async fn distinct_search_with_offset_no_ranking() {
let documents = DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
let (task, _status_code) = index.update_distinct_attribute(json!(DOCUMENT_DISTINCT_KEY)).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
fn get_hits(response: &Value) -> Vec<&str> {
let hits_array = response["hits"].as_array().unwrap();

@ -211,7 +211,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let documents = DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
let (task, _status_code) = index.update_distinct_attribute(json!(DOCUMENT_DISTINCT_KEY)).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
fn get_hits(response: &Value) -> Vec<&str> {
let hits_array = response["hits"].as_array().unwrap();

@ -281,7 +281,7 @@ async fn distinct_at_search_time() {
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
let (task, _) = index.update_settings_filterable_attributes(json!(["color.main"])).await;
- let task = index.wait_task(task.uid()).await.succeeded();
+ let task = server.wait_task(task.uid()).await.succeeded();
snapshot!(task, name: "succeed");
fn get_hits(response: &Value) -> Vec<String> {
@ -425,7 +425,7 @@ async fn search_non_filterable_facets() {
let index = server.unique_index();
let (response, _code) = index.update_settings(json!({"filterableAttributes": ["title"]})).await;
// Wait for the settings update to complete
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.search_post(json!({"facets": ["doggo"]})).await;
snapshot!(code, @"400 Bad Request");

@ -456,7 +456,7 @@ async fn search_non_filterable_facets_multiple_filterable() {
let index = server.unique_index();
let (response, _code) =
index.update_settings(json!({"filterableAttributes": ["title", "genres"]})).await;
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.search_post(json!({"facets": ["doggo"]})).await;
snapshot!(code, @"400 Bad Request");

@ -486,7 +486,7 @@ async fn search_non_filterable_facets_no_filterable() {
let server = Server::new_shared();
let index = server.unique_index();
let (response, _code) = index.update_settings(json!({"filterableAttributes": []})).await;
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.search_post(json!({"facets": ["doggo"]})).await;
snapshot!(code, @"400 Bad Request");

@ -517,7 +517,7 @@ async fn search_non_filterable_facets_multiple_facets() {
let index = server.unique_index();
let (response, _uid) =
index.update_settings(json!({"filterableAttributes": ["title", "genres"]})).await;
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.search_post(json!({"facets": ["doggo", "neko"]})).await;
snapshot!(code, @"400 Bad Request");

@ -1001,7 +1001,7 @@ async fn sort_geo_reserved_attribute() {
let index = server.unique_index();
let (task, _code) = index.update_settings(json!({"sortableAttributes": ["id"]})).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let expected_response = json!({
"message": "`_geo` is a reserved keyword and thus can't be used as a sort expression. Use the _geoPoint(latitude, longitude) built-in rule to sort on _geo field coordinates.",

@ -1028,7 +1028,7 @@ async fn sort_reserved_attribute() {
let index = server.unique_index();
let (task, _code) = index.update_settings(json!({"sortableAttributes": ["id"]})).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let expected_response = json!({
"message": "`_geoDistance` is a reserved keyword and thus can't be used as a sort expression.",

@ -1054,7 +1054,7 @@ async fn sort_unsortable_attribute() {
let server = Server::new_shared();
let index = server.unique_index();
let (response, _code) = index.update_settings(json!({"sortableAttributes": ["id"]})).await;
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let expected_response = json!({
"message": format!("Index `{}`: Attribute `title` is not sortable. Available sortable attributes are: `id`.", index.uid),

@ -1081,7 +1081,7 @@ async fn sort_invalid_syntax() {
let index = server.unique_index();
let (response, _code) = index.update_settings(json!({"sortableAttributes": ["id"]})).await;
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let expected_response = json!({
"message": "Invalid syntax for the sort parameter: expected expression ending by `:asc` or `:desc`, found `title`.",

@ -1112,7 +1112,7 @@ async fn sort_unset_ranking_rule() {
json!({"sortableAttributes": ["title"], "rankingRules": ["proximity", "exactness"]}),
)
.await;
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let expected_response = json!({
"message": format!("Index `{}`: You must specify where `sort` is listed in the rankingRules setting to use the sort parameter at search time.", index.uid),

@ -1199,7 +1199,7 @@ async fn distinct_at_search_time() {
let index = server.unique_index();
let (response, _code) =
index.add_documents(json!([{"id": 1, "color": "Doggo", "machin": "Action"}]), None).await;
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.search_post(json!({"page": 0, "hitsPerPage": 2, "distinct": "doggo.truc"})).await;

@ -1214,7 +1214,7 @@ async fn distinct_at_search_time() {
"###);
let (task, _) = index.update_settings_filterable_attributes(json!(["color", "machin"])).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.search_post(json!({"page": 0, "hitsPerPage": 2, "distinct": "doggo.truc"})).await;

@ -1229,7 +1229,7 @@ async fn distinct_at_search_time() {
"###);
let (task, _) = index.update_settings_displayed_attributes(json!(["color"])).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.search_post(json!({"page": 0, "hitsPerPage": 2, "distinct": "doggo.truc"})).await;
@ -50,11 +50,11 @@ async fn test_settings_documents_indexing_swapping_and_facet_search(
let (task, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(code, 202, "{}", task);
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.update_settings(settings.clone()).await;
assert_eq!(code, 202, "{}", task);
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.facet_search(query.clone()).await;
insta::allow_duplicates! {

@ -70,11 +70,11 @@ async fn test_settings_documents_indexing_swapping_and_facet_search(
let (task, code) = index.update_settings(settings.clone()).await;
assert_eq!(code, 202, "{}", task);
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(code, 202, "{}", task);
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.facet_search(query.clone()).await;
insta::allow_duplicates! {

@ -94,7 +94,7 @@ async fn simple_facet_search() {
let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

@ -207,10 +207,10 @@ async fn simple_facet_search_on_movies() {
let (response, code) =
index.update_settings_filterable_attributes(json!(["genres", "color"])).await;
assert_eq!(202, code, "{response:?}");
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index.add_documents(documents, None).await;
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetQuery": "", "facetName": "genres", "q": "" })).await;

@ -228,7 +228,7 @@ async fn advanced_facet_search() {
index.update_settings_filterable_attributes(json!(["genres"])).await;
index.update_settings_typo_tolerance(json!({ "enabled": false })).await;
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "adventre"})).await;

@ -252,7 +252,7 @@ async fn more_advanced_facet_search() {
index.update_settings_filterable_attributes(json!(["genres"])).await;
index.update_settings_typo_tolerance(json!({ "disableOnWords": ["adventre"] })).await;
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "adventre"})).await;

@ -276,7 +276,7 @@ async fn simple_facet_search_with_max_values() {
index.update_settings_faceting(json!({ "maxValuesPerFacet": 1 })).await;
index.update_settings_filterable_attributes(json!(["genres"])).await;
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

@ -298,7 +298,7 @@ async fn simple_facet_search_by_count_with_max_values() {
.await;
index.update_settings_filterable_attributes(json!(["genres"])).await;
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

@ -314,7 +314,7 @@ async fn non_filterable_facet_search_error() {
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

@ -333,7 +333,7 @@ async fn facet_search_dont_support_words() {
let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "words"})).await;

@ -351,7 +351,7 @@ async fn simple_facet_search_with_sort_by_count() {
index.update_settings_faceting(json!({ "sortFacetValuesBy": { "*": "count" } })).await;
index.update_settings_filterable_attributes(json!(["genres"])).await;
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

@ -370,7 +370,7 @@ async fn add_documents_and_deactivate_facet_search() {
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
"facetSearch": false,

@ -378,7 +378,7 @@ async fn add_documents_and_deactivate_facet_search() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

@ -406,10 +406,10 @@ async fn deactivate_facet_search_and_add_documents() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

@ -437,10 +437,10 @@ async fn deactivate_facet_search_add_documents_and_activate_facet_search() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({

@ -448,7 +448,7 @@ async fn deactivate_facet_search_add_documents_and_activate_facet_search() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

@ -469,10 +469,10 @@ async fn deactivate_facet_search_add_documents_and_reset_facet_search() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({

@ -480,7 +480,7 @@ async fn deactivate_facet_search_add_documents_and_reset_facet_search() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

@ -920,13 +920,13 @@ async fn distinct_facet_search_on_movies() {
let (response, code) =
index.update_settings_filterable_attributes(json!(["genres", "color"])).await;
assert_eq!(202, code, "{response:?}");
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.update_settings_distinct_attribute(json!("color")).await;
assert_eq!(202, code, "{response:?}");
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index.add_documents(documents, None).await;
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetQuery": "blob", "facetName": "genres", "q": "" })).await;
@ -90,7 +90,7 @@ async fn search_with_contains_filter() {
let documents = DOCUMENTS.clone();
let (request, _code) = index.add_documents(documents, None).await;
- index.wait_task(request.uid()).await.succeeded();
+ server.wait_task(request.uid()).await.succeeded();
let (response, code) = index
.search_post(json!({

@ -257,7 +257,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
let (task, code) = index.add_documents(NESTED_DOCUMENTS.clone(), None).await;
assert_eq!(code, 202, "{task}");
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (task, code) = index
.update_settings(json!({"filterableAttributes": [{

@ -269,7 +269,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}]}))
.await;
assert_eq!(code, 202, "{task}");
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
// Check if the Equality filter works
index

@ -334,7 +334,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}]}))
.await;
assert_eq!(code, 202, "{task}");
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
// Check if the Equality filter works
index

@ -445,7 +445,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}]}))
.await;
assert_eq!(code, 202, "{task}");
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
// Check if the Equality filter returns an error
index

@ -544,7 +544,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}]}))
.await;
assert_eq!(code, 202, "{task}");
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
// Check if the Equality filter works
index
@ -26,7 +26,7 @@ async fn search_formatted_from_sdk() {
{ "id": 42, "title": "The Hitchhiker's Guide to the Galaxy" }
]);
let (response, _) = index.add_documents(documents, None).await;
- index.wait_task(response.uid()).await;
+ server.wait_task(response.uid()).await;
index
.search(

@ -65,7 +65,7 @@ async fn formatted_contain_wildcard() {
let documents = NESTED_DOCUMENTS.clone();
let (response, _) = index.add_documents(documents, None).await;
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
index.search(json!({ "q": "pésti", "attributesToRetrieve": ["father", "mother"], "attributesToHighlight": ["father", "mother", "*"], "attributesToCrop": ["doggos"], "showMatchesPosition": true }),
|response, code|

@ -398,7 +398,7 @@ async fn displayedattr_2_smol() {
let documents = NESTED_DOCUMENTS.clone();
let (response, _) = index.add_documents(documents, None).await;
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
index
.search(json!({ "attributesToRetrieve": ["father", "id"], "attributesToHighlight": ["mother"], "attributesToCrop": ["cattos"] }),

@ -596,7 +596,7 @@ async fn test_cjk_highlight() {
{ "id": 1, "title": "大卫到了扫罗那里" },
]);
let (response, _) = index.add_documents(documents, None).await;
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
index
.search(json!({"q": "で", "attributesToHighlight": ["title"]}), |response, code| {
@ -17,11 +17,11 @@ async fn index_with_documents_user_provided<'a>(
"dimensions": 2}}} ))
.await;
assert_eq!(202, code, "{response:?}");
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(202, code, "{response:?}");
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
index
}

@ -37,11 +37,11 @@ async fn index_with_documents_hf<'a>(server: &'a Server<Shared>, documents: &Val
}}} ))
.await;
assert_eq!(202, code, "{response:?}");
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(202, code, "{response:?}");
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
index
}

@ -499,7 +499,7 @@ async fn query_combination() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
- "message": "Invalid request: missing `hybrid` parameter when `vector` is present.",
+ "message": "Invalid request: missing `hybrid` parameter when `vector` or `media` are present.",
"code": "missing_search_hybrid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#missing_search_hybrid"

@ -543,7 +543,7 @@ async fn distinct_is_applied() {
let (response, code) = index.update_settings(json!({ "distinctAttribute": "distinct" } )).await;
assert_eq!(202, code, "{:?}", response);
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
// pure keyword
let (response, code) = index

@ -633,7 +633,7 @@ async fn retrieve_vectors() {
.update_settings(json!({ "displayedAttributes": ["id", "title", "desc", "_vectors"]} ))
.await;
assert_eq!(202, code, "{response:?}");
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.search_post(

@ -683,7 +683,7 @@ async fn retrieve_vectors() {
let (response, code) =
index.update_settings(json!({ "displayedAttributes": ["id", "title", "desc"]} )).await;
assert_eq!(202, code, "{response:?}");
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.search_post(
@ -99,7 +99,7 @@ async fn simple_search() {
)
.await;
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
// english
index

@ -215,7 +215,7 @@ async fn force_locales() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
// chinese detection
index

@ -293,7 +293,7 @@ async fn force_locales_with_pattern() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
// chinese detection
index

@ -369,7 +369,7 @@ async fn force_locales_with_pattern_nested() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
// chinese
index

@ -444,7 +444,7 @@ async fn force_different_locales_with_pattern() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
// force chinese
index

@ -522,7 +522,7 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
// auto infer any language
index

@ -596,7 +596,7 @@ async fn auto_infer_locales_at_search() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
index
.search(

@ -695,7 +695,7 @@ async fn force_different_locales_with_pattern_nested() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
// chinese
index

@ -773,7 +773,7 @@ async fn settings_change() {
let documents = NESTED_DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (response, _) = index
.update_settings(json!({
"searchableAttributes": ["document_en", "document_ja", "document_zh"],

@ -792,7 +792,7 @@ async fn settings_change() {
"enqueuedAt": "[date]"
}
"###);
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
// chinese
index

@ -855,7 +855,7 @@ async fn settings_change() {
"enqueuedAt": "[date]"
}
"###);
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
// chinese
index

@ -910,7 +910,7 @@ async fn invalid_locales() {
)
.await;
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.search_post(json!({"q": "Atta", "locales": ["invalid"]})).await;
snapshot!(code, @"400 Bad Request");

@ -1028,7 +1028,7 @@ async fn simple_facet_search() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (response, _) = index
.facet_search(json!({"facetName": "name_zh", "facetQuery": "進撃", "locales": ["cmn"]}))

@ -1090,7 +1090,7 @@ async fn facet_search_with_localized_attributes() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (response, _) = index
.facet_search(json!({"facetName": "name_zh", "facetQuery": "进击", "locales": ["cmn"]}))

@ -1159,7 +1159,7 @@ async fn swedish_search() {
]
}))
.await;
- index.wait_task(_response.uid()).await.succeeded();
+ server.wait_task(_response.uid()).await.succeeded();
// infer swedish
index

@ -1280,7 +1280,7 @@ async fn german_search() {
]
}))
.await;
- index.wait_task(_response.uid()).await.succeeded();
+ server.wait_task(_response.uid()).await.succeeded();
// infer swedish
index
@ -9,7 +9,7 @@ async fn index_with_documents<'a>(server: &'a Server<Shared>, documents: &Value)
let index = server.unique_index();
let (task, _status_code) = index.add_documents(documents.clone(), None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
index
}
@ -38,11 +38,11 @@ async fn test_settings_documents_indexing_swapping_and_search(
let (task, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(code, 202, "{task}");
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.update_settings(settings.clone()).await;
assert_eq!(code, 202, "{task}");
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
index.search(query.clone(), test.clone()).await;

@ -51,11 +51,11 @@ async fn test_settings_documents_indexing_swapping_and_search(
let (task, code) = index.update_settings(settings.clone()).await;
assert_eq!(code, 202, "{task}");
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(code, 202, "{task}");
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
index.search(query.clone(), test.clone()).await;
}

@ -104,7 +104,7 @@ async fn bug_5547() {
let server = Server::new_shared();
let index = server.unique_index();
let (response, _code) = index.create(None).await;
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let mut documents = Vec::new();
for i in 0..65_535 {

@ -112,7 +112,7 @@ async fn bug_5547() {
}
let (response, _code) = index.add_documents(json!(documents), Some("id")).await;
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.search_post(json!({"q": "title"})).await;
assert_eq!(code, 200);
snapshot!(response["hits"], @r###"[{"id":0,"title":"title0"},{"id":1,"title":"title1"},{"id":10,"title":"title10"},{"id":100,"title":"title100"},{"id":101,"title":"title101"},{"id":102,"title":"title102"},{"id":103,"title":"title103"},{"id":104,"title":"title104"},{"id":105,"title":"title105"},{"id":106,"title":"title106"},{"id":107,"title":"title107"},{"id":108,"title":"title108"},{"id":1000,"title":"title1000"},{"id":1001,"title":"title1001"},{"id":1002,"title":"title1002"},{"id":1003,"title":"title1003"},{"id":1004,"title":"title1004"},{"id":1005,"title":"title1005"},{"id":1006,"title":"title1006"},{"id":1007,"title":"title1007"}]"###);

@ -131,7 +131,7 @@ async fn search_with_stop_word() {
let documents = DOCUMENTS.clone();
let (task, _code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
// prefix search
index

@ -196,7 +196,7 @@ async fn search_with_typo_settings() {
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "287947" }), |response, code| {

@ -228,7 +228,7 @@ async fn phrase_search_with_stop_word() {
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "how \"to\" train \"the" }), |response, code| {

@ -308,11 +308,11 @@ async fn negative_special_cases_search() {
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (task, _status_code) =
index.update_settings(json!({"synonyms": { "escape": ["gläss"] }})).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
// There is a synonym for escape -> glass but we don't want "escape", only the derivates: glass
index

@ -338,7 +338,7 @@ async fn test_kanji_language_detection() {
{ "id": 2, "title": "הַשּׁוּעָל הַמָּהִיר (״הַחוּם״) לֹא יָכוֹל לִקְפֹּץ 9.94 מֶטְרִים, נָכוֹן? ברר, 1.5°C- בַּחוּץ!" }
]);
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "東京"}), |response, code| {

@ -361,10 +361,10 @@ async fn test_thai_language() {
{ "id": 2, "title": "สบู่สมุนไพรฝางแดงผสมว่านหางจรเข้ 100 กรัม จำนวน 6 ก้อน" }
]);
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (task, _status_code) = index.update_settings(json!({"rankingRules": ["exactness"]})).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "สบู"}), |response, code| {

@ -586,7 +586,7 @@ async fn displayed_attributes() {
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.search_post(json!({ "attributesToRetrieve": ["title", "id"] })).await;

@ -601,7 +601,7 @@ async fn placeholder_search_is_hard_limited() {
let documents: Vec<_> = (0..1200).map(|i| json!({ "id": i, "text": "I am unique!" })).collect();
let (task, _status_code) = index.add_documents(documents.into(), None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
index
.search(

@ -630,7 +630,7 @@ async fn placeholder_search_is_hard_limited() {
let (task, _status_code) =
index.update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } })).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
index
.search(

@ -665,7 +665,7 @@ async fn search_is_hard_limited() {
let documents: Vec<_> = (0..1200).map(|i| json!({ "id": i, "text": "I am unique!" })).collect();
let (task, _status_code) = index.add_documents(documents.into(), None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
index
.search(

@ -696,7 +696,7 @@ async fn search_is_hard_limited() {
let (task, _status_code) =
index.update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } })).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
index
.search(

@ -735,7 +735,7 @@ async fn faceting_max_values_per_facet() {
let documents: Vec<_> = (0..10_000).map(|id| json!({ "id": id, "number": id * 10 })).collect();
let (task, _status_code) = index.add_documents(json!(documents), None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
index
.search(

@ -752,7 +752,7 @@ async fn faceting_max_values_per_facet() {
let (task, _status_code) =
index.update_settings(json!({ "faceting": { "maxValuesPerFacet": 10_000 } })).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
index
.search(

@ -1033,7 +1033,7 @@ async fn test_degraded_score_details() {
index.add_documents(json!(documents), None).await;
// We can't really use anything else than 0ms here; otherwise, the test will get flaky.
let (res, _code) = index.update_settings(json!({ "searchCutoffMs": 0 })).await;
- index.wait_task(res.uid()).await.succeeded();
+ server.wait_task(res.uid()).await.succeeded();
index
.search(

@ -1126,7 +1126,7 @@ async fn camelcased_words() {
{ "id": 4, "title": "testab" },
]);
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "deLonghi"}), |response, code| {

@ -1345,12 +1345,12 @@ async fn simple_search_with_strange_synonyms() {
let (task, _status_code) =
index.update_settings(json!({ "synonyms": {"&": ["to"], "to": ["&"]} })).await;
- let r = index.wait_task(task.uid()).await.succeeded();
+ let r = server.wait_task(task.uid()).await.succeeded();
snapshot!(r["status"], @r###""succeeded""###);
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "How to train"}), |response, code| {

@ -1416,11 +1416,11 @@ async fn change_attributes_settings() {
let documents = NESTED_DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(json!(documents), None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (task,_status_code) =
index.update_settings(json!({ "searchableAttributes": ["father", "mother", "doggos"], "filterableAttributes": ["doggos"] })).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
// search
index

@ -1923,7 +1923,7 @@ async fn change_facet_casing() {
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index
.add_documents(

@ -1936,7 +1936,7 @@ async fn change_facet_casing() {
None,
)
.await;
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index
.add_documents(

@ -1949,7 +1949,7 @@ async fn change_facet_casing() {
None,
)
.await;
- index.wait_task(response.uid()).await.succeeded();
+ server.wait_task(response.uid()).await.succeeded();
index
.search(json!({ "facets": ["dog"] }), |response, code| {

@ -2062,7 +2062,7 @@ async fn simple_search_changing_unrelated_settings() {
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "Dragon"}), |response, code| {

@ -2084,7 +2084,7 @@ async fn simple_search_changing_unrelated_settings() {
let (task, _status_code) =
index.update_settings(json!({ "filterableAttributes": ["title"] })).await;
- let r = index.wait_task(task.uid()).await.succeeded();
+ let r = server.wait_task(task.uid()).await.succeeded();
snapshot!(r["status"], @r###""succeeded""###);
index

@ -2106,7 +2106,7 @@ async fn simple_search_changing_unrelated_settings() {
.await;
let (task, _status_code) = index.update_settings(json!({ "filterableAttributes": [] })).await;
- let r = index.wait_task(task.uid()).await.succeeded();
+ let r = server.wait_task(task.uid()).await.succeeded();
snapshot!(r["status"], @r###""succeeded""###);
index
@ -21,7 +21,7 @@ pub async fn shared_movies_index() -> &'static Index<'static, Shared> {
|
||||
|
||||
let documents = DOCUMENTS.clone();
|
||||
let (response, _code) = movies_index.add_documents(documents, None).await;
|
||||
movies_index.wait_task(response.uid()).await.succeeded();
|
||||
server.wait_task(response.uid()).await.succeeded();
|
||||
|
||||
let (value, _) = movies_index
|
||||
.update_settings(json!({
|
||||
@ -37,7 +37,7 @@ pub async fn shared_movies_index() -> &'static Index<'static, Shared> {
|
||||
]
|
||||
}))
|
||||
.await;
|
||||
movies_index.wait_task(value.uid()).await.succeeded();
|
||||
server.wait_task(value.uid()).await.succeeded();
|
||||
movies_index.to_shared()
|
||||
})
|
||||
.await
|
||||
@ -52,7 +52,7 @@ pub async fn shared_batman_index() -> &'static Index<'static, Shared> {
|
||||
|
||||
let documents = SCORE_DOCUMENTS.clone();
|
||||
let (response, _code) = batman_index.add_documents(documents, None).await;
|
||||
batman_index.wait_task(response.uid()).await.succeeded();
|
||||
server.wait_task(response.uid()).await.succeeded();
|
||||
|
||||
let (value, _) = batman_index
|
||||
.update_settings(json!({
|
||||
@ -68,7 +68,7 @@ pub async fn shared_batman_index() -> &'static Index<'static, Shared> {
|
||||
]
|
||||
}))
|
||||
.await;
|
||||
batman_index.wait_task(value.uid()).await.succeeded();
|
||||
server.wait_task(value.uid()).await.succeeded();
|
||||
batman_index.to_shared()
|
||||
})
|
||||
.await
|
||||
@ -1085,14 +1085,14 @@ async fn federation_filter() {
|
||||
|
||||
let documents = FRUITS_DOCUMENTS.clone();
|
||||
let (value, _) = index.add_documents(documents, None).await;
|
||||
index.wait_task(value.uid()).await.succeeded();
|
||||
server.wait_task(value.uid()).await.succeeded();
|
||||
|
||||
let (value, _) = index
|
||||
.update_settings(
|
||||
json!({"searchableAttributes": ["name"], "filterableAttributes": ["BOOST"]}),
|
||||
)
|
||||
.await;
|
||||
index.wait_task(value.uid()).await.succeeded();
|
||||
server.wait_task(value.uid()).await.succeeded();
|
||||
|
||||
let (response, code) = server
|
||||
.multi_search(json!({"federation": {}, "queries": [
|
||||
@ -1152,7 +1152,7 @@ async fn federation_sort_same_indexes_same_criterion_same_direction() {
|
||||
|
||||
let documents = NESTED_DOCUMENTS.clone();
|
||||
let (value, _) = index.add_documents(documents, None).await;
|
||||
index.wait_task(value.uid()).await.succeeded();
|
||||
server.wait_task(value.uid()).await.succeeded();
|
||||
|
||||
let (value, _) = index
|
||||
.update_settings(json!({
|
||||
@ -1167,7 +1167,7 @@ async fn federation_sort_same_indexes_same_criterion_same_direction() {
|
||||
]
|
||||
}))
|
||||
.await;
|
||||
index.wait_task(value.uid()).await.succeeded();
|
||||
server.wait_task(value.uid()).await.succeeded();
|
||||
|
||||
// two identical placeholder searches should have all results from the first query
|
||||
let (response, code) = server
|
||||
@ -1365,7 +1365,7 @@ async fn federation_sort_same_indexes_same_criterion_opposite_direction() {
|
||||
|
||||
let documents = NESTED_DOCUMENTS.clone();
|
||||
let (value, _) = index.add_documents(documents, None).await;
|
||||
index.wait_task(value.uid()).await.succeeded();
|
||||
server.wait_task(value.uid()).await.succeeded();
|
||||
|
||||
let (value, _) = index
|
||||
.update_settings(json!({
|
||||
@ -1380,7 +1380,7 @@ async fn federation_sort_same_indexes_same_criterion_opposite_direction() {
|
||||
]
|
||||
}))
|
||||
.await;
|
||||
index.wait_task(value.uid()).await.succeeded();
|
||||
server.wait_task(value.uid()).await.succeeded();
|
||||
|
||||
// two identical placeholder searches should have all results from the first query
|
||||
let (response, code) = server
|
||||
@ -1424,7 +1424,7 @@ async fn federation_sort_same_indexes_different_criterion_same_direction() {
|
||||
|
||||
let documents = NESTED_DOCUMENTS.clone();
|
||||
let (value, _) = index.add_documents(documents, None).await;
|
||||
index.wait_task(value.uid()).await.succeeded();
|
||||
server.wait_task(value.uid()).await.succeeded();
|
||||
|
||||
let (value, _) = index
|
||||
.update_settings(json!({
|
||||
@ -1439,7 +1439,7 @@ async fn federation_sort_same_indexes_different_criterion_same_direction() {
|
||||
]
|
||||
}))
|
||||
.await;
|
||||
index.wait_task(value.uid()).await.succeeded();
|
||||
server.wait_task(value.uid()).await.succeeded();
|
||||
|
||||
// return mothers and fathers ordered across fields.
|
||||
let (response, code) = server
|
||||
@ -1638,7 +1638,7 @@ async fn federation_sort_same_indexes_different_criterion_opposite_direction() {
|
||||
|
||||
let documents = NESTED_DOCUMENTS.clone();
|
||||
let (value, _) = index.add_documents(documents, None).await;
|
||||
index.wait_task(value.uid()).await.succeeded();
|
||||
server.wait_task(value.uid()).await.succeeded();
|
||||
|
||||
let (value, _) = index
|
||||
.update_settings(json!({
|
||||
@ -1653,7 +1653,7 @@ async fn federation_sort_same_indexes_different_criterion_opposite_direction() {
|
||||
]
|
||||
}))
|
||||
.await;
|
||||
index.wait_task(value.uid()).await.succeeded();
|
||||
server.wait_task(value.uid()).await.succeeded();
|
||||
|
||||
// two identical placeholder searches should have all results from the first query
|
||||
let (response, code) = server
|
||||
@ -3048,14 +3048,14 @@ async fn federation_invalid_weight() {
|
||||
|
||||
let documents = FRUITS_DOCUMENTS.clone();
|
||||
let (value, _) = index.add_documents(documents, None).await;
|
||||
index.wait_task(value.uid()).await.succeeded();
|
||||
server.wait_task(value.uid()).await.succeeded();
|
||||
|
||||
let (value, _) = index
|
||||
.update_settings(
|
||||
json!({"searchableAttributes": ["name"], "filterableAttributes": ["BOOST"]}),
|
||||
)
|
||||
.await;
|
||||
index.wait_task(value.uid()).await.succeeded();
|
||||
server.wait_task(value.uid()).await.succeeded();
|
||||
|
||||
let (response, code) = server
|
||||
.multi_search(json!({"federation": {}, "queries": [
|
||||
@ -3082,14 +3082,14 @@ async fn federation_null_weight() {
|
||||
|
||||
let documents = FRUITS_DOCUMENTS.clone();
|
||||
let (value, _) = index.add_documents(documents, None).await;
|
||||
index.wait_task(value.uid()).await.succeeded();
|
||||
server.wait_task(value.uid()).await.succeeded();
|
||||
|
||||
let (value, _) = index
|
||||
.update_settings(
|
||||
json!({"searchableAttributes": ["name"], "filterableAttributes": ["BOOST"]}),
|
||||
)
|
||||
.await;
|
||||
index.wait_task(value.uid()).await.succeeded();
|
||||
server.wait_task(value.uid()).await.succeeded();
|
||||
|
||||
let (response, code) = server
|
||||
.multi_search(json!({"federation": {}, "queries": [
@ -3150,7 +3150,7 @@ async fn federation_federated_contains_pagination() {

let documents = FRUITS_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();

// fail when a federated query contains "limit"
let (response, code) = server
@ -3230,11 +3230,11 @@ async fn federation_federated_contains_facets() {
)
.await;

index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();

let documents = FRUITS_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();

// empty facets are actually OK
let (response, code) = server
@ -3314,7 +3314,7 @@ async fn federation_non_faceted_for_an_index() {
)
.await;

fruits_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();

let fruits_no_name_index = server.unique_index_with_prefix("fruits-no-name");

@ -3324,18 +3324,18 @@ async fn federation_non_faceted_for_an_index() {
)
.await;

fruits_no_name_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();

let fruits_no_facets_index = server.unique_index_with_prefix("fruits-no-facets");

let (value, _) =
fruits_no_facets_index.update_settings(json!({"searchableAttributes": ["name"]})).await;

fruits_no_facets_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();

let documents = FRUITS_DOCUMENTS.clone();
let (value, _) = fruits_no_facets_index.add_documents(documents, None).await;
fruits_no_facets_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();

// fails
let (response, code) = server
@ -3435,7 +3435,7 @@ async fn federation_non_federated_contains_federation_option() {

let documents = FRUITS_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();

// fail when a non-federated query contains "federationOptions"
let (response, code) = server
@ -3473,12 +3473,12 @@ async fn federation_vector_single_index() {
}
}}))
.await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();

let documents = VECTOR_DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();

// same embedder
let (response, code) = server
@ -3670,12 +3670,12 @@ async fn federation_vector_two_indexes() {
},
}}))
.await;
vectors_animal_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();

let documents = VECTOR_DOCUMENTS.clone();
let (value, code) = vectors_animal_index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
vectors_animal_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();

let vectors_sentiment_index = server.unique_index_with_prefix("vectors-sentiment");

@ -3687,12 +3687,12 @@ async fn federation_vector_two_indexes() {
}
}}))
.await;
vectors_sentiment_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();

let documents = VECTOR_DOCUMENTS.clone();
let (value, code) = vectors_sentiment_index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
vectors_sentiment_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();

let (response, code) = server
.multi_search(json!({"federation": {}, "queries": [
@ -4154,7 +4154,7 @@ async fn federation_facets_different_indexes_same_facet() {

let documents = SCORE_DOCUMENTS.clone();
let (value, _) = batman_2_index.add_documents(documents, None).await;
batman_2_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();

let (value, _) = batman_2_index
.update_settings(json!({
@ -4170,7 +4170,7 @@ async fn federation_facets_different_indexes_same_facet() {
]
}))
.await;
batman_2_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();

// return titles ordered across indexes
let (response, code) = server
@ -4677,7 +4677,7 @@ async fn federation_facets_same_indexes() {

let documents = NESTED_DOCUMENTS.clone();
let (value, _) = doggos_index.add_documents(documents, None).await;
doggos_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();

let (value, _) = doggos_index
.update_settings(json!({
@ -4692,13 +4692,13 @@ async fn federation_facets_same_indexes() {
]
}))
.await;
doggos_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();

let doggos2_index = server.unique_index_with_prefix("doggos_2");

let documents = NESTED_DOCUMENTS.clone();
let (value, _) = doggos2_index.add_documents(documents, None).await;
doggos2_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();

let (value, _) = doggos2_index
.update_settings(json!({
@ -4713,7 +4713,7 @@ async fn federation_facets_same_indexes() {
]
}))
.await;
doggos2_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();

let (response, code) = server
.multi_search(json!({"federation": {
@ -4980,7 +4980,7 @@ async fn federation_inconsistent_merge_order() {

let documents = DOCUMENTS.clone();
let (value, _) = movies2_index.add_documents(documents, None).await;
movies2_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();

let (value, _) = movies2_index
.update_settings(json!({
@ -4999,7 +4999,7 @@ async fn federation_inconsistent_merge_order() {
}
}))
.await;
movies2_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();

let batman_index = shared_batman_index().await;

@ -114,14 +114,14 @@ async fn ensure_placeholder_search_hit_count_valid() {
}
]);
let (task, _code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

let (response, _code) = index
.update_settings(
json!({ "rankingRules": ["distinct:asc"], "distinctAttribute": "distinct"}),
)
.await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();

for page in 0..=4 {
index

@ -9,7 +9,7 @@ async fn index_with_documents<'a>(server: &'a Server<Shared>, documents: &Value)
let index = server.unique_index();

let (task, _code) = index.add_documents(documents.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
}

@ -65,7 +65,7 @@ async fn search_no_searchable_attribute_set() {
.await;

let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

index
.search(
@ -78,7 +78,7 @@ async fn search_no_searchable_attribute_set() {
.await;

let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

index
.search(
@ -109,7 +109,7 @@ async fn search_on_all_attributes_restricted_set() {
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
let (task, _status_code) = index.update_settings_searchable_attributes(json!(["title"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

index
.search(json!({"q": "Captain Marvel", "attributesToSearchOn": ["*"]}), |response, code| {
@ -194,7 +194,7 @@ async fn word_ranking_rule_order_exact_words() {
let (task, _status_code) = index
.update_settings_typo_tolerance(json!({"disableOnWords": ["Captain", "Marvel"]}))
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

// simple search should return 2 documents (ids: 2 and 3).
index
@ -360,7 +360,7 @@ async fn search_on_exact_field() {
let (response, code) =
index.update_settings_typo_tolerance(json!({ "disableOnAttributes": ["exact"] })).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
// Searching on an exact attribute should only return the document matching without typo.
index
.search(json!({"q": "Marvel", "attributesToSearchOn": ["exact"]}), |response, code| {
@ -557,7 +557,7 @@ async fn nested_search_on_title_restricted_set_with_suffix_wildcard() {
let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
let (task, _status_code) =
index.update_settings_searchable_attributes(json!(["details.title"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

index
.search(
@ -595,7 +595,7 @@ async fn nested_search_no_searchable_attribute_set_with_any_wildcard() {
.await;

let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

index
.search(
@ -608,7 +608,7 @@ async fn nested_search_no_searchable_attribute_set_with_any_wildcard() {
.await;

let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

index
.search(

@ -7,7 +7,7 @@ async fn set_and_reset_distinct_attribute() {
let index = server.unique_index();

let (task1, _code) = index.update_settings(json!({ "distinctAttribute": "test"})).await;
index.wait_task(task1.uid()).await.succeeded();
server.wait_task(task1.uid()).await.succeeded();

let (response, _) = index.settings().await;

@ -15,7 +15,7 @@ async fn set_and_reset_distinct_attribute() {

let (task2, _status_code) = index.update_settings(json!({ "distinctAttribute": null })).await;

index.wait_task(task2.uid()).await.succeeded();
server.wait_task(task2.uid()).await.succeeded();

let (response, _) = index.settings().await;

@ -28,7 +28,7 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {
let index = server.unique_index();

let (update_task1, _code) = index.update_distinct_attribute(json!("test")).await;
index.wait_task(update_task1.uid()).await.succeeded();
server.wait_task(update_task1.uid()).await.succeeded();

let (response, _) = index.get_distinct_attribute().await;

@ -36,7 +36,7 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {

let (update_task2, _status_code) = index.update_distinct_attribute(json!(null)).await;

index.wait_task(update_task2.uid()).await.succeeded();
server.wait_task(update_task2.uid()).await.succeeded();

let (response, _) = index.get_distinct_attribute().await;

@ -58,7 +58,7 @@ macro_rules! test_setting_routes {
let index = server.unique_index();
let (response, code) = index.create(None).await;
assert_eq!(code, 202, "{response}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let url = format!("/indexes/{}/settings/{}",
index.uid,
stringify!($setting)
@ -209,7 +209,7 @@ async fn get_settings() {
let server = Server::new_shared();
let index = server.unique_index();
let (response, _code) = index.create(None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200);
let settings = response.as_object().unwrap();
@ -247,6 +247,20 @@ async fn get_settings() {
assert_eq!(settings["prefixSearch"], json!("indexingTime"));
assert_eq!(settings["facetSearch"], json!(true));
assert_eq!(settings["embedders"], json!({}));
assert_eq!(settings["synonyms"], json!({}));
assert_eq!(
settings["typoTolerance"],
json!({
"enabled": true,
"minWordSizeForTypos": {
"oneTypo": 5,
"twoTypos": 9
},
"disableOnWords": [],
"disableOnAttributes": [],
"disableOnNumbers": false
})
);
}

#[actix_rt::test]
@ -254,7 +268,7 @@ async fn secrets_are_hidden_in_settings() {
let server = Server::new_shared();
let index = server.unique_index();
let (response, _code) = index.create(None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();

let (response, code) = index
.update_settings(json!({
@ -285,7 +299,7 @@ async fn secrets_are_hidden_in_settings() {

let settings_update_uid = response.uid();

index.wait_task(settings_update_uid).await.succeeded();
server.wait_task(settings_update_uid).await.succeeded();

let (response, code) = index.settings().await;
meili_snap::snapshot!(code, @"200 OK");
@ -384,14 +398,14 @@ async fn test_partial_update() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _code) = index.update_settings(json!({"displayedAttributes": ["foo"]})).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200);
assert_eq!(response["displayedAttributes"], json!(["foo"]));
assert_eq!(response["searchableAttributes"], json!(["*"]));

let (task, _) = index.update_settings(json!({"searchableAttributes": ["bar"]})).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

let (response, code) = index.settings().await;
assert_eq!(code, 200);
@ -406,7 +420,7 @@ async fn error_delete_settings_unexisting_index() {
let (task, code) = index.delete_settings().await;
assert_eq!(code, 202);

index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
}

#[actix_rt::test]
@ -424,12 +438,19 @@ async fn reset_all_settings() {

let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();

let (update_task,_status_code) = index
.update_settings(json!({"displayedAttributes": ["name", "age"], "searchableAttributes": ["name"], "stopWords": ["the"], "filterableAttributes": ["age"], "synonyms": {"puppy": ["dog", "doggo", "potat"] }}))
let (update_task, _status_code) = index
.update_settings(json!({
"displayedAttributes": ["name", "age"],
"searchableAttributes": ["name"],
"stopWords": ["the"],
"filterableAttributes": ["age"],
"synonyms": {"puppy": ["dog", "doggo", "potat"] },
"typoTolerance": {"disableOnNumbers": true}
}))
.await;
index.wait_task(update_task.uid()).await.succeeded();
server.wait_task(update_task.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200);
assert_eq!(response["displayedAttributes"], json!(["name", "age"]));
@ -437,9 +458,22 @@ async fn reset_all_settings() {
assert_eq!(response["stopWords"], json!(["the"]));
assert_eq!(response["synonyms"], json!({"puppy": ["dog", "doggo", "potat"] }));
assert_eq!(response["filterableAttributes"], json!(["age"]));
assert_eq!(
response["typoTolerance"],
json!({
"enabled": true,
"minWordSizeForTypos": {
"oneTypo": 5,
"twoTypos": 9
},
"disableOnWords": [],
"disableOnAttributes": [],
"disableOnNumbers": true
})
);

let (delete_task, _status_code) = index.delete_settings().await;
index.wait_task(delete_task.uid()).await.succeeded();
server.wait_task(delete_task.uid()).await.succeeded();

let (response, code) = index.settings().await;
assert_eq!(code, 200);
@ -448,6 +482,19 @@ async fn reset_all_settings() {
assert_eq!(response["stopWords"], json!([]));
assert_eq!(response["filterableAttributes"], json!([]));
assert_eq!(response["synonyms"], json!({}));
assert_eq!(
response["typoTolerance"],
json!({
"enabled": true,
"minWordSizeForTypos": {
"oneTypo": 5,
"twoTypos": 9
},
"disableOnWords": [],
"disableOnAttributes": [],
"disableOnNumbers": false
})
);

let (response, code) = index.get_document(1, None).await;
assert_eq!(code, 200);
@ -460,11 +507,11 @@ async fn update_setting_unexisting_index() {
let index = server.unique_index();
let (task, code) = index.update_settings(json!({})).await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (_response, code) = index.get().await;
assert_eq!(code, 200);
let (task, _status_code) = index.delete_settings().await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
}

#[actix_rt::test]
@ -507,7 +554,7 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {
let index = server.unique_index();

let (task, _code) = index.update_distinct_attribute(json!("test")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

let (response, _) = index.get_distinct_attribute().await;

@ -515,7 +562,7 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {

let (task, _status_code) = index.update_distinct_attribute(json!(null)).await;

index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

let (response, _) = index.get_distinct_attribute().await;

@ -540,7 +587,7 @@ async fn granular_filterable_attributes() {
{ "attributePatterns": ["default-facet-search"], "features": { "filter": {"equality": true, "comparison": true} } },
] })).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();

let (response, code) = index.settings().await;
assert_eq!(code, 200, "{response}");

@ -30,7 +30,7 @@ async fn attribute_scale_search() {
let index = server.unique_index();

let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

let (response, code) = index
.update_settings(json!({
@ -39,7 +39,7 @@ async fn attribute_scale_search() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();

// the expected order is [1, 3, 2] instead of [3, 1, 2]
// because the attribute scale doesn't make the difference between 1 and 3.
@ -103,7 +103,7 @@ async fn attribute_scale_phrase_search() {
let index = server.unique_index();

let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

let (task, _code) = index
.update_settings(json!({
@ -111,7 +111,7 @@ async fn attribute_scale_phrase_search() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

// the expected order is [1, 3] instead of [3, 1]
// because the attribute scale doesn't make the difference between 1 and 3.
@ -171,7 +171,7 @@ async fn word_scale_set_and_reset() {
let index = server.unique_index();

let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

// Set and reset the setting ensuring the swap between the 2 settings is applied.
let (update_task1, _code) = index
@ -180,7 +180,7 @@ async fn word_scale_set_and_reset() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
index.wait_task(update_task1.uid()).await.succeeded();
server.wait_task(update_task1.uid()).await.succeeded();

let (update_task2, _code) = index
.update_settings(json!({
@ -188,7 +188,7 @@ async fn word_scale_set_and_reset() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
index.wait_task(update_task2.uid()).await.succeeded();
server.wait_task(update_task2.uid()).await.succeeded();

// [3, 1, 2]
index
@ -286,7 +286,7 @@ async fn attribute_scale_default_ranking_rules() {
let index = server.unique_index();

let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

let (response, code) = index
.update_settings(json!({
@ -294,7 +294,7 @@ async fn attribute_scale_default_ranking_rules() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();

// the expected order is [3, 1, 2]
index

@ -15,7 +15,7 @@ async fn set_and_reset() {
"dictionary": ["J.R.R.", "J. R. R."],
}))
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

let (response, _) = index.settings().await;
snapshot!(json_string!(response["nonSeparatorTokens"]), @r###"
@ -45,7 +45,7 @@ async fn set_and_reset() {
}))
.await;

index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

let (response, _) = index.settings().await;
snapshot!(json_string!(response["nonSeparatorTokens"]), @"[]");
@ -74,7 +74,7 @@ async fn set_and_search() {
let index = server.unique_index();

let (add_task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(add_task.uid()).await.succeeded();
server.wait_task(add_task.uid()).await.succeeded();

let (update_task, _code) = index
.update_settings(json!({
@ -83,7 +83,7 @@ async fn set_and_search() {
"dictionary": ["#", "A#", "B#", "C#", "D#", "E#", "F#", "G#"],
}))
.await;
index.wait_task(update_task.uid()).await.succeeded();
server.wait_task(update_task.uid()).await.succeeded();

index
.search(json!({"q": "&", "attributesToHighlight": ["content"]}), |response, code| {
@ -228,7 +228,7 @@ async fn advanced_synergies() {
let index = server.unique_index();

let (add_task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(add_task.uid()).await.succeeded();
server.wait_task(add_task.uid()).await.succeeded();

let (update_task, _code) = index
.update_settings(json!({
@ -243,7 +243,7 @@ async fn advanced_synergies() {
}
}))
.await;
index.wait_task(update_task.uid()).await.succeeded();
server.wait_task(update_task.uid()).await.succeeded();

index
.search(json!({"q": "J.R.R.", "attributesToHighlight": ["content"]}), |response, code| {
@ -353,7 +353,7 @@ async fn advanced_synergies() {
"dictionary": ["J.R.R.", "J. R. R.", "J.K.", "J. K."],
}))
.await;
index.wait_task(_response.uid()).await.succeeded();
server.wait_task(_response.uid()).await.succeeded();

index
.search(json!({"q": "jk", "attributesToHighlight": ["content"]}), |response, code| {

@ -97,7 +97,7 @@ async fn task_bad_types() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `upgradeDatabase`.",
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"
@ -108,7 +108,7 @@ async fn task_bad_types() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `upgradeDatabase`.",
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"
@ -119,7 +119,7 @@ async fn task_bad_types() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `upgradeDatabase`.",
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"

@ -43,7 +43,7 @@ async fn version_too_old() {
std::fs::write(db_path.join("VERSION"), "1.11.9999").unwrap();
let options = Opt { experimental_dumpless_upgrade: true, ..default_settings };
let err = Server::new_with_options(options).await.map(|_| ()).unwrap_err();
snapshot!(err, @"Database version 1.11.9999 is too old for the experimental dumpless upgrade feature. Please generate a dump using the v1.11.9999 and import it in the v1.15.2");
snapshot!(err, @"Database version 1.11.9999 is too old for the experimental dumpless upgrade feature. Please generate a dump using the v1.11.9999 and import it in the v1.16.0");
}

#[actix_rt::test]
@ -58,7 +58,7 @@ async fn version_requires_downgrade() {
std::fs::write(db_path.join("VERSION"), format!("{major}.{minor}.{patch}")).unwrap();
let options = Opt { experimental_dumpless_upgrade: true, ..default_settings };
let err = Server::new_with_options(options).await.map(|_| ()).unwrap_err();
snapshot!(err, @"Database version 1.15.3 is higher than the Meilisearch version 1.15.2. Downgrade is not supported");
snapshot!(err, @"Database version 1.16.1 is higher than the Meilisearch version 1.16.0. Downgrade is not supported");
}

#[actix_rt::test]

@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.2"
"upgradeTo": "v1.16.0"
},
"stats": {
"totalNbTasks": 1,

@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.2"
"upgradeTo": "v1.16.0"
},
"stats": {
"totalNbTasks": 1,

@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.2"
"upgradeTo": "v1.16.0"
},
"stats": {
"totalNbTasks": 1,

@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.2"
"upgradeTo": "v1.16.0"
},
"error": null,
"duration": "[duration]",

@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.2"
"upgradeTo": "v1.16.0"
},
"error": null,
"duration": "[duration]",

@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.2"
"upgradeTo": "v1.16.0"
},
"error": null,
"duration": "[duration]",

@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.2"
"upgradeTo": "v1.16.0"
},
"stats": {
"totalNbTasks": 1,

@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.2"
"upgradeTo": "v1.16.0"
},
"error": null,
"duration": "[duration]",

@ -1,7 +1,10 @@
use std::collections::BTreeMap;
use std::sync::atomic::AtomicUsize;
use std::time::Duration;

use meili_snap::{json_string, snapshot};
use reqwest::IntoUrl;
use tokio::sync::mpsc;
use wiremock::matchers::{method, path};
use wiremock::{Mock, MockServer, Request, ResponseTemplate};

@ -334,6 +337,41 @@ async fn create_mock_raw() -> (MockServer, Value) {
(mock_server, embedder_settings)
}

async fn create_faulty_mock_raw(sender: mpsc::Sender<()>) -> (MockServer, Value) {
let mock_server = MockServer::start().await;
let count = AtomicUsize::new(0);

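// Always reply with HTTP 500; after the first five failures, notify the test through `sender` and make every further response hang forever.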
Mock::given(method("POST"))
.and(path("/"))
.respond_with(move |_req: &Request| {
let count = count.fetch_add(1, std::sync::atomic::Ordering::SeqCst);

if count >= 5 {
let _ = sender.try_send(());
ResponseTemplate::new(500)
.set_delay(Duration::from_secs(u64::MAX)) // Make the response hang forever
.set_body_string("Service Unavailable")
} else {
ResponseTemplate::new(500).set_body_string("Service Unavailable")
}
})
.mount(&mock_server)
.await;

let url = mock_server.uri();

let embedder_settings = json!({
"source": "rest",
"url": url,
"dimensions": 3,
"request": "{{text}}",
"response": "{{embedding}}",
"documentTemplate": "{{doc.name}}"
});

(mock_server, embedder_settings)
}

pub async fn post<T: IntoUrl>(url: T, text: &str) -> reqwest::Result<reqwest::Response> {
reqwest::Client::builder().build()?.post(url).json(&json!(text)).send().await
}
@ -370,13 +408,13 @@ async fn bad_request() {
.await;
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request`: \"{{text}}\" not found",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
}
"###);
{
"message": "Error while generating embeddings: user error: in `request`: \"{{text}}\" not found\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
}
"###);

// A repeat string appears inside a repeated value
let (response, code) = index
@ -399,7 +437,7 @@ async fn bad_request() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request.input.input`: \"{{..}}\" appears nested inside of a value that is itself repeated",
"message": "Error while generating embeddings: user error: in `request.input.input`: \"{{..}}\" appears nested inside of a value that is itself repeated\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -422,7 +460,7 @@ async fn bad_request() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request.input.repeat`: \"{{..}}\" appears outside of an array",
"message": "Error while generating embeddings: user error: in `request.input.repeat`: \"{{..}}\" appears outside of an array\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -445,7 +483,7 @@ async fn bad_request() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request.input`: \"{{..}}\" expected at position #1, but found at position #0",
"message": "Error while generating embeddings: user error: in `request.input`: \"{{..}}\" expected at position #1, but found at position #0\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -468,7 +506,7 @@ async fn bad_request() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request.input`: \"{{..}}\" expected at position #1, but found at position #2",
"message": "Error while generating embeddings: user error: in `request.input`: \"{{..}}\" expected at position #1, but found at position #2\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -491,7 +529,7 @@ async fn bad_request() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request.input[0]`: Expected \"{{text}}\" inside of the repeated value",
"message": "Error while generating embeddings: user error: in `request.input[0]`: Expected \"{{text}}\" inside of the repeated value\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -518,7 +556,7 @@ async fn bad_request() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request.data`: Found \"{{..}}\", but it was already present in `request.input`",
"message": "Error while generating embeddings: user error: in `request.data`: Found \"{{..}}\", but it was already present in `request.input`\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -539,7 +577,7 @@ async fn bad_request() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request.data`: Found \"{{text}}\", but it was already present in `request.input`",
"message": "Error while generating embeddings: user error: in `request.data`: Found \"{{text}}\", but it was already present in `request.input`\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -560,7 +598,7 @@ async fn bad_request() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request.repeated.data[1]`: Found \"{{text}}\", but it was already present in `request.repeated.input`",
"message": "Error while generating embeddings: user error: in `request.repeated.data[1]`: Found \"{{text}}\", but it was already present in `request.repeated.input`\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -581,7 +619,7 @@ async fn bad_request() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request.data`: Found \"{{text}}\", but it was already present in `request.input[0]` (repeated)",
"message": "Error while generating embeddings: user error: in `request.data`: Found \"{{text}}\", but it was already present in `request.input[0]` (repeated)\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -882,7 +920,7 @@ async fn bad_settings() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request`: \"{{text}}\" not found",
"message": "Error while generating embeddings: user error: in `request`: \"{{text}}\" not found\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -2111,3 +2149,71 @@ async fn searchable_reindex() {
}
"###);
}

#[actix_rt::test]
async fn last_error_stats() {
let (sender, mut receiver) = mpsc::channel(10);
let (_mock, setting) = create_faulty_mock_raw(sender).await;
let server = get_server_vector().await;
let index = server.index("doggo");

let (response, code) = index
.update_settings(json!({
"embedders": {
"rest": setting,
},
}))
.await;
snapshot!(code, @"202 Accepted");
let task = server.wait_task(response.uid()).await;
snapshot!(task["status"], @r###""succeeded""###);
let documents = json!([
{"id": 0, "name": "will_return_500"},
{"id": 1, "name": "will_error"},
{"id": 2, "name": "must_error"},
]);
let (_value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");

// The task will eventually fail, so let's not wait for it.
// Let's just wait for the server's signal
receiver.recv().await;

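// Inspect the in-flight batch: its embedder stats should expose the request counters and the last embedder error.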
let (response, _code) = index.filtered_batches(&[], &[], &[]).await;
snapshot!(json_string!(response["results"][0], {
".progress" => "[ignored]",
".stats.embedderRequests.total" => "[ignored]",
".stats.embedderRequests.failed" => "[ignored]",
".startedAt" => "[ignored]"
}), @r#"
{
"uid": 1,
"progress": "[ignored]",
"details": {
"receivedDocuments": 3,
"indexedDocuments": null
},
"stats": {
"totalNbTasks": 1,
"status": {
"processing": 1
},
"types": {
"documentAdditionOrUpdate": 1
},
"indexUids": {
"doggo": 1
},
"embedderRequests": {
"total": "[ignored]",
"failed": "[ignored]",
"lastError": "runtime error: received internal error HTTP 500 from embedding server\n - server replied with `Service Unavailable`"
}
},
"duration": null,
"startedAt": "[ignored]",
"finishedAt": null,
"batchStrategy": "batched all enqueued tasks"
}
"#);
}