Merge branch 'main' into default-key

Mubelotix
2025-07-08 12:21:46 +02:00
192 changed files with 8571 additions and 2120 deletions
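
Most hunks below apply one recurring change: tests now wait for asynchronous tasks through the server handle (`server.wait_task(...)`) instead of the index handle (`index.wait_task(...)`), matching the shared-server helpers (`Server::new_shared()`, `server.unique_index()`) these tests already use. A minimal sketch of the pattern, with hypothetical stub types standing in for the real test harness:

// Hypothetical stubs; the real types live in the Meilisearch test harness.
struct Task(u64);
impl Task {
    fn uid(&self) -> u64 { self.0 }
}

struct Server;
impl Server {
    // Task polling goes through the (possibly shared) server handle,
    // so it works regardless of which index enqueued the task.
    async fn wait_task(&self, _uid: u64) -> Task { Task(0) }
}

struct Index;
impl Index {
    async fn add_documents(&self) -> Task { Task(0) }
}

async fn add_and_wait(server: &Server, index: &Index) {
    let task = index.add_documents().await;
    // before: index.wait_task(task.uid()).await
    server.wait_task(task.uid()).await;
}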

View File

@ -93,7 +93,7 @@ async fn create_api_key_bad_actions() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `*.get`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`",
"message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `export`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`",
"code": "invalid_api_key_actions",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"

View File

@ -42,7 +42,7 @@ async fn batch_bad_types() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `upgradeDatabase`.",
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"

View File

@ -465,6 +465,7 @@ pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
skip_index_budget: true,
// Having 2 threads makes the tests way faster
max_indexing_threads: MaxThreads::from_str("2").unwrap(),
experimental_no_edition_2024_for_settings: false,
},
experimental_enable_metrics: false,
..Parser::parse_from(None as Option<&str>)

View File

@ -293,7 +293,7 @@ async fn add_csv_document() {
"enqueuedAt": "[date]"
}
"#);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@ -358,7 +358,7 @@ async fn add_csv_document_with_types() {
"enqueuedAt": "[date]"
}
"#);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@ -434,7 +434,7 @@ async fn add_csv_document_with_custom_delimiter() {
"enqueuedAt": "[date]"
}
"#);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@ -991,7 +991,7 @@ async fn add_documents_no_index_creation() {
let (response, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(code, @"202 Accepted");
snapshot!(response,
@r###"
@ -1068,7 +1068,7 @@ async fn document_addition_with_primary_key() {
}
"#);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get_task(response.uid()).await;
snapshot!(code, @"200 OK");
@ -1120,7 +1120,7 @@ async fn document_addition_with_huge_int_primary_key() {
let (response, code) = index.add_documents(documents, Some("primary")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(response,
@r###"
{
@ -1178,7 +1178,7 @@ async fn replace_document() {
}
"#);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents = json!([
{
@ -1190,7 +1190,7 @@ async fn replace_document() {
let (task, code) = index.add_documents(documents, None).await;
snapshot!(code,@"202 Accepted");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
@ -1362,7 +1362,7 @@ async fn error_add_documents_bad_document_id() {
}
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -1399,7 +1399,7 @@ async fn error_add_documents_bad_document_id() {
}
]);
let (value, _code) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.failed();
server.wait_task(value.uid()).await.failed();
let (response, code) = index.get_task(value.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -1436,7 +1436,7 @@ async fn error_add_documents_bad_document_id() {
}
]);
let (value, _code) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.failed();
server.wait_task(value.uid()).await.failed();
let (response, code) = index.get_task(value.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -1478,7 +1478,7 @@ async fn error_add_documents_missing_document_id() {
}
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -1527,7 +1527,7 @@ async fn error_document_field_limit_reached_in_one_document() {
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.failed();
let response = server.wait_task(response.uid()).await.failed();
snapshot!(code, @"202 Accepted");
// Documents without a primary key are not accepted.
snapshot!(response,
@ -1576,7 +1576,7 @@ async fn error_document_field_limit_reached_over_multiple_documents() {
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(code, @"202 Accepted");
snapshot!(response,
@r###"
@ -1611,7 +1611,7 @@ async fn error_document_field_limit_reached_over_multiple_documents() {
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.failed();
let response = server.wait_task(response.uid()).await.failed();
snapshot!(code, @"202 Accepted");
snapshot!(response,
@r###"
@ -1660,7 +1660,7 @@ async fn error_document_field_limit_reached_in_one_nested_document() {
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(code, @"202 Accepted");
// Documents without a primary key are not accepted.
snapshot!(response,
@ -1705,7 +1705,7 @@ async fn error_document_field_limit_reached_over_multiple_documents_with_nested_
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(code, @"202 Accepted");
snapshot!(response,
@r###"
@ -1741,7 +1741,7 @@ async fn error_document_field_limit_reached_over_multiple_documents_with_nested_
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(code, @"202 Accepted");
snapshot!(response,
@r###"
@ -1790,7 +1790,7 @@ async fn add_documents_with_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
let response = index.wait_task(task.uid()).await.succeeded();
let response = server.wait_task(task.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r#"
{
@ -1914,7 +1914,7 @@ async fn update_documents_with_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
let response = index.wait_task(task.uid()).await.succeeded();
let response = server.wait_task(task.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r#"
{
@ -1983,7 +1983,7 @@ async fn update_documents_with_geo_field() {
}
]);
let (task, _status_code) = index.update_documents(updated_documents, None).await;
let response = index.wait_task(task.uid()).await.succeeded();
let response = server.wait_task(task.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
@ -2097,7 +2097,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".indexUid" => "[uuid]" }),
@ -2135,7 +2135,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2173,7 +2173,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2211,7 +2211,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2249,7 +2249,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2287,7 +2287,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2325,7 +2325,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2363,7 +2363,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2401,7 +2401,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2439,7 +2439,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2477,7 +2477,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2515,7 +2515,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2556,7 +2556,7 @@ async fn add_documents_invalid_geo_field() {
let (response, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.failed();
let response = server.wait_task(response.uid()).await.failed();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
@ -2593,7 +2593,7 @@ async fn add_documents_invalid_geo_field() {
let (response, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.failed();
let response = server.wait_task(response.uid()).await.failed();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
@ -2630,7 +2630,7 @@ async fn add_documents_invalid_geo_field() {
let (response, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.failed();
let response = server.wait_task(response.uid()).await.failed();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
@ -2674,7 +2674,7 @@ async fn add_invalid_geo_and_then_settings() {
]);
let (ret, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let ret = index.wait_task(ret.uid()).await.succeeded();
let ret = server.wait_task(ret.uid()).await.succeeded();
snapshot!(ret, @r###"
{
"uid": "[uid]",
@ -2697,7 +2697,7 @@ async fn add_invalid_geo_and_then_settings() {
let (ret, code) = index.update_settings(json!({ "sortableAttributes": ["_geo"] })).await;
snapshot!(code, @"202 Accepted");
let ret = index.wait_task(ret.uid()).await.failed();
let ret = server.wait_task(ret.uid()).await.failed();
snapshot!(ret, @r###"
{
"uid": "[uid]",
@ -2765,7 +2765,7 @@ async fn error_primary_key_inference() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
assert_eq!(code, 200);
@ -2806,7 +2806,7 @@ async fn error_primary_key_inference() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
assert_eq!(code, 200);
@ -2845,7 +2845,7 @@ async fn error_primary_key_inference() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_task(task.uid()).await;
assert_eq!(code, 200);
@ -2884,12 +2884,12 @@ async fn add_documents_with_primary_key_twice() {
]);
let (task, _status_code) = index.add_documents(documents.clone(), Some("title")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _code) = index.get_task(task.uid()).await;
assert_eq!(response["status"], "succeeded");
let (task, _status_code) = index.add_documents(documents, Some("title")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _code) = index.get_task(task.uid()).await;
assert_eq!(response["status"], "succeeded");
}
@ -2922,7 +2922,7 @@ async fn batch_several_documents_addition() {
// wait first batch of documents to finish
let finished_tasks = futures::future::join_all(waiter).await;
for (task, _code) in finished_tasks {
index.wait_task(task.uid()).await;
server.wait_task(task.uid()).await;
}
// run a second completely failing batch
@ -2936,7 +2936,7 @@ async fn batch_several_documents_addition() {
// wait second batch of documents to finish
let finished_tasks = futures::future::join_all(waiter).await;
for (task, _code) in finished_tasks {
index.wait_task(task.uid()).await;
server.wait_task(task.uid()).await;
}
let (response, _code) = index.filtered_tasks(&[], &["failed"], &[]).await;

View File

@ -5,11 +5,12 @@ use crate::json;
#[actix_rt::test]
async fn delete_one_document_unexisting_index() {
let server = Server::new_shared();
let index = shared_does_not_exists_index().await;
let (task, code) = index.delete_document_by_filter_fail(json!({"filter": "a = b"})).await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
}
#[actix_rt::test]
@ -19,7 +20,7 @@ async fn delete_one_unexisting_document() {
index.create(None).await;
let (response, code) = index.delete_document(0).await;
assert_eq!(code, 202, "{response}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
}
#[actix_rt::test]
@ -28,10 +29,10 @@ async fn delete_one_document() {
let index = server.unique_index();
let (task, _status_code) =
index.add_documents(json!([{ "id": 0, "content": "foobar" }]), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, status_code) = index.delete_document(0).await;
assert_eq!(status_code, 202);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (_response, code) = index.get_document(0, None).await;
assert_eq!(code, 404);
@ -44,7 +45,7 @@ async fn clear_all_documents_unexisting_index() {
let (task, code) = index.clear_all_documents().await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
}
#[actix_rt::test]
@ -57,11 +58,11 @@ async fn clear_all_documents() {
None,
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.clear_all_documents().await;
assert_eq!(code, 202);
let _update = index.wait_task(task.uid()).await.succeeded();
let _update = server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert!(response["results"].as_array().unwrap().is_empty());
@ -72,11 +73,11 @@ async fn clear_all_documents_empty_index() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.clear_all_documents().await;
assert_eq!(code, 202);
let _update = index.wait_task(task.uid()).await.succeeded();
let _update = server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert!(response["results"].as_array().unwrap().is_empty());
@ -95,7 +96,7 @@ async fn error_delete_batch_unexisting_index() {
});
assert_eq!(code, 202);
let response = index.wait_task(task.uid()).await.failed();
let response = server.wait_task(task.uid()).await.failed();
assert_eq!(response["error"], expected_response);
}
@ -104,11 +105,11 @@ async fn delete_batch() {
let server = Server::new_shared();
let index = server.unique_index();
let (task,_status_code) = index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.delete_batch(vec![1, 0]).await;
assert_eq!(code, 202);
let _update = index.wait_task(task.uid()).await.succeeded();
let _update = server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
@ -120,11 +121,11 @@ async fn delete_no_document_batch() {
let server = Server::new_shared();
let index = server.unique_index();
let (task,_status_code) = index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.delete_batch(vec![]).await;
assert_eq!(code, 202, "{response}");
let _update = index.wait_task(response.uid()).await.succeeded();
let _update = server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 3);
@ -146,7 +147,7 @@ async fn delete_document_by_filter() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (stats, _) = index.stats().await;
snapshot!(json_string!(stats, {
@ -180,7 +181,7 @@ async fn delete_document_by_filter() {
}
"###);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@ -253,7 +254,7 @@ async fn delete_document_by_filter() {
}
"###);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@ -328,7 +329,7 @@ async fn delete_document_by_complex_filter() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index
.delete_document_by_filter(
json!({ "filter": ["color != red", "color != green", "color EXISTS"] }),
@ -345,7 +346,7 @@ async fn delete_document_by_complex_filter() {
}
"###);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@ -404,7 +405,7 @@ async fn delete_document_by_complex_filter() {
}
"###);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",

View File

@ -23,7 +23,7 @@ async fn error_get_unexisting_document() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_document(1, None).await;
@ -43,7 +43,7 @@ async fn get_document() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let documents = json!([
{
"id": 0,
@ -52,7 +52,7 @@ async fn get_document() {
]);
let (task, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_document(0, None).await;
assert_eq!(code, 200);
assert_eq!(
@ -276,7 +276,7 @@ async fn get_document_s_nested_attributes_to_retrieve() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let documents = json!([
{
@ -293,7 +293,7 @@ async fn get_document_s_nested_attributes_to_retrieve() {
]);
let (task, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_document(0, Some(json!({ "fields": ["content"] }))).await;
assert_eq!(code, 200);
@ -369,7 +369,7 @@ async fn get_document_by_filter() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.fetch_documents(json!({})).await;
let (response2, code2) = index.get_all_documents_raw("").await;
@ -525,7 +525,7 @@ async fn get_document_by_ids() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index
.fetch_documents(json!({
@ -651,7 +651,7 @@ async fn get_document_invalid_ids() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.fetch_documents(json!({"ids": ["0", "illegal/docid"] })).await;
let (response2, code2) = index.get_all_documents_raw("?ids=0,illegal/docid").await;
@ -683,7 +683,7 @@ async fn get_document_not_found_ids() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.fetch_documents(json!({"ids": ["0", 3, 42] })).await;
let (response2, code2) = index.get_all_documents_raw("?ids=0,3,42").await;
@ -726,7 +726,7 @@ async fn get_document_by_ids_and_filter() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.fetch_documents(json!({"ids": [2], "filter": "color = blue" })).await;
@ -854,7 +854,7 @@ async fn get_document_with_vectors() {
]);
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
// by default you shouldn't see the `_vectors` object
let (documents, _code) = index.get_all_documents(Default::default()).await;

View File

@ -34,7 +34,7 @@ async fn document_update_with_primary_key() {
let (response, code) = index.update_documents(documents, Some("primary")).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get_task(response.uid()).await;
assert_eq!(code, 200);
@ -63,7 +63,7 @@ async fn update_document() {
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents = json!([
{
@ -75,7 +75,7 @@ async fn update_document() {
let (response, code) = index.update_documents(documents, None).await;
assert_eq!(code, 202, "response: {}", response);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get_task(response.uid()).await;
assert_eq!(code, 200);
@ -107,7 +107,7 @@ async fn update_document_gzip_encoded() {
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents = json!([
{
@ -119,7 +119,7 @@ async fn update_document_gzip_encoded() {
let (response, code) = index.update_documents(documents, None).await;
assert_eq!(code, 202, "response: {}", response);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get_task(response.uid()).await;
assert_eq!(code, 200);
@ -142,7 +142,7 @@ async fn update_larger_dataset() {
let index = server.unique_index();
let documents = serde_json::from_str(include_str!("../assets/test_set.json")).unwrap();
let (task, _code) = index.update_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_task(task.uid()).await;
assert_eq!(code, 200);
assert_eq!(response["type"], "documentAdditionOrUpdate");
@ -166,7 +166,7 @@ async fn error_update_documents_bad_document_id() {
}
]);
let (task, _code) = index.update_documents(documents, None).await;
let response = index.wait_task(task.uid()).await;
let response = server.wait_task(task.uid()).await;
assert_eq!(response["status"], json!("failed"));
assert_eq!(
response["error"]["message"],
@ -194,7 +194,7 @@ async fn error_update_documents_missing_document_id() {
}
]);
let (task, _code) = index.update_documents(documents, None).await;
let response = index.wait_task(task.uid()).await;
let response = server.wait_task(task.uid()).await;
assert_eq!(response["status"], "failed");
assert_eq!(
response["error"]["message"],
@ -219,7 +219,7 @@ async fn update_faceted_document() {
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents: Vec<_> = (0..1000)
.map(|id| {
@ -233,7 +233,7 @@ async fn update_faceted_document() {
let (response, code) = index.add_documents(documents.into(), None).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents = json!([
{
@ -245,7 +245,7 @@ async fn update_faceted_document() {
let (response, code) = index.update_documents(documents, None).await;
assert_eq!(code, 202, "response: {}", response);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
index
.search(json!({"limit": 10}), |response, code| {

View File

@ -2188,7 +2188,8 @@ async fn import_dump_v6_containing_experimental_features() {
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false,
"chatCompletions": false
"chatCompletions": false,
"multimodal": false
}
"###);
@ -2314,7 +2315,8 @@ async fn import_dump_v6_containing_batches_and_enqueued_tasks() {
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false,
"chatCompletions": false
"chatCompletions": false,
"multimodal": false
}
"###);
@ -2420,7 +2422,8 @@ async fn generate_and_import_dump_containing_vectors() {
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false,
"chatCompletions": false
"chatCompletions": false,
"multimodal": false
}
"###);

View File

@ -25,7 +25,8 @@ async fn experimental_features() {
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false,
"chatCompletions": false
"chatCompletions": false,
"multimodal": false
}
"###);
@ -41,7 +42,8 @@ async fn experimental_features() {
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false,
"chatCompletions": false
"chatCompletions": false,
"multimodal": false
}
"###);
@ -57,7 +59,8 @@ async fn experimental_features() {
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false,
"chatCompletions": false
"chatCompletions": false,
"multimodal": false
}
"###);
@ -74,7 +77,8 @@ async fn experimental_features() {
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false,
"chatCompletions": false
"chatCompletions": false,
"multimodal": false
}
"###);
@ -91,7 +95,8 @@ async fn experimental_features() {
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false,
"chatCompletions": false
"chatCompletions": false,
"multimodal": false
}
"###);
}
@ -115,7 +120,8 @@ async fn experimental_feature_metrics() {
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false,
"chatCompletions": false
"chatCompletions": false,
"multimodal": false
}
"###);
@ -162,7 +168,7 @@ async fn errors() {
meili_snap::snapshot!(code, @"400 Bad Request");
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
{
"message": "Unknown field `NotAFeature`: expected one of `metrics`, `logsRoute`, `editDocumentsByFunction`, `containsFilter`, `network`, `getTaskDocumentsRoute`, `compositeEmbedders`, `chatCompletions`",
"message": "Unknown field `NotAFeature`: expected one of `metrics`, `logsRoute`, `editDocumentsByFunction`, `containsFilter`, `network`, `getTaskDocumentsRoute`, `compositeEmbedders`, `chatCompletions`, `multimodal`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad_request"

View File

@ -17,7 +17,7 @@ async fn create_index_no_primary_key() {
assert_eq!(response["status"], "enqueued");
let response = index.wait_task(response.uid()).await;
let response = server.wait_task(response.uid()).await;
assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
@ -34,7 +34,7 @@ async fn create_index_with_gzip_encoded_request() {
assert_eq!(response["status"], "enqueued");
let response = index.wait_task(response.uid()).await;
let response = server.wait_task(response.uid()).await;
assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
@ -83,7 +83,7 @@ async fn create_index_with_zlib_encoded_request() {
assert_eq!(response["status"], "enqueued");
let response = index.wait_task(response.uid()).await;
let response = server.wait_task(response.uid()).await;
assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
@ -100,7 +100,7 @@ async fn create_index_with_brotli_encoded_request() {
assert_eq!(response["status"], "enqueued");
let response = index.wait_task(response.uid()).await;
let response = server.wait_task(response.uid()).await;
assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
@ -117,7 +117,7 @@ async fn create_index_with_primary_key() {
assert_eq!(response["status"], "enqueued");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
@ -132,7 +132,7 @@ async fn create_index_with_invalid_primary_key() {
let index = server.unique_index();
let (response, code) = index.add_documents(documents, Some("title")).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.failed();
server.wait_task(response.uid()).await.failed();
let (response, code) = index.get().await;
assert_eq!(code, 200);
@ -142,7 +142,7 @@ async fn create_index_with_invalid_primary_key() {
let (response, code) = index.add_documents(documents, Some("id")).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.failed();
server.wait_task(response.uid()).await.failed();
let (response, code) = index.get().await;
assert_eq!(code, 200);
@ -181,7 +181,7 @@ async fn error_create_existing_index() {
let (task, _) = index.create(Some("primary")).await;
let response = index.wait_task(task.uid()).await;
let response = server.wait_task(task.uid()).await;
let msg = format!(
"Index `{}` already exists.",
task["indexUid"].as_str().expect("indexUid should exist").trim_matches('"')

View File

@ -9,7 +9,7 @@ async fn create_and_delete_index() {
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
assert_eq!(index.get().await.1, 200);
@ -17,18 +17,19 @@ async fn create_and_delete_index() {
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
assert_eq!(index.get().await.1, 404);
}
#[actix_rt::test]
async fn error_delete_unexisting_index() {
let server = Server::new_shared();
let index = shared_does_not_exists_index().await;
let (task, code) = index.delete_index_fail().await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let expected_response = json!({
"message": "Index `DOES_NOT_EXISTS` not found.",
@ -37,7 +38,7 @@ async fn error_delete_unexisting_index() {
"link": "https://docs.meilisearch.com/errors#index_not_found"
});
let response = index.wait_task(task.uid()).await;
let response = server.wait_task(task.uid()).await;
assert_eq!(response["status"], "failed");
assert_eq!(response["error"], expected_response);
}
@ -58,7 +59,7 @@ async fn loop_delete_add_documents() {
}
for task in tasks {
let response = index.wait_task(task).await.succeeded();
let response = server.wait_task(task).await.succeeded();
assert_eq!(response["status"], "succeeded", "{}", response);
}
}

View File

@ -12,7 +12,7 @@ async fn create_and_get_index() {
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get().await;

View File

@ -9,7 +9,7 @@ async fn stats() {
assert_eq!(code, 202);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.stats().await;
@ -32,7 +32,7 @@ async fn stats() {
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.stats().await;

View File

@ -12,10 +12,10 @@ async fn update_primary_key() {
let (task, code) = index.create(None).await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, _status_code) = index.update(Some("primary")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get().await;
@ -42,12 +42,12 @@ async fn create_and_update_with_different_encoding() {
let (create_task, code) = index.create(None).await;
assert_eq!(code, 202);
index.wait_task(create_task.uid()).await.succeeded();
server.wait_task(create_task.uid()).await.succeeded();
let index = index.with_encoder(Encoder::Brotli);
let (task, _status_code) = index.update(Some("primary")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
}
#[actix_rt::test]
@ -58,23 +58,24 @@ async fn update_nothing() {
assert_eq!(code, 202);
index.wait_task(task1.uid()).await.succeeded();
server.wait_task(task1.uid()).await.succeeded();
let (task2, code) = index.update(None).await;
assert_eq!(code, 202);
index.wait_task(task2.uid()).await.succeeded();
server.wait_task(task2.uid()).await.succeeded();
}
#[actix_rt::test]
async fn error_update_existing_primary_key() {
let server = Server::new_shared();
let index = shared_index_with_documents().await;
let (update_task, code) = index.update_index_fail(Some("primary")).await;
assert_eq!(code, 202);
let response = index.wait_task(update_task.uid()).await.failed();
let response = server.wait_task(update_task.uid()).await.failed();
let expected_response = json!({
"message": format!("Index `{}`: Index already has a primary key: `id`.", index.uid),
@ -88,12 +89,13 @@ async fn error_update_existing_primary_key() {
#[actix_rt::test]
async fn error_update_unexisting_index() {
let server = Server::new_shared();
let index = shared_does_not_exists_index().await;
let (task, code) = index.update_index_fail(Some("my-primary-key")).await;
assert_eq!(code, 202);
let response = index.wait_task(task.uid()).await.failed();
let response = server.wait_task(task.uid()).await.failed();
let expected_response = json!({
"message": format!("Index `{}` not found.", index.uid),

View File

@ -152,7 +152,7 @@ async fn distinct_search_with_offset_no_ranking() {
let documents = DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
let (task, _status_code) = index.update_distinct_attribute(json!(DOCUMENT_DISTINCT_KEY)).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
fn get_hits(response: &Value) -> Vec<&str> {
let hits_array = response["hits"].as_array().unwrap();
@ -211,7 +211,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let documents = DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
let (task, _status_code) = index.update_distinct_attribute(json!(DOCUMENT_DISTINCT_KEY)).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
fn get_hits(response: &Value) -> Vec<&str> {
let hits_array = response["hits"].as_array().unwrap();
@ -281,7 +281,7 @@ async fn distinct_at_search_time() {
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
let (task, _) = index.update_settings_filterable_attributes(json!(["color.main"])).await;
let task = index.wait_task(task.uid()).await.succeeded();
let task = server.wait_task(task.uid()).await.succeeded();
snapshot!(task, name: "succeed");
fn get_hits(response: &Value) -> Vec<String> {

View File

@ -425,7 +425,7 @@ async fn search_non_filterable_facets() {
let index = server.unique_index();
let (response, _code) = index.update_settings(json!({"filterableAttributes": ["title"]})).await;
// Wait for the settings update to complete
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.search_post(json!({"facets": ["doggo"]})).await;
snapshot!(code, @"400 Bad Request");
@ -456,7 +456,7 @@ async fn search_non_filterable_facets_multiple_filterable() {
let index = server.unique_index();
let (response, _code) =
index.update_settings(json!({"filterableAttributes": ["title", "genres"]})).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.search_post(json!({"facets": ["doggo"]})).await;
snapshot!(code, @"400 Bad Request");
@ -486,7 +486,7 @@ async fn search_non_filterable_facets_no_filterable() {
let server = Server::new_shared();
let index = server.unique_index();
let (response, _code) = index.update_settings(json!({"filterableAttributes": []})).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.search_post(json!({"facets": ["doggo"]})).await;
snapshot!(code, @"400 Bad Request");
@ -517,7 +517,7 @@ async fn search_non_filterable_facets_multiple_facets() {
let index = server.unique_index();
let (response, _uid) =
index.update_settings(json!({"filterableAttributes": ["title", "genres"]})).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.search_post(json!({"facets": ["doggo", "neko"]})).await;
snapshot!(code, @"400 Bad Request");
@ -1001,7 +1001,7 @@ async fn sort_geo_reserved_attribute() {
let index = server.unique_index();
let (task, _code) = index.update_settings(json!({"sortableAttributes": ["id"]})).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let expected_response = json!({
"message": "`_geo` is a reserved keyword and thus can't be used as a sort expression. Use the _geoPoint(latitude, longitude) built-in rule to sort on _geo field coordinates.",
@ -1028,7 +1028,7 @@ async fn sort_reserved_attribute() {
let index = server.unique_index();
let (task, _code) = index.update_settings(json!({"sortableAttributes": ["id"]})).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let expected_response = json!({
"message": "`_geoDistance` is a reserved keyword and thus can't be used as a sort expression.",
@ -1054,7 +1054,7 @@ async fn sort_unsortable_attribute() {
let server = Server::new_shared();
let index = server.unique_index();
let (response, _code) = index.update_settings(json!({"sortableAttributes": ["id"]})).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let expected_response = json!({
"message": format!("Index `{}`: Attribute `title` is not sortable. Available sortable attributes are: `id`.", index.uid),
@ -1081,7 +1081,7 @@ async fn sort_invalid_syntax() {
let index = server.unique_index();
let (response, _code) = index.update_settings(json!({"sortableAttributes": ["id"]})).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let expected_response = json!({
"message": "Invalid syntax for the sort parameter: expected expression ending by `:asc` or `:desc`, found `title`.",
@ -1112,7 +1112,7 @@ async fn sort_unset_ranking_rule() {
json!({"sortableAttributes": ["title"], "rankingRules": ["proximity", "exactness"]}),
)
.await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let expected_response = json!({
"message": format!("Index `{}`: You must specify where `sort` is listed in the rankingRules setting to use the sort parameter at search time.", index.uid),
@ -1199,7 +1199,7 @@ async fn distinct_at_search_time() {
let index = server.unique_index();
let (response, _code) =
index.add_documents(json!([{"id": 1, "color": "Doggo", "machin": "Action"}]), None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.search_post(json!({"page": 0, "hitsPerPage": 2, "distinct": "doggo.truc"})).await;
@ -1214,7 +1214,7 @@ async fn distinct_at_search_time() {
"###);
let (task, _) = index.update_settings_filterable_attributes(json!(["color", "machin"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.search_post(json!({"page": 0, "hitsPerPage": 2, "distinct": "doggo.truc"})).await;
@ -1229,7 +1229,7 @@ async fn distinct_at_search_time() {
"###);
let (task, _) = index.update_settings_displayed_attributes(json!(["color"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.search_post(json!({"page": 0, "hitsPerPage": 2, "distinct": "doggo.truc"})).await;

View File

@ -50,11 +50,11 @@ async fn test_settings_documents_indexing_swapping_and_facet_search(
let (task, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(code, 202, "{}", task);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.update_settings(settings.clone()).await;
assert_eq!(code, 202, "{}", task);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.facet_search(query.clone()).await;
insta::allow_duplicates! {
@ -70,11 +70,11 @@ async fn test_settings_documents_indexing_swapping_and_facet_search(
let (task, code) = index.update_settings(settings.clone()).await;
assert_eq!(code, 202, "{}", task);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(code, 202, "{}", task);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.facet_search(query.clone()).await;
insta::allow_duplicates! {
@ -94,7 +94,7 @@ async fn simple_facet_search() {
let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@ -207,10 +207,10 @@ async fn simple_facet_search_on_movies() {
let (response, code) =
index.update_settings_filterable_attributes(json!(["genres", "color"])).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetQuery": "", "facetName": "genres", "q": "" })).await;
@ -228,7 +228,7 @@ async fn advanced_facet_search() {
index.update_settings_filterable_attributes(json!(["genres"])).await;
index.update_settings_typo_tolerance(json!({ "enabled": false })).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "adventre"})).await;
@ -252,7 +252,7 @@ async fn more_advanced_facet_search() {
index.update_settings_filterable_attributes(json!(["genres"])).await;
index.update_settings_typo_tolerance(json!({ "disableOnWords": ["adventre"] })).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "adventre"})).await;
@ -276,7 +276,7 @@ async fn simple_facet_search_with_max_values() {
index.update_settings_faceting(json!({ "maxValuesPerFacet": 1 })).await;
index.update_settings_filterable_attributes(json!(["genres"])).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@ -298,7 +298,7 @@ async fn simple_facet_search_by_count_with_max_values() {
.await;
index.update_settings_filterable_attributes(json!(["genres"])).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@ -314,7 +314,7 @@ async fn non_filterable_facet_search_error() {
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@ -333,7 +333,7 @@ async fn facet_search_dont_support_words() {
let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "words"})).await;
@ -351,7 +351,7 @@ async fn simple_facet_search_with_sort_by_count() {
index.update_settings_faceting(json!({ "sortFacetValuesBy": { "*": "count" } })).await;
index.update_settings_filterable_attributes(json!(["genres"])).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@ -370,7 +370,7 @@ async fn add_documents_and_deactivate_facet_search() {
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
"facetSearch": false,
@ -378,7 +378,7 @@ async fn add_documents_and_deactivate_facet_search() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@ -406,10 +406,10 @@ async fn deactivate_facet_search_and_add_documents() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@ -437,10 +437,10 @@ async fn deactivate_facet_search_add_documents_and_activate_facet_search() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
@ -448,7 +448,7 @@ async fn deactivate_facet_search_add_documents_and_activate_facet_search() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@ -469,10 +469,10 @@ async fn deactivate_facet_search_add_documents_and_reset_facet_search() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
@ -480,7 +480,7 @@ async fn deactivate_facet_search_add_documents_and_reset_facet_search() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@ -920,13 +920,13 @@ async fn distinct_facet_search_on_movies() {
let (response, code) =
index.update_settings_filterable_attributes(json!(["genres", "color"])).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.update_settings_distinct_attribute(json!("color")).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetQuery": "blob", "facetName": "genres", "q": "" })).await;

View File

@ -90,7 +90,7 @@ async fn search_with_contains_filter() {
let documents = DOCUMENTS.clone();
let (request, _code) = index.add_documents(documents, None).await;
index.wait_task(request.uid()).await.succeeded();
server.wait_task(request.uid()).await.succeeded();
let (response, code) = index
.search_post(json!({
@ -257,7 +257,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
let (task, code) = index.add_documents(NESTED_DOCUMENTS.clone(), None).await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index
.update_settings(json!({"filterableAttributes": [{
@ -269,7 +269,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}]}))
.await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// Check if the Equality filter works
index
@ -334,7 +334,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}]}))
.await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// Check if the Equality filter works
index
@ -445,7 +445,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}]}))
.await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// Check if the Equality filter returns an error
index
@ -544,7 +544,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}]}))
.await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// Check if the Equality filter works
index

View File

@ -26,7 +26,7 @@ async fn search_formatted_from_sdk() {
{ "id": 42, "title": "The Hitchhiker's Guide to the Galaxy" }
]);
let (response, _) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await;
server.wait_task(response.uid()).await;
index
.search(
@ -65,7 +65,7 @@ async fn formatted_contain_wildcard() {
let documents = NESTED_DOCUMENTS.clone();
let (response, _) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
index.search(json!({ "q": "pésti", "attributesToRetrieve": ["father", "mother"], "attributesToHighlight": ["father", "mother", "*"], "attributesToCrop": ["doggos"], "showMatchesPosition": true }),
|response, code|
@ -398,7 +398,7 @@ async fn displayedattr_2_smol() {
let documents = NESTED_DOCUMENTS.clone();
let (response, _) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
index
.search(json!({ "attributesToRetrieve": ["father", "id"], "attributesToHighlight": ["mother"], "attributesToCrop": ["cattos"] }),
@ -596,7 +596,7 @@ async fn test_cjk_highlight() {
{ "id": 1, "title": "大卫到了扫罗那里" },
]);
let (response, _) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
index
.search(json!({"q": "", "attributesToHighlight": ["title"]}), |response, code| {

View File

@ -17,11 +17,11 @@ async fn index_with_documents_user_provided<'a>(
"dimensions": 2}}} ))
.await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
index
}
@ -37,11 +37,11 @@ async fn index_with_documents_hf<'a>(server: &'a Server<Shared>, documents: &Val
}}} ))
.await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
index
}
@ -499,7 +499,7 @@ async fn query_combination() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Invalid request: missing `hybrid` parameter when `vector` is present.",
"message": "Invalid request: missing `hybrid` parameter when `vector` or `media` are present.",
"code": "missing_search_hybrid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#missing_search_hybrid"
@ -543,7 +543,7 @@ async fn distinct_is_applied() {
let (response, code) = index.update_settings(json!({ "distinctAttribute": "distinct" } )).await;
assert_eq!(202, code, "{:?}", response);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
// pure keyword
let (response, code) = index
@ -633,7 +633,7 @@ async fn retrieve_vectors() {
.update_settings(json!({ "displayedAttributes": ["id", "title", "desc", "_vectors"]} ))
.await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.search_post(
@ -683,7 +683,7 @@ async fn retrieve_vectors() {
let (response, code) =
index.update_settings(json!({ "displayedAttributes": ["id", "title", "desc"]} )).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.search_post(

View File

@ -99,7 +99,7 @@ async fn simple_search() {
)
.await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// english
index
@ -215,7 +215,7 @@ async fn force_locales() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// chinese detection
index
@ -293,7 +293,7 @@ async fn force_locales_with_pattern() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// chinese detection
index
@ -369,7 +369,7 @@ async fn force_locales_with_pattern_nested() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// chinese
index
@ -444,7 +444,7 @@ async fn force_different_locales_with_pattern() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// force chinese
index
@ -522,7 +522,7 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// auto infer any language
index
@ -596,7 +596,7 @@ async fn auto_infer_locales_at_search() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@ -695,7 +695,7 @@ async fn force_different_locales_with_pattern_nested() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// chinese
index
@ -773,7 +773,7 @@ async fn settings_change() {
let documents = NESTED_DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _) = index
.update_settings(json!({
"searchableAttributes": ["document_en", "document_ja", "document_zh"],
@ -792,7 +792,7 @@ async fn settings_change() {
"enqueuedAt": "[date]"
}
"###);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
// chinese
index
@ -855,7 +855,7 @@ async fn settings_change() {
"enqueuedAt": "[date]"
}
"###);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
// chinese
index
@ -910,7 +910,7 @@ async fn invalid_locales() {
)
.await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.search_post(json!({"q": "Atta", "locales": ["invalid"]})).await;
snapshot!(code, @"400 Bad Request");
@ -1028,7 +1028,7 @@ async fn simple_facet_search() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _) = index
.facet_search(json!({"facetName": "name_zh", "facetQuery": "進撃", "locales": ["cmn"]}))
@ -1090,7 +1090,7 @@ async fn facet_search_with_localized_attributes() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _) = index
.facet_search(json!({"facetName": "name_zh", "facetQuery": "进击", "locales": ["cmn"]}))
@ -1159,7 +1159,7 @@ async fn swedish_search() {
]
}))
.await;
index.wait_task(_response.uid()).await.succeeded();
server.wait_task(_response.uid()).await.succeeded();
// infer swedish
index
@ -1280,7 +1280,7 @@ async fn german_search() {
]
}))
.await;
index.wait_task(_response.uid()).await.succeeded();
server.wait_task(_response.uid()).await.succeeded();
// infer german
index

View File

@ -9,7 +9,7 @@ async fn index_with_documents<'a>(server: &'a Server<Shared>, documents: &Value)
let index = server.unique_index();
let (task, _status_code) = index.add_documents(documents.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
}

View File

@ -38,11 +38,11 @@ async fn test_settings_documents_indexing_swapping_and_search(
let (task, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.update_settings(settings.clone()).await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index.search(query.clone(), test.clone()).await;
@ -51,11 +51,11 @@ async fn test_settings_documents_indexing_swapping_and_search(
let (task, code) = index.update_settings(settings.clone()).await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index.search(query.clone(), test.clone()).await;
}
@ -104,7 +104,7 @@ async fn bug_5547() {
let server = Server::new_shared();
let index = server.unique_index();
let (response, _code) = index.create(None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let mut documents = Vec::new();
for i in 0..65_535 {
@ -112,7 +112,7 @@ async fn bug_5547() {
}
let (response, _code) = index.add_documents(json!(documents), Some("id")).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.search_post(json!({"q": "title"})).await;
assert_eq!(code, 200);
snapshot!(response["hits"], @r###"[{"id":0,"title":"title0"},{"id":1,"title":"title1"},{"id":10,"title":"title10"},{"id":100,"title":"title100"},{"id":101,"title":"title101"},{"id":102,"title":"title102"},{"id":103,"title":"title103"},{"id":104,"title":"title104"},{"id":105,"title":"title105"},{"id":106,"title":"title106"},{"id":107,"title":"title107"},{"id":108,"title":"title108"},{"id":1000,"title":"title1000"},{"id":1001,"title":"title1001"},{"id":1002,"title":"title1002"},{"id":1003,"title":"title1003"},{"id":1004,"title":"title1004"},{"id":1005,"title":"title1005"},{"id":1006,"title":"title1006"},{"id":1007,"title":"title1007"}]"###);
@ -131,7 +131,7 @@ async fn search_with_stop_word() {
let documents = DOCUMENTS.clone();
let (task, _code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// prefix search
index
@ -196,7 +196,7 @@ async fn search_with_typo_settings() {
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "287947" }), |response, code| {
@ -228,7 +228,7 @@ async fn phrase_search_with_stop_word() {
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "how \"to\" train \"the" }), |response, code| {
@ -308,11 +308,11 @@ async fn negative_special_cases_search() {
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, _status_code) =
index.update_settings(json!({"synonyms": { "escape": ["gläss"] }})).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// There is a synonym for escape -> glass but we don't want "escape", only the derivatives: glass
index
@ -338,7 +338,7 @@ async fn test_kanji_language_detection() {
{ "id": 2, "title": "הַשּׁוּעָל הַמָּהִיר (״הַחוּם״) לֹא יָכוֹל לִקְפֹּץ 9.94 מֶטְרִים, נָכוֹן? ברר, 1.5°C- בַּחוּץ!" }
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "東京"}), |response, code| {
@ -361,10 +361,10 @@ async fn test_thai_language() {
{ "id": 2, "title": "สบู่สมุนไพรฝางแดงผสมว่านหางจรเข้ 100 กรัม จำนวน 6 ก้อน" }
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, _status_code) = index.update_settings(json!({"rankingRules": ["exactness"]})).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "สบู"}), |response, code| {
@ -586,7 +586,7 @@ async fn displayed_attributes() {
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.search_post(json!({ "attributesToRetrieve": ["title", "id"] })).await;
@ -601,7 +601,7 @@ async fn placeholder_search_is_hard_limited() {
let documents: Vec<_> = (0..1200).map(|i| json!({ "id": i, "text": "I am unique!" })).collect();
let (task, _status_code) = index.add_documents(documents.into(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@ -630,7 +630,7 @@ async fn placeholder_search_is_hard_limited() {
let (task, _status_code) =
index.update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } })).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@ -665,7 +665,7 @@ async fn search_is_hard_limited() {
let documents: Vec<_> = (0..1200).map(|i| json!({ "id": i, "text": "I am unique!" })).collect();
let (task, _status_code) = index.add_documents(documents.into(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@ -696,7 +696,7 @@ async fn search_is_hard_limited() {
let (task, _status_code) =
index.update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } })).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@ -735,7 +735,7 @@ async fn faceting_max_values_per_facet() {
let documents: Vec<_> = (0..10_000).map(|id| json!({ "id": id, "number": id * 10 })).collect();
let (task, _status_code) = index.add_documents(json!(documents), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@ -752,7 +752,7 @@ async fn faceting_max_values_per_facet() {
let (task, _status_code) =
index.update_settings(json!({ "faceting": { "maxValuesPerFacet": 10_000 } })).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@ -1033,7 +1033,7 @@ async fn test_degraded_score_details() {
index.add_documents(json!(documents), None).await;
// We can't really use anything else than 0ms here; otherwise, the test will get flaky.
let (res, _code) = index.update_settings(json!({ "searchCutoffMs": 0 })).await;
index.wait_task(res.uid()).await.succeeded();
server.wait_task(res.uid()).await.succeeded();
index
.search(
@ -1126,7 +1126,7 @@ async fn camelcased_words() {
{ "id": 4, "title": "testab" },
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "deLonghi"}), |response, code| {
@ -1345,12 +1345,12 @@ async fn simple_search_with_strange_synonyms() {
let (task, _status_code) =
index.update_settings(json!({ "synonyms": {"&": ["to"], "to": ["&"]} })).await;
let r = index.wait_task(task.uid()).await.succeeded();
let r = server.wait_task(task.uid()).await.succeeded();
snapshot!(r["status"], @r###""succeeded""###);
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "How to train"}), |response, code| {
@ -1416,11 +1416,11 @@ async fn change_attributes_settings() {
let documents = NESTED_DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(json!(documents), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task,_status_code) =
index.update_settings(json!({ "searchableAttributes": ["father", "mother", "doggos"], "filterableAttributes": ["doggos"] })).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// search
index
@ -1923,7 +1923,7 @@ async fn change_facet_casing() {
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index
.add_documents(
@ -1936,7 +1936,7 @@ async fn change_facet_casing() {
None,
)
.await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index
.add_documents(
@ -1949,7 +1949,7 @@ async fn change_facet_casing() {
None,
)
.await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
index
.search(json!({ "facets": ["dog"] }), |response, code| {
@ -2062,7 +2062,7 @@ async fn simple_search_changing_unrelated_settings() {
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "Dragon"}), |response, code| {
@ -2084,7 +2084,7 @@ async fn simple_search_changing_unrelated_settings() {
let (task, _status_code) =
index.update_settings(json!({ "filterableAttributes": ["title"] })).await;
let r = index.wait_task(task.uid()).await.succeeded();
let r = server.wait_task(task.uid()).await.succeeded();
snapshot!(r["status"], @r###""succeeded""###);
index
@ -2106,7 +2106,7 @@ async fn simple_search_changing_unrelated_settings() {
.await;
let (task, _status_code) = index.update_settings(json!({ "filterableAttributes": [] })).await;
let r = index.wait_task(task.uid()).await.succeeded();
let r = server.wait_task(task.uid()).await.succeeded();
snapshot!(r["status"], @r###""succeeded""###);
index

View File

@ -21,7 +21,7 @@ pub async fn shared_movies_index() -> &'static Index<'static, Shared> {
let documents = DOCUMENTS.clone();
let (response, _code) = movies_index.add_documents(documents, None).await;
movies_index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (value, _) = movies_index
.update_settings(json!({
@ -37,7 +37,7 @@ pub async fn shared_movies_index() -> &'static Index<'static, Shared> {
]
}))
.await;
movies_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
movies_index.to_shared()
})
.await
@ -52,7 +52,7 @@ pub async fn shared_batman_index() -> &'static Index<'static, Shared> {
let documents = SCORE_DOCUMENTS.clone();
let (response, _code) = batman_index.add_documents(documents, None).await;
batman_index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (value, _) = batman_index
.update_settings(json!({
@ -68,7 +68,7 @@ pub async fn shared_batman_index() -> &'static Index<'static, Shared> {
]
}))
.await;
batman_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
batman_index.to_shared()
})
.await
@ -1085,14 +1085,14 @@ async fn federation_filter() {
let documents = FRUITS_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(
json!({"searchableAttributes": ["name"], "filterableAttributes": ["BOOST"]}),
)
.await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"federation": {}, "queries": [
@ -1152,7 +1152,7 @@ async fn federation_sort_same_indexes_same_criterion_same_direction() {
let documents = NESTED_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@ -1167,7 +1167,7 @@ async fn federation_sort_same_indexes_same_criterion_same_direction() {
]
}))
.await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
// two identical placeholder searches should have all results from the first query
let (response, code) = server
@ -1365,7 +1365,7 @@ async fn federation_sort_same_indexes_same_criterion_opposite_direction() {
let documents = NESTED_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@ -1380,7 +1380,7 @@ async fn federation_sort_same_indexes_same_criterion_opposite_direction() {
]
}))
.await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
// two identical placeholder searches should have all results from the first query
let (response, code) = server
@ -1424,7 +1424,7 @@ async fn federation_sort_same_indexes_different_criterion_same_direction() {
let documents = NESTED_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@ -1439,7 +1439,7 @@ async fn federation_sort_same_indexes_different_criterion_same_direction() {
]
}))
.await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
// return mothers and fathers ordered across fields.
let (response, code) = server
@ -1638,7 +1638,7 @@ async fn federation_sort_same_indexes_different_criterion_opposite_direction() {
let documents = NESTED_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@ -1653,7 +1653,7 @@ async fn federation_sort_same_indexes_different_criterion_opposite_direction() {
]
}))
.await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
// two identical placeholder searches should have all results from the first query
let (response, code) = server
@ -3048,14 +3048,14 @@ async fn federation_invalid_weight() {
let documents = FRUITS_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(
json!({"searchableAttributes": ["name"], "filterableAttributes": ["BOOST"]}),
)
.await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"federation": {}, "queries": [
@ -3082,14 +3082,14 @@ async fn federation_null_weight() {
let documents = FRUITS_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(
json!({"searchableAttributes": ["name"], "filterableAttributes": ["BOOST"]}),
)
.await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"federation": {}, "queries": [
@ -3150,7 +3150,7 @@ async fn federation_federated_contains_pagination() {
let documents = FRUITS_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
// fail when a federated query contains "limit"
let (response, code) = server
@ -3230,11 +3230,11 @@ async fn federation_federated_contains_facets() {
)
.await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let documents = FRUITS_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
// empty facets are actually OK
let (response, code) = server
@ -3314,7 +3314,7 @@ async fn federation_non_faceted_for_an_index() {
)
.await;
fruits_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let fruits_no_name_index = server.unique_index_with_prefix("fruits-no-name");
@ -3324,18 +3324,18 @@ async fn federation_non_faceted_for_an_index() {
)
.await;
fruits_no_name_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let fruits_no_facets_index = server.unique_index_with_prefix("fruits-no-facets");
let (value, _) =
fruits_no_facets_index.update_settings(json!({"searchableAttributes": ["name"]})).await;
fruits_no_facets_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let documents = FRUITS_DOCUMENTS.clone();
let (value, _) = fruits_no_facets_index.add_documents(documents, None).await;
fruits_no_facets_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
// fails
let (response, code) = server
@ -3435,7 +3435,7 @@ async fn federation_non_federated_contains_federation_option() {
let documents = FRUITS_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
// fail when a non-federated query contains "federationOptions"
let (response, code) = server
@ -3473,12 +3473,12 @@ async fn federation_vector_single_index() {
}
}}))
.await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let documents = VECTOR_DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
// same embedder
let (response, code) = server
@ -3670,12 +3670,12 @@ async fn federation_vector_two_indexes() {
},
}}))
.await;
vectors_animal_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let documents = VECTOR_DOCUMENTS.clone();
let (value, code) = vectors_animal_index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
vectors_animal_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let vectors_sentiment_index = server.unique_index_with_prefix("vectors-sentiment");
@ -3687,12 +3687,12 @@ async fn federation_vector_two_indexes() {
}
}}))
.await;
vectors_sentiment_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let documents = VECTOR_DOCUMENTS.clone();
let (value, code) = vectors_sentiment_index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
vectors_sentiment_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"federation": {}, "queries": [
@ -4154,7 +4154,7 @@ async fn federation_facets_different_indexes_same_facet() {
let documents = SCORE_DOCUMENTS.clone();
let (value, _) = batman_2_index.add_documents(documents, None).await;
batman_2_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (value, _) = batman_2_index
.update_settings(json!({
@ -4170,7 +4170,7 @@ async fn federation_facets_different_indexes_same_facet() {
]
}))
.await;
batman_2_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
// return titles ordered across indexes
let (response, code) = server
@ -4677,7 +4677,7 @@ async fn federation_facets_same_indexes() {
let documents = NESTED_DOCUMENTS.clone();
let (value, _) = doggos_index.add_documents(documents, None).await;
doggos_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (value, _) = doggos_index
.update_settings(json!({
@ -4692,13 +4692,13 @@ async fn federation_facets_same_indexes() {
]
}))
.await;
doggos_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let doggos2_index = server.unique_index_with_prefix("doggos_2");
let documents = NESTED_DOCUMENTS.clone();
let (value, _) = doggos2_index.add_documents(documents, None).await;
doggos2_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (value, _) = doggos2_index
.update_settings(json!({
@ -4713,7 +4713,7 @@ async fn federation_facets_same_indexes() {
]
}))
.await;
doggos2_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"federation": {
@ -4980,7 +4980,7 @@ async fn federation_inconsistent_merge_order() {
let documents = DOCUMENTS.clone();
let (value, _) = movies2_index.add_documents(documents, None).await;
movies2_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (value, _) = movies2_index
.update_settings(json!({
@ -4999,7 +4999,7 @@ async fn federation_inconsistent_merge_order() {
}
}))
.await;
movies2_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let batman_index = shared_batman_index().await;

View File

@ -114,14 +114,14 @@ async fn ensure_placeholder_search_hit_count_valid() {
}
]);
let (task, _code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _code) = index
.update_settings(
json!({ "rankingRules": ["distinct:asc"], "distinctAttribute": "distinct"}),
)
.await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
for page in 0..=4 {
index

View File

@ -9,7 +9,7 @@ async fn index_with_documents<'a>(server: &'a Server<Shared>, documents: &Value)
let index = server.unique_index();
let (task, _code) = index.add_documents(documents.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
}
@ -65,7 +65,7 @@ async fn search_no_searchable_attribute_set() {
.await;
let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@ -78,7 +78,7 @@ async fn search_no_searchable_attribute_set() {
.await;
let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@ -109,7 +109,7 @@ async fn search_on_all_attributes_restricted_set() {
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
let (task, _status_code) = index.update_settings_searchable_attributes(json!(["title"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "Captain Marvel", "attributesToSearchOn": ["*"]}), |response, code| {
@ -194,7 +194,7 @@ async fn word_ranking_rule_order_exact_words() {
let (task, _status_code) = index
.update_settings_typo_tolerance(json!({"disableOnWords": ["Captain", "Marvel"]}))
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// simple search should return 2 documents (ids: 2 and 3).
index
@ -360,7 +360,7 @@ async fn search_on_exact_field() {
let (response, code) =
index.update_settings_typo_tolerance(json!({ "disableOnAttributes": ["exact"] })).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
// Searching on an exact attribute should only return the document matching without typo.
index
.search(json!({"q": "Marvel", "attributesToSearchOn": ["exact"]}), |response, code| {
@ -557,7 +557,7 @@ async fn nested_search_on_title_restricted_set_with_suffix_wildcard() {
let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
let (task, _status_code) =
index.update_settings_searchable_attributes(json!(["details.title"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@ -595,7 +595,7 @@ async fn nested_search_no_searchable_attribute_set_with_any_wildcard() {
.await;
let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@ -608,7 +608,7 @@ async fn nested_search_no_searchable_attribute_set_with_any_wildcard() {
.await;
let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(

View File

@ -7,7 +7,7 @@ async fn set_and_reset_distinct_attribute() {
let index = server.unique_index();
let (task1, _code) = index.update_settings(json!({ "distinctAttribute": "test"})).await;
index.wait_task(task1.uid()).await.succeeded();
server.wait_task(task1.uid()).await.succeeded();
let (response, _) = index.settings().await;
@ -15,7 +15,7 @@ async fn set_and_reset_distinct_attribute() {
let (task2, _status_code) = index.update_settings(json!({ "distinctAttribute": null })).await;
index.wait_task(task2.uid()).await.succeeded();
server.wait_task(task2.uid()).await.succeeded();
let (response, _) = index.settings().await;
@ -28,7 +28,7 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {
let index = server.unique_index();
let (update_task1, _code) = index.update_distinct_attribute(json!("test")).await;
index.wait_task(update_task1.uid()).await.succeeded();
server.wait_task(update_task1.uid()).await.succeeded();
let (response, _) = index.get_distinct_attribute().await;
@ -36,7 +36,7 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {
let (update_task2, _status_code) = index.update_distinct_attribute(json!(null)).await;
index.wait_task(update_task2.uid()).await.succeeded();
server.wait_task(update_task2.uid()).await.succeeded();
let (response, _) = index.get_distinct_attribute().await;

View File

@ -58,7 +58,7 @@ macro_rules! test_setting_routes {
let index = server.unique_index();
let (response, code) = index.create(None).await;
assert_eq!(code, 202, "{response}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let url = format!("/indexes/{}/settings/{}",
index.uid,
stringify!($setting)
@ -209,7 +209,7 @@ async fn get_settings() {
let server = Server::new_shared();
let index = server.unique_index();
let (response, _code) = index.create(None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200);
let settings = response.as_object().unwrap();
@ -247,6 +247,20 @@ async fn get_settings() {
assert_eq!(settings["prefixSearch"], json!("indexingTime"));
assert_eq!(settings["facetSearch"], json!(true));
assert_eq!(settings["embedders"], json!({}));
assert_eq!(settings["synonyms"], json!({}));
assert_eq!(
settings["typoTolerance"],
json!({
"enabled": true,
"minWordSizeForTypos": {
"oneTypo": 5,
"twoTypos": 9
},
"disableOnWords": [],
"disableOnAttributes": [],
"disableOnNumbers": false
})
);
}
#[actix_rt::test]
@ -254,7 +268,7 @@ async fn secrets_are_hidden_in_settings() {
let server = Server::new_shared();
let index = server.unique_index();
let (response, _code) = index.create(None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
@ -285,7 +299,7 @@ async fn secrets_are_hidden_in_settings() {
let settings_update_uid = response.uid();
index.wait_task(settings_update_uid).await.succeeded();
server.wait_task(settings_update_uid).await.succeeded();
let (response, code) = index.settings().await;
meili_snap::snapshot!(code, @"200 OK");
@ -384,14 +398,14 @@ async fn test_partial_update() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _code) = index.update_settings(json!({"displayedAttributes": ["foo"]})).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200);
assert_eq!(response["displayedAttributes"], json!(["foo"]));
assert_eq!(response["searchableAttributes"], json!(["*"]));
let (task, _) = index.update_settings(json!({"searchableAttributes": ["bar"]})).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200);
@ -406,7 +420,7 @@ async fn error_delete_settings_unexisting_index() {
let (task, code) = index.delete_settings().await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
}
#[actix_rt::test]
@ -424,12 +438,19 @@ async fn reset_all_settings() {
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (update_task,_status_code) = index
.update_settings(json!({"displayedAttributes": ["name", "age"], "searchableAttributes": ["name"], "stopWords": ["the"], "filterableAttributes": ["age"], "synonyms": {"puppy": ["dog", "doggo", "potat"] }}))
let (update_task, _status_code) = index
.update_settings(json!({
"displayedAttributes": ["name", "age"],
"searchableAttributes": ["name"],
"stopWords": ["the"],
"filterableAttributes": ["age"],
"synonyms": {"puppy": ["dog", "doggo", "potat"] },
"typoTolerance": {"disableOnNumbers": true}
}))
.await;
index.wait_task(update_task.uid()).await.succeeded();
server.wait_task(update_task.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200);
assert_eq!(response["displayedAttributes"], json!(["name", "age"]));
@ -437,9 +458,22 @@ async fn reset_all_settings() {
assert_eq!(response["stopWords"], json!(["the"]));
assert_eq!(response["synonyms"], json!({"puppy": ["dog", "doggo", "potat"] }));
assert_eq!(response["filterableAttributes"], json!(["age"]));
assert_eq!(
response["typoTolerance"],
json!({
"enabled": true,
"minWordSizeForTypos": {
"oneTypo": 5,
"twoTypos": 9
},
"disableOnWords": [],
"disableOnAttributes": [],
"disableOnNumbers": true
})
);
let (delete_task, _status_code) = index.delete_settings().await;
index.wait_task(delete_task.uid()).await.succeeded();
server.wait_task(delete_task.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200);
@ -448,6 +482,19 @@ async fn reset_all_settings() {
assert_eq!(response["stopWords"], json!([]));
assert_eq!(response["filterableAttributes"], json!([]));
assert_eq!(response["synonyms"], json!({}));
assert_eq!(
response["typoTolerance"],
json!({
"enabled": true,
"minWordSizeForTypos": {
"oneTypo": 5,
"twoTypos": 9
},
"disableOnWords": [],
"disableOnAttributes": [],
"disableOnNumbers": false
})
);
let (response, code) = index.get_document(1, None).await;
assert_eq!(code, 200);
@ -460,11 +507,11 @@ async fn update_setting_unexisting_index() {
let index = server.unique_index();
let (task, code) = index.update_settings(json!({})).await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (_response, code) = index.get().await;
assert_eq!(code, 200);
let (task, _status_code) = index.delete_settings().await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
}
#[actix_rt::test]
@ -507,7 +554,7 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {
let index = server.unique_index();
let (task, _code) = index.update_distinct_attribute(json!("test")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _) = index.get_distinct_attribute().await;
@ -515,7 +562,7 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {
let (task, _status_code) = index.update_distinct_attribute(json!(null)).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _) = index.get_distinct_attribute().await;
@ -540,7 +587,7 @@ async fn granular_filterable_attributes() {
{ "attributePatterns": ["default-facet-search"], "features": { "filter": {"equality": true, "comparison": true} } },
] })).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200, "{response}");

View File

@ -30,7 +30,7 @@ async fn attribute_scale_search() {
let index = server.unique_index();
let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
@ -39,7 +39,7 @@ async fn attribute_scale_search() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
// the expected order is [1, 3, 2] instead of [3, 1, 2]
// because the attribute scale doesn't make the difference between 1 and 3.
@ -103,7 +103,7 @@ async fn attribute_scale_phrase_search() {
let index = server.unique_index();
let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, _code) = index
.update_settings(json!({
@ -111,7 +111,7 @@ async fn attribute_scale_phrase_search() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// the expected order is [1, 3] instead of [3, 1]
// because the attribute scale doesn't make the difference between 1 and 3.
@ -171,7 +171,7 @@ async fn word_scale_set_and_reset() {
let index = server.unique_index();
let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// Set and reset the setting ensuring the swap between the 2 settings is applied.
let (update_task1, _code) = index
@ -180,7 +180,7 @@ async fn word_scale_set_and_reset() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
index.wait_task(update_task1.uid()).await.succeeded();
server.wait_task(update_task1.uid()).await.succeeded();
let (update_task2, _code) = index
.update_settings(json!({
@ -188,7 +188,7 @@ async fn word_scale_set_and_reset() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
index.wait_task(update_task2.uid()).await.succeeded();
server.wait_task(update_task2.uid()).await.succeeded();
// [3, 1, 2]
index
@ -286,7 +286,7 @@ async fn attribute_scale_default_ranking_rules() {
let index = server.unique_index();
let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
@ -294,7 +294,7 @@ async fn attribute_scale_default_ranking_rules() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
// the expected order is [3, 1, 2]
index

View File

@ -15,7 +15,7 @@ async fn set_and_reset() {
"dictionary": ["J.R.R.", "J. R. R."],
}))
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _) = index.settings().await;
snapshot!(json_string!(response["nonSeparatorTokens"]), @r###"
@ -45,7 +45,7 @@ async fn set_and_reset() {
}))
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _) = index.settings().await;
snapshot!(json_string!(response["nonSeparatorTokens"]), @"[]");
@ -74,7 +74,7 @@ async fn set_and_search() {
let index = server.unique_index();
let (add_task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(add_task.uid()).await.succeeded();
server.wait_task(add_task.uid()).await.succeeded();
let (update_task, _code) = index
.update_settings(json!({
@ -83,7 +83,7 @@ async fn set_and_search() {
"dictionary": ["#", "A#", "B#", "C#", "D#", "E#", "F#", "G#"],
}))
.await;
index.wait_task(update_task.uid()).await.succeeded();
server.wait_task(update_task.uid()).await.succeeded();
index
.search(json!({"q": "&", "attributesToHighlight": ["content"]}), |response, code| {
@ -228,7 +228,7 @@ async fn advanced_synergies() {
let index = server.unique_index();
let (add_task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(add_task.uid()).await.succeeded();
server.wait_task(add_task.uid()).await.succeeded();
let (update_task, _code) = index
.update_settings(json!({
@ -243,7 +243,7 @@ async fn advanced_synergies() {
}
}))
.await;
index.wait_task(update_task.uid()).await.succeeded();
server.wait_task(update_task.uid()).await.succeeded();
index
.search(json!({"q": "J.R.R.", "attributesToHighlight": ["content"]}), |response, code| {
@ -353,7 +353,7 @@ async fn advanced_synergies() {
"dictionary": ["J.R.R.", "J. R. R.", "J.K.", "J. K."],
}))
.await;
index.wait_task(_response.uid()).await.succeeded();
server.wait_task(_response.uid()).await.succeeded();
index
.search(json!({"q": "jk", "attributesToHighlight": ["content"]}), |response, code| {

View File

@ -97,7 +97,7 @@ async fn task_bad_types() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `upgradeDatabase`.",
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"
@ -108,7 +108,7 @@ async fn task_bad_types() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `upgradeDatabase`.",
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"
@ -119,7 +119,7 @@ async fn task_bad_types() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `upgradeDatabase`.",
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"

View File

@ -43,7 +43,7 @@ async fn version_too_old() {
std::fs::write(db_path.join("VERSION"), "1.11.9999").unwrap();
let options = Opt { experimental_dumpless_upgrade: true, ..default_settings };
let err = Server::new_with_options(options).await.map(|_| ()).unwrap_err();
snapshot!(err, @"Database version 1.11.9999 is too old for the experimental dumpless upgrade feature. Please generate a dump using the v1.11.9999 and import it in the v1.15.2");
snapshot!(err, @"Database version 1.11.9999 is too old for the experimental dumpless upgrade feature. Please generate a dump using the v1.11.9999 and import it in the v1.16.0");
}
#[actix_rt::test]
@ -58,7 +58,7 @@ async fn version_requires_downgrade() {
std::fs::write(db_path.join("VERSION"), format!("{major}.{minor}.{patch}")).unwrap();
let options = Opt { experimental_dumpless_upgrade: true, ..default_settings };
let err = Server::new_with_options(options).await.map(|_| ()).unwrap_err();
snapshot!(err, @"Database version 1.15.3 is higher than the Meilisearch version 1.15.2. Downgrade is not supported");
snapshot!(err, @"Database version 1.16.1 is higher than the Meilisearch version 1.16.0. Downgrade is not supported");
}
#[actix_rt::test]

View File

@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.2"
"upgradeTo": "v1.16.0"
},
"stats": {
"totalNbTasks": 1,

View File

@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.2"
"upgradeTo": "v1.16.0"
},
"stats": {
"totalNbTasks": 1,

View File

@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.2"
"upgradeTo": "v1.16.0"
},
"stats": {
"totalNbTasks": 1,

View File

@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.2"
"upgradeTo": "v1.16.0"
},
"error": null,
"duration": "[duration]",

View File

@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.2"
"upgradeTo": "v1.16.0"
},
"error": null,
"duration": "[duration]",

View File

@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.2"
"upgradeTo": "v1.16.0"
},
"error": null,
"duration": "[duration]",

View File

@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.2"
"upgradeTo": "v1.16.0"
},
"stats": {
"totalNbTasks": 1,

View File

@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.2"
"upgradeTo": "v1.16.0"
},
"error": null,
"duration": "[duration]",

View File

@ -1,7 +1,10 @@
use std::collections::BTreeMap;
use std::sync::atomic::AtomicUsize;
use std::time::Duration;
use meili_snap::{json_string, snapshot};
use reqwest::IntoUrl;
use tokio::sync::mpsc;
use wiremock::matchers::{method, path};
use wiremock::{Mock, MockServer, Request, ResponseTemplate};
@ -334,6 +337,41 @@ async fn create_mock_raw() -> (MockServer, Value) {
(mock_server, embedder_settings)
}
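/// Mock embedder that always answers with HTTP 500: the first five requests fail
/// immediately, then every following request notifies the test through `sender`
/// and hangs forever.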
async fn create_faulty_mock_raw(sender: mpsc::Sender<()>) -> (MockServer, Value) {
let mock_server = MockServer::start().await;
let count = AtomicUsize::new(0);
Mock::given(method("POST"))
.and(path("/"))
.respond_with(move |_req: &Request| {
let count = count.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
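// The first five requests fail fast; afterwards, signal the test and stall.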
if count >= 5 {
let _ = sender.try_send(());
ResponseTemplate::new(500)
.set_delay(Duration::from_secs(u64::MAX)) // Make the response hang forever
.set_body_string("Service Unavailable")
} else {
ResponseTemplate::new(500).set_body_string("Service Unavailable")
}
})
.mount(&mock_server)
.await;
let url = mock_server.uri();
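// Plain REST embedder settings pointing at the faulty mock.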
let embedder_settings = json!({
"source": "rest",
"url": url,
"dimensions": 3,
"request": "{{text}}",
"response": "{{embedding}}",
"documentTemplate": "{{doc.name}}"
});
(mock_server, embedder_settings)
}
pub async fn post<T: IntoUrl>(url: T, text: &str) -> reqwest::Result<reqwest::Response> {
reqwest::Client::builder().build()?.post(url).json(&json!(text)).send().await
}
@ -370,13 +408,13 @@ async fn bad_request() {
.await;
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request`: \"{{text}}\" not found",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
}
"###);
{
"message": "Error while generating embeddings: user error: in `request`: \"{{text}}\" not found\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
}
"###);
// A repeat string appears inside a repeated value
let (response, code) = index
@ -399,7 +437,7 @@ async fn bad_request() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request.input.input`: \"{{..}}\" appears nested inside of a value that is itself repeated",
"message": "Error while generating embeddings: user error: in `request.input.input`: \"{{..}}\" appears nested inside of a value that is itself repeated\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -422,7 +460,7 @@ async fn bad_request() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request.input.repeat`: \"{{..}}\" appears outside of an array",
"message": "Error while generating embeddings: user error: in `request.input.repeat`: \"{{..}}\" appears outside of an array\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -445,7 +483,7 @@ async fn bad_request() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request.input`: \"{{..}}\" expected at position #1, but found at position #0",
"message": "Error while generating embeddings: user error: in `request.input`: \"{{..}}\" expected at position #1, but found at position #0\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -468,7 +506,7 @@ async fn bad_request() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request.input`: \"{{..}}\" expected at position #1, but found at position #2",
"message": "Error while generating embeddings: user error: in `request.input`: \"{{..}}\" expected at position #1, but found at position #2\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -491,7 +529,7 @@ async fn bad_request() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request.input[0]`: Expected \"{{text}}\" inside of the repeated value",
"message": "Error while generating embeddings: user error: in `request.input[0]`: Expected \"{{text}}\" inside of the repeated value\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -518,7 +556,7 @@ async fn bad_request() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request.data`: Found \"{{..}}\", but it was already present in `request.input`",
"message": "Error while generating embeddings: user error: in `request.data`: Found \"{{..}}\", but it was already present in `request.input`\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -539,7 +577,7 @@ async fn bad_request() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request.data`: Found \"{{text}}\", but it was already present in `request.input`",
"message": "Error while generating embeddings: user error: in `request.data`: Found \"{{text}}\", but it was already present in `request.input`\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -560,7 +598,7 @@ async fn bad_request() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request.repeated.data[1]`: Found \"{{text}}\", but it was already present in `request.repeated.input`",
"message": "Error while generating embeddings: user error: in `request.repeated.data[1]`: Found \"{{text}}\", but it was already present in `request.repeated.input`\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -581,7 +619,7 @@ async fn bad_request() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request.data`: Found \"{{text}}\", but it was already present in `request.input[0]` (repeated)",
"message": "Error while generating embeddings: user error: in `request.data`: Found \"{{text}}\", but it was already present in `request.input[0]` (repeated)\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -882,7 +920,7 @@ async fn bad_settings() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Error while generating embeddings: user error: in `request`: \"{{text}}\" not found",
"message": "Error while generating embeddings: user error: in `request`: \"{{text}}\" not found\n - Note: this template is using a document template, and so expects to contain the placeholder \"{{text}}\" rather than \"{{fragment}}\"",
"code": "vector_embedding_error",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#vector_embedding_error"
@ -2111,3 +2149,71 @@ async fn searchable_reindex() {
}
"###);
}
#[actix_rt::test]
async fn last_error_stats() {
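// The faulty mock uses this channel to tell the test when its responses start hanging.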
let (sender, mut receiver) = mpsc::channel(10);
let (_mock, setting) = create_faulty_mock_raw(sender).await;
let server = get_server_vector().await;
let index = server.index("doggo");
let (response, code) = index
.update_settings(json!({
"embedders": {
"rest": setting,
},
}))
.await;
snapshot!(code, @"202 Accepted");
let task = server.wait_task(response.uid()).await;
snapshot!(task["status"], @r###""succeeded""###);
let documents = json!([
{"id": 0, "name": "will_return_500"},
{"id": 1, "name": "will_error"},
{"id": 2, "name": "must_error"},
]);
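// Every embedding request for these documents hits the faulty mock and fails.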
let (_value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
// The task will eventually fail, so let's not wait for it.
// Instead, wait for the signal the faulty mock sends once its responses start hanging.
receiver.recv().await;
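// The batch is still processing, but its stats should already report the embedder failures.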
let (response, _code) = index.filtered_batches(&[], &[], &[]).await;
snapshot!(json_string!(response["results"][0], {
".progress" => "[ignored]",
".stats.embedderRequests.total" => "[ignored]",
".stats.embedderRequests.failed" => "[ignored]",
".startedAt" => "[ignored]"
}), @r#"
{
"uid": 1,
"progress": "[ignored]",
"details": {
"receivedDocuments": 3,
"indexedDocuments": null
},
"stats": {
"totalNbTasks": 1,
"status": {
"processing": 1
},
"types": {
"documentAdditionOrUpdate": 1
},
"indexUids": {
"doggo": 1
},
"embedderRequests": {
"total": "[ignored]",
"failed": "[ignored]",
"lastError": "runtime error: received internal error HTTP 500 from embedding server\n - server replied with `Service Unavailable`"
}
},
"duration": null,
"startedAt": "[ignored]",
"finishedAt": null,
"batchStrategy": "batched all enqueued tasks"
}
"#);
}