diff --git a/crates/meilisearch/tests/common/mod.rs b/crates/meilisearch/tests/common/mod.rs
index 9823bf483..00e4732d0 100644
--- a/crates/meilisearch/tests/common/mod.rs
+++ b/crates/meilisearch/tests/common/mod.rs
@@ -128,7 +128,8 @@ impl Display for Value {
                 ".finishedAt" => "[date]",
                 ".duration" => "[duration]",
                 ".processingTimeMs" => "[duration]",
-                ".details.embedders.*.url" => "[url]"
+                ".details.embedders.*.url" => "[url]",
+                ".details.dumpUid" => "[dump_uid]",
             })
         )
     }
diff --git a/crates/meilisearch/tests/tasks/mod.rs b/crates/meilisearch/tests/tasks/mod.rs
index f432ef7db..c31e43883 100644
--- a/crates/meilisearch/tests/tasks/mod.rs
+++ b/crates/meilisearch/tests/tasks/mod.rs
@@ -1,8 +1,7 @@
 mod errors;
 mod webhook;
 
-use meili_snap::insta::assert_json_snapshot;
-use meili_snap::snapshot;
+use meili_snap::{json_string, snapshot};
 use time::format_description::well_known::Rfc3339;
 use time::OffsetDateTime;
 
@@ -11,14 +10,12 @@ use crate::json;
 
 #[actix_rt::test]
 async fn error_get_unexisting_task_status() {
-    let server = Server::new().await;
-    let index = server.index("test");
-    let (task, _status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
-    let (response, code) = index.get_task(1).await;
+    let server = Server::new_shared();
+    let index = server.unique_index();
+    let (response, code) = index.get_task(u32::MAX as u64).await;
 
     let expected_response = json!({
-        "message": "Task `1` not found.",
+        "message": "Task `4294967295` not found.",
         "code": "task_not_found",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#task_not_found"
@@ -30,8 +27,8 @@ async fn error_get_unexisting_task_status() {
 
 #[actix_rt::test]
 async fn get_task_status() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (create_task, _status_code) = index.create(None).await;
     let (add_task, _status_code) = index
         .add_documents(
@@ -50,6 +47,7 @@ async fn get_task_status() {
 
 #[actix_rt::test]
 async fn list_tasks() {
+    // Do not use a shared server because we want to assert stuff against the global list of tasks
     let server = Server::new().await;
     let index = server.index("test");
     let (task, _status_code) = index.create(None).await;
@@ -64,6 +62,7 @@ async fn list_tasks() {
 
 #[actix_rt::test]
 async fn list_tasks_pagination_and_reverse() {
+    // do not use a shared server here, as we want to assert tasks ids and we need them to be stable
    let server = Server::new().await;
     // First of all we want to create a lot of tasks very quickly. The fastest way is to delete a lot of unexisting indexes
     let mut last_task = None;
@@ -71,7 +70,7 @@ async fn list_tasks_pagination_and_reverse() {
         let index = server.index(format!("test-{i}"));
         last_task = Some(index.create(None).await.0.uid());
     }
-    server.wait_task(last_task.unwrap()).await;
+    server.wait_task(last_task.unwrap()).await.succeeded();
 
     let (response, code) = server.tasks_filter("limit=3").await;
     assert_eq!(code, 200);
@@ -103,13 +102,14 @@ async fn list_tasks_pagination_and_reverse() {
 #[actix_rt::test]
 async fn list_tasks_with_star_filters() {
     let server = Server::new().await;
+    // Do not use a unique index here, as we want to test the `indexUids=*` filter.
     let index = server.index("test");
     let (task, _code) = index.create(None).await;
     index.wait_task(task.uid()).await.succeeded();
     index
         .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
         .await;
-    let (response, code) = index.service.get("/tasks?indexUids=test").await;
+    let (response, code) = index.service.get(format!("/tasks?indexUids={}", index.uid)).await;
     assert_eq!(code, 200);
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 
@@ -127,26 +127,30 @@ async fn list_tasks_with_star_filters() {
 
     let (response, code) =
         index.service.get("/tasks?types=*,documentAdditionOrUpdate&statuses=*").await;
-    assert_eq!(code, 200, "{:?}", response);
+    assert_eq!(code, 200, "{response:?}");
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 
     let (response, code) = index
         .service
-        .get("/tasks?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids=test")
+        .get(format!(
+            "/tasks?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids={}",
+            index.uid
+        ))
         .await;
-    assert_eq!(code, 200, "{:?}", response);
+    assert_eq!(code, 200, "{response:?}");
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 
     let (response, code) = index
         .service
         .get("/tasks?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids=test,*")
         .await;
-    assert_eq!(code, 200, "{:?}", response);
+    assert_eq!(code, 200, "{response:?}");
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 }
 
 #[actix_rt::test]
 async fn list_tasks_status_filtered() {
+    // Do not use a shared server because we want to assert stuff against the global list of tasks
     let server = Server::new().await;
     let index = server.index("test");
     let (task, _status_code) = index.create(None).await;
@@ -155,20 +159,21 @@ async fn list_tasks_status_filtered() {
     index.wait_task(task.uid()).await.failed();
 
     let (response, code) = index.filtered_tasks(&[], &["succeeded"], &[]).await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 1);
 
     let (response, code) = index.filtered_tasks(&[], &["succeeded"], &[]).await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 1);
 
     let (response, code) = index.filtered_tasks(&[], &["succeeded", "failed"], &[]).await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 }
 
 #[actix_rt::test]
 async fn list_tasks_type_filtered() {
+    // Do not use a shared server because we want to assert stuff against the global list of tasks
     let server = Server::new().await;
     let index = server.index("test");
     let (task, _status_code) = index.create(None).await;
@@ -178,32 +183,34 @@ async fn list_tasks_type_filtered() {
         .await;
 
     let (response, code) = index.filtered_tasks(&["indexCreation"], &[], &[]).await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 1);
 
     let (response, code) =
         index.filtered_tasks(&["indexCreation", "documentAdditionOrUpdate"], &[], &[]).await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 }
 
 #[actix_rt::test]
 async fn list_tasks_invalid_canceled_by_filter() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (task, _status_code) = index.create(None).await;
     index.wait_task(task.uid()).await.succeeded();
-    index
+    let (task, _code) = index
         .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
         .await;
 
-    let (response, code) = index.filtered_tasks(&[], &[], &["0"]).await;
-    assert_eq!(code, 200, "{}", response);
+    let (response, code) =
+        index.filtered_tasks(&[], &[], &[format!("{}", task.uid()).as_str()]).await;
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 0);
 }
 
 #[actix_rt::test]
 async fn list_tasks_status_and_type_filtered() {
+    // Do not use a shared server because we want to assert stuff against the global list of tasks
     let server = Server::new().await;
     let index = server.index("test");
     let (task, _status_code) = index.create(None).await;
@@ -213,7 +220,7 @@ async fn list_tasks_status_and_type_filtered() {
         .await;
 
     let (response, code) = index.filtered_tasks(&["indexCreation"], &["failed"], &[]).await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 0);
 
     let (response, code) = index
@@ -223,12 +230,12 @@ async fn list_tasks_status_and_type_filtered() {
             &[],
         )
         .await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 }
 
 macro_rules! assert_valid_summarized_task {
-    ($response:expr, $task_type:literal, $index:literal) => {{
+    ($response:expr, $task_type:literal, $index:tt) => {{
         assert_eq!($response.as_object().unwrap().len(), 5);
         assert!($response["taskUid"].as_u64().is_some());
         assert_eq!($response["indexUid"], $index);
@@ -242,49 +249,49 @@ macro_rules! assert_valid_summarized_task {
 
 #[actix_web::test]
 async fn test_summarized_task_view() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
+    let index_uid = index.uid.clone();
 
     let (response, _) = index.create(None).await;
-    assert_valid_summarized_task!(response, "indexCreation", "test");
+    assert_valid_summarized_task!(response, "indexCreation", index_uid);
 
     let (response, _) = index.update(None).await;
-    assert_valid_summarized_task!(response, "indexUpdate", "test");
+    assert_valid_summarized_task!(response, "indexUpdate", index_uid);
 
     let (response, _) = index.update_settings(json!({})).await;
-    assert_valid_summarized_task!(response, "settingsUpdate", "test");
+    assert_valid_summarized_task!(response, "settingsUpdate", index_uid);
 
     let (response, _) = index.update_documents(json!([{"id": 1}]), None).await;
-    assert_valid_summarized_task!(response, "documentAdditionOrUpdate", "test");
+    assert_valid_summarized_task!(response, "documentAdditionOrUpdate", index_uid);
 
     let (response, _) = index.add_documents(json!([{"id": 1}]), None).await;
-    assert_valid_summarized_task!(response, "documentAdditionOrUpdate", "test");
+    assert_valid_summarized_task!(response, "documentAdditionOrUpdate", index_uid);
 
     let (response, _) = index.delete_document(1).await;
-    assert_valid_summarized_task!(response, "documentDeletion", "test");
+    assert_valid_summarized_task!(response, "documentDeletion", index_uid);
 
     let (response, _) = index.clear_all_documents().await;
-    assert_valid_summarized_task!(response, "documentDeletion", "test");
+    assert_valid_summarized_task!(response, "documentDeletion", index_uid);
 
     let (response, _) = index.delete().await;
-    assert_valid_summarized_task!(response, "indexDeletion", "test");
+    assert_valid_summarized_task!(response, "indexDeletion", index_uid);
 }
 
 #[actix_web::test]
 async fn test_summarized_document_addition_or_update() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (task, _status_code) =
         index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), None).await;
     index.wait_task(task.uid()).await.succeeded();
-    let (task, _) = index.get_task(0).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    let (task, _) = index.get_task(task.uid()).await;
+    snapshot!(task,
        @r###"
    {
-      "uid": 0,
-      "batchUid": 0,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "succeeded",
       "type": "documentAdditionOrUpdate",
       "canceledBy": null,
@@ -303,14 +310,13 @@ async fn test_summarized_document_addition_or_update() {
     let (task, _status_code) =
         index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), Some("id")).await;
     index.wait_task(task.uid()).await.succeeded();
-    let (task, _) = index.get_task(1).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    let (task, _) = index.get_task(task.uid()).await;
+    snapshot!(task,
        @r###"
    {
-      "uid": 1,
-      "batchUid": 1,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "succeeded",
       "type": "documentAdditionOrUpdate",
       "canceledBy": null,
@@ -329,18 +335,22 @@
 
 #[actix_web::test]
 async fn test_summarized_delete_documents_by_batch() {
-    let server = Server::new().await;
-    let index = server.index("test");
-    let (task, _status_code) = index.delete_batch(vec![1, 2, 3]).await;
+    let server = Server::new_shared();
+    let index = server.unique_index();
+    let non_existing_task_id1 = u32::MAX as u64;
+    let non_existing_task_id2 = non_existing_task_id1 - 1;
+    let non_existing_task_id3 = non_existing_task_id1 - 2;
+    let (task, _status_code) = index
+        .delete_batch(vec![non_existing_task_id1, non_existing_task_id2, non_existing_task_id3])
+        .await;
     index.wait_task(task.uid()).await.failed();
-    let (task, _) = index.get_task(0).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    let (task, _) = index.get_task(task.uid()).await;
+    snapshot!(task,
        @r###"
    {
-      "uid": 0,
-      "batchUid": 0,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "failed",
       "type": "documentDeletion",
       "canceledBy": null,
@@ -350,7 +360,7 @@ async fn test_summarized_delete_documents_by_batch() {
         "originalFilter": null
       },
       "error": {
-        "message": "Index `test` not found.",
+        "message": "Index `[uuid]` not found.",
         "code": "index_not_found",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#index_not_found"
@@ -366,13 +376,12 @@ async fn test_summarized_delete_documents_by_batch() {
     let (del_task, _status_code) = index.delete_batch(vec![42]).await;
     index.wait_task(del_task.uid()).await.succeeded();
     let (task, _) = index.get_task(del_task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
        @r###"
    {
-      "uid": 2,
-      "batchUid": 2,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "succeeded",
       "type": "documentDeletion",
       "canceledBy": null,
@@ -392,20 +401,19 @@ async fn test_summarized_delete_documents_by_batch() {
 
 #[actix_web::test]
 async fn test_summarized_delete_documents_by_filter() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (task, _status_code) =
         index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
     index.wait_task(task.uid()).await.failed();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
        @r###"
    {
-      "uid": 0,
-      "batchUid": 0,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "failed",
       "type": "documentDeletion",
       "canceledBy": null,
@@ -415,7 +423,7 @@ async fn test_summarized_delete_documents_by_filter() {
         "originalFilter": "\"doggo = bernese\""
       },
       "error": {
-        "message": "Index `test` not found.",
+        "message": "Index `[uuid]` not found.",
         "code": "index_not_found",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#index_not_found"
@@ -432,13 +440,12 @@ async fn test_summarized_delete_documents_by_filter() {
         index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
     index.wait_task(task.uid()).await.failed();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
        @r###"
    {
-      "uid": 2,
-      "batchUid": 2,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "failed",
       "type": "documentDeletion",
       "canceledBy": null,
@@ -448,7 +455,7 @@ async fn test_summarized_delete_documents_by_filter() {
         "originalFilter": "\"doggo = bernese\""
      },
       "error": {
-        "message": "Index `test`: Attribute `doggo` is not filterable. This index does not have configured filterable attributes.\n1:6 doggo = bernese",
+        "message": "Index `[uuid]`: Attribute `doggo` is not filterable. This index does not have configured filterable attributes.\n1:6 doggo = bernese",
         "code": "invalid_document_filter",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#invalid_document_filter"
@@ -465,13 +472,12 @@ async fn test_summarized_delete_documents_by_filter() {
         index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
     index.wait_task(task.uid()).await.succeeded();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
        @r###"
    {
-      "uid": 4,
-      "batchUid": 4,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "succeeded",
       "type": "documentDeletion",
       "canceledBy": null,
@@ -491,18 +497,17 @@
 
 #[actix_web::test]
 async fn test_summarized_delete_document_by_id() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (task, _status_code) = index.delete_document(1).await;
     index.wait_task(task.uid()).await.failed();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
        @r###"
    {
-      "uid": 0,
-      "batchUid": 0,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "failed",
       "type": "documentDeletion",
       "canceledBy": null,
@@ -512,7 +517,7 @@ async fn test_summarized_delete_document_by_id() {
         "originalFilter": null
      },
       "error": {
-        "message": "Index `test` not found.",
+        "message": "Index `[uuid]` not found.",
         "code": "index_not_found",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#index_not_found"
@@ -528,13 +533,12 @@ async fn test_summarized_delete_document_by_id() {
     let (task, _status_code) = index.delete_document(42).await;
     index.wait_task(task.uid()).await.succeeded();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
        @r###"
    {
-      "uid": 2,
-      "batchUid": 2,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "succeeded",
       "type": "documentDeletion",
       "canceledBy": null,
@@ -554,12 +558,12 @@
 
 #[actix_web::test]
 async fn test_summarized_settings_update() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     // here we should find my payload even in the failed task.
     let (response, code) = index.update_settings(json!({ "rankingRules": ["custom"] })).await;
-    meili_snap::snapshot!(code, @"400 Bad Request");
-    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
+    snapshot!(code, @"400 Bad Request");
+    snapshot!(json_string!(response), @r###"
    {
      "message": "Invalid value at `.rankingRules[0]`: `custom` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules.",
      "code": "invalid_settings_ranking_rules",
@@ -571,13 +575,12 @@ async fn test_summarized_settings_update() {
     let (task,_status_code) = index.update_settings(json!({ "displayedAttributes": ["doggos", "name"], "filterableAttributes": ["age", "nb_paw_pads"], "sortableAttributes": ["iq"] })).await;
     index.wait_task(task.uid()).await.succeeded();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
        @r###"
    {
-      "uid": 0,
-      "batchUid": 0,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "succeeded",
       "type": "settingsUpdate",
       "canceledBy": null,
@@ -605,18 +608,17 @@
 
 #[actix_web::test]
 async fn test_summarized_index_creation() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (task, _status_code) = index.create(None).await;
     index.wait_task(task.uid()).await.succeeded();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
        @r###"
    {
-      "uid": 0,
-      "batchUid": 0,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "succeeded",
       "type": "indexCreation",
       "canceledBy": null,
@@ -634,13 +636,12 @@ async fn test_summarized_index_creation() {
     let (task, _status_code) = index.create(Some("doggos")).await;
     index.wait_task(task.uid()).await.failed();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
        @r###"
    {
-      "uid": 1,
-      "batchUid": 1,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "failed",
       "type": "indexCreation",
       "canceledBy": null,
@@ -648,7 +649,7 @@ async fn test_summarized_index_creation() {
         "primaryKey": "doggos"
      },
       "error": {
-        "message": "Index `test` already exists.",
+        "message": "Index `[uuid]` already exists.",
         "code": "index_already_exists",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#index_already_exists"
@@ -663,8 +664,8 @@
 
 #[actix_web::test]
 async fn test_summarized_index_deletion() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (ret, _code) = index.delete().await;
     let task = index.wait_task(ret.uid()).await;
     snapshot!(task,
@@ -672,7 +673,7 @@ async fn test_summarized_index_deletion() {
    {
       "uid": "[uid]",
       "batchUid": "[batch_uid]",
-      "indexUid": "test",
+      "indexUid": "[uuid]",
       "status": "failed",
       "type": "indexDeletion",
       "canceledBy": null,
@@ -680,7 +681,7 @@ async fn test_summarized_index_deletion() {
         "deletedDocuments": 0
      },
       "error": {
-        "message": "Index `test` not found.",
+        "message": "Index `[uuid]` not found.",
         "code": "index_not_found",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#index_not_found"
@@ -703,7 +704,7 @@ async fn test_summarized_index_deletion() {
    {
       "uid": "[uid]",
       "batchUid": "[batch_uid]",
-      "indexUid": "test",
+      "indexUid": "[uuid]",
       "status": "succeeded",
       "type": "documentAdditionOrUpdate",
       "canceledBy": null,
@@ -726,7 +727,7 @@ async fn test_summarized_index_deletion() {
    {
       "uid": "[uid]",
       "batchUid": "[batch_uid]",
-      "indexUid": "test",
+      "indexUid": "[uuid]",
       "status": "succeeded",
       "type": "indexDeletion",
       "canceledBy": null,
@@ -749,7 +750,7 @@ async fn test_summarized_index_deletion() {
    {
       "uid": "[uid]",
       "batchUid": "[batch_uid]",
-      "indexUid": "test",
+      "indexUid": "[uuid]",
       "status": "failed",
       "type": "indexDeletion",
       "canceledBy": null,
@@ -757,7 +758,7 @@ async fn test_summarized_index_deletion() {
         "deletedDocuments": 0
      },
       "error": {
-        "message": "Index `test` not found.",
+        "message": "Index `[uuid]` not found.",
         "code": "index_not_found",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#index_not_found"
@@ -772,19 +773,18 @@
 
 #[actix_web::test]
 async fn test_summarized_index_update() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     // If the index doesn't exist yet, we should get errors with or without the primary key.
     let (task, _status_code) = index.update(None).await;
     index.wait_task(task.uid()).await.failed();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
        @r###"
    {
-      "uid": 0,
-      "batchUid": 0,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "failed",
       "type": "indexUpdate",
       "canceledBy": null,
@@ -792,7 +792,7 @@ async fn test_summarized_index_update() {
         "primaryKey": null
      },
       "error": {
-        "message": "Index `test` not found.",
+        "message": "Index `[uuid]` not found.",
         "code": "index_not_found",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#index_not_found"
@@ -807,13 +807,12 @@ async fn test_summarized_index_update() {
     let (task, _status_code) = index.update(Some("bones")).await;
     index.wait_task(task.uid()).await.failed();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
        @r###"
    {
-      "uid": 1,
-      "batchUid": 1,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "failed",
       "type": "indexUpdate",
       "canceledBy": null,
@@ -821,7 +820,7 @@ async fn test_summarized_index_update() {
         "primaryKey": "bones"
      },
       "error": {
-        "message": "Index `test` not found.",
+        "message": "Index `[uuid]` not found.",
         "code": "index_not_found",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#index_not_found"
@@ -839,13 +838,12 @@ async fn test_summarized_index_update() {
     let (task, _status_code) = index.update(None).await;
     index.wait_task(task.uid()).await.succeeded();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
        @r###"
    {
-      "uid": 3,
-      "batchUid": 3,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "succeeded",
       "type": "indexUpdate",
       "canceledBy": null,
@@ -863,13 +861,12 @@ async fn test_summarized_index_update() {
     let (task, _status_code) = index.update(Some("bones")).await;
     index.wait_task(task.uid()).await.succeeded();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
        @r###"
    {
-      "uid": 4,
-      "batchUid": 4,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "succeeded",
       "type": "indexUpdate",
       "canceledBy": null,
@@ -887,7 +884,7 @@ async fn test_summarized_index_update() {
 
 #[actix_web::test]
 async fn test_summarized_index_swap() {
-    let server = Server::new().await;
+    let server = Server::new_shared();
     let (task, _status_code) = server
         .index_swap(json!([
             { "indexes": ["doggos", "cattos"] }
@@ -895,12 +892,11 @@ async fn test_summarized_index_swap() {
         .await;
     server.wait_task(task.uid()).await.failed();
     let (task, _) = server.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
        @r###"
    {
-      "uid": 0,
-      "batchUid": 0,
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
       "indexUid": null,
       "status": "failed",
       "type": "indexSwap",
@@ -928,23 +924,25 @@ async fn test_summarized_index_swap() {
    }
    "###);
 
-    let (task, _code) = server.index("doggos").create(None).await;
+    let doggos_index = server.unique_index();
+    let (task, _code) = doggos_index.create(None).await;
     server.wait_task(task.uid()).await.succeeded();
-    let (task, _code) = server.index("cattos").create(None).await;
+    let cattos_index = server.unique_index();
+    let (task, _code) = cattos_index.create(None).await;
     server.wait_task(task.uid()).await.succeeded();
     let (task, _code) = server
         .index_swap(json!([
-            { "indexes": ["doggos", "cattos"] }
+            { "indexes": [doggos_index.uid, cattos_index.uid] }
         ]))
         .await;
     server.wait_task(task.uid()).await.succeeded();
     let (task, _) = server.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(json_string!(task,
+        { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".**.indexes[0]" => "doggos", ".**.indexes[1]" => "cattos", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
        @r###"
    {
-      "uid": 3,
-      "batchUid": 3,
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
       "indexUid": null,
       "status": "succeeded",
       "type": "indexSwap",
@@ -970,20 +968,21 @@
 
 #[actix_web::test]
 async fn test_summarized_task_cancelation() {
-    let server = Server::new().await;
-    let index = server.index("doggos");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     // to avoid being flaky we're only going to cancel an already finished task :(
     let (task, _status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
-    let (task, _status_code) = server.cancel_tasks("uids=0").await;
+    let task_uid = task.uid();
+    index.wait_task(task_uid).await.succeeded();
+    let (task, _status_code) = server.cancel_tasks(format!("uids={task_uid}").as_str()).await;
     index.wait_task(task.uid()).await.succeeded();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(json_string!(task,
+        { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".**.originalFilter" => "[of]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
        @r###"
    {
-      "uid": 1,
-      "batchUid": 1,
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
       "indexUid": null,
       "status": "succeeded",
       "type": "taskCancelation",
@@ -991,7 +990,7 @@ async fn test_summarized_task_cancelation() {
       "details": {
         "matchedTasks": 1,
         "canceledTasks": 0,
-        "originalFilter": "?uids=0"
+        "originalFilter": "[of]"
      },
       "error": null,
       "duration": "[duration]",
@@ -1004,20 +1003,19 @@
 
 #[actix_web::test]
 async fn test_summarized_task_deletion() {
-    let server = Server::new().await;
-    let index = server.index("doggos");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     // to avoid being flaky we're only going to delete an already finished task :(
     let (task, _status_code) = index.create(None).await;
     index.wait_task(task.uid()).await.succeeded();
     let (task, _status_code) = server.delete_tasks("uids=0").await;
     index.wait_task(task.uid()).await.succeeded();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
        @r###"
    {
-      "uid": 1,
-      "batchUid": 1,
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
       "indexUid": null,
       "status": "succeeded",
       "type": "taskDeletion",
@@ -1038,22 +1036,21 @@
 
 #[actix_web::test]
 async fn test_summarized_dump_creation() {
-    let server = Server::new().await;
+    let server = Server::new_shared();
     let (task, _status_code) = server.create_dump().await;
     server.wait_task(task.uid()).await;
     let (task, _) = server.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".details.dumpUid" => "[dumpUid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
        @r###"
    {
-      "uid": 0,
-      "batchUid": 0,
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
       "indexUid": null,
       "status": "succeeded",
       "type": "dumpCreation",
       "canceledBy": null,
       "details": {
-        "dumpUid": "[dumpUid]"
+        "dumpUid": "[dump_uid]"
      },
       "error": null,
       "duration": "[duration]",