From cb15e5c67e1c6fed85f3d6c85875b9bb34e75093 Mon Sep 17 00:00:00 2001
From: Martin Tzvetanov Grigorov
Date: Tue, 3 Jun 2025 09:13:56 +0300
Subject: [PATCH] WIP: More snapshot updates

Signed-off-by: Martin Tzvetanov Grigorov
---
 crates/meilisearch/tests/batches/mod.rs | 107 +++++++++++-------
 1 file changed, 48 insertions(+), 59 deletions(-)

diff --git a/crates/meilisearch/tests/batches/mod.rs b/crates/meilisearch/tests/batches/mod.rs
index 4613f71fc..bb926af70 100644
--- a/crates/meilisearch/tests/batches/mod.rs
+++ b/crates/meilisearch/tests/batches/mod.rs
@@ -12,10 +12,10 @@ async fn error_get_unexisting_batch_status() {
     let index = server.unique_index();
     let (task, _coder) = index.create(None).await;
     index.wait_task(task.uid()).await.succeeded();
-    let (response, code) = index.get_batch(1).await;
+    let (response, code) = index.get_batch(task.uid() as u32).await;
 
     let expected_response = json!({
-        "message": "Batch `1` not found.",
+        "message": format!("Batch `{}` not found.", task.uid()),
         "code": "batch_not_found",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#batch_not_found"
@@ -147,15 +147,15 @@ async fn list_batches_status_filtered() {
     index.wait_task(task.uid()).await.failed();
 
     let (response, code) = index.filtered_batches(&[], &["succeeded"], &[]).await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 1);
 
     let (response, code) = index.filtered_batches(&[], &["succeeded"], &[]).await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 1);
 
     let (response, code) = index.filtered_batches(&[], &["succeeded", "failed"], &[]).await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 }
 
@@ -168,16 +168,16 @@ async fn list_batches_type_filtered() {
     let (task, _) = index.delete().await;
     index.wait_task(task.uid()).await.succeeded();
 
     let (response, code) = index.filtered_batches(&["indexCreation"], &[], &[]).await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 1);
 
     let (response, code) =
-        index.filtered_batches(&["indexCreation", "IndexDeletion"], &[], &[]).await;
-    assert_eq!(code, 200, "{}", response);
+        index.filtered_batches(&["indexCreation", "indexDeletion"], &[], &[]).await;
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 
     let (response, code) = index.filtered_batches(&["indexCreation"], &[], &[]).await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 1);
 }
@@ -189,7 +189,7 @@ async fn list_batches_invalid_canceled_by_filter() {
     index.wait_task(task.uid()).await.succeeded();
 
     let (response, code) = index.filtered_batches(&[], &[], &["0"]).await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 0);
 }
 
@@ -203,7 +203,7 @@ async fn list_batches_status_and_type_filtered() {
     index.wait_task(task.uid()).await.succeeded();
 
     let (response, code) = index.filtered_batches(&["indexCreation"], &["failed"], &[]).await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 0);
 
     let (response, code) = index
@@ -213,7 +213,7 @@ async fn list_batches_status_and_type_filtered() {
             &[],
         )
         .await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 }
 
@@ -222,7 +222,7 @@ async fn list_batch_filter_error() {
     let server = Server::new_shared();
 
     let (response, code) = server.batches_filter("lol=pied").await;
-    assert_eq!(code, 400, "{}", response);
+    assert_eq!(code, 400, "{response}");
     meili_snap::snapshot!(meili_snap::json_string!(response), @r#"
     {
       "message": "Unknown parameter `lol`: expected one of `limit`, `from`, `reverse`, `batchUids`, `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
@@ -233,7 +233,7 @@ async fn list_batch_filter_error() {
     "#);
 
     let (response, code) = server.batches_filter("uids=pied").await;
-    assert_eq!(code, 400, "{}", response);
+    assert_eq!(code, 400, "{response}");
     meili_snap::snapshot!(meili_snap::json_string!(response), @r#"
     {
       "message": "Invalid value in parameter `uids`: could not parse `pied` as a positive integer",
@@ -244,7 +244,7 @@ async fn list_batch_filter_error() {
     "#);
 
     let (response, code) = server.batches_filter("from=pied").await;
-    assert_eq!(code, 400, "{}", response);
+    assert_eq!(code, 400, "{response}");
     meili_snap::snapshot!(meili_snap::json_string!(response), @r#"
     {
       "message": "Invalid value in parameter `from`: could not parse `pied` as a positive integer",
@@ -255,7 +255,7 @@ async fn list_batch_filter_error() {
     "#);
 
     let (response, code) = server.batches_filter("beforeStartedAt=pied").await;
-    assert_eq!(code, 400, "{}", response);
+    assert_eq!(code, 400, "{response}");
     meili_snap::snapshot!(meili_snap::json_string!(response), @r#"
     {
       "message": "Invalid value in parameter `beforeStartedAt`: `pied` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
@@ -283,7 +283,8 @@ async fn test_summarized_document_addition_or_update() {
             ".finishedAt" => "[date]",
             ".stats.progressTrace" => "[progressTrace]",
             ".stats.writeChannelCongestion" => "[writeChannelCongestion]",
-            ".stats.internalDatabaseSizes" => "[internalDatabaseSizes]"
+            ".stats.internalDatabaseSizes" => "[internalDatabaseSizes]",
+            ".stats.indexUids" => r#"""{"test": 1}"""#
         },
         @r###"
     {
@@ -301,9 +302,7 @@ async fn test_summarized_document_addition_or_update() {
         "types": {
           "documentAdditionOrUpdate": 1
         },
-        "indexUids": {
-          "test": 1
-        },
+        "indexUids": {"test": 1},
         "progressTrace": "[progressTrace]",
         "writeChannelCongestion": "[writeChannelCongestion]",
         "internalDatabaseSizes": "[internalDatabaseSizes]"
@@ -376,7 +375,7 @@ async fn test_summarized_delete_documents_by_batch() {
             ".finishedAt" => "[date]",
             ".stats.progressTrace" => "[progressTrace]",
             ".stats.writeChannelCongestion" => "[writeChannelCongestion]",
-            ".stats.indexUids" => "{\n\t\"test\": 1}"
+            ".stats.indexUids" => r#"""{"test": 1}"""#
         },
         @r###"
     {
@@ -394,9 +393,7 @@ async fn test_summarized_delete_documents_by_batch() {
         "types": {
           "documentDeletion": 1
         },
-        "indexUids": {
-          "test": 1
-        },
+        "indexUids": {"test": 1},
         "progressTrace": "[progressTrace]"
       },
       "duration": "[duration]",
@@ -467,7 +464,8 @@ async fn test_summarized_delete_documents_by_filter() {
             ".finishedAt" => "[date]",
             ".stats.progressTrace" => "[progressTrace]",
             ".stats.writeChannelCongestion" => "[writeChannelCongestion]",
-            ".stats.indexUids" => "{\n\t\"test\": 1}"
+            ".stats.indexUids" => r#"""{"test": 1}"""#,
+            ".batchCreationComplete" => "batched all enqueued tasks for index `[uuid]`"
         },
         @r###"
     {
@@ -486,15 +484,13 @@ async fn test_summarized_delete_documents_by_filter() {
         "types": {
           "documentDeletion": 1
         },
-        "indexUids": {
-          "test": 1
-        },
+        "indexUids": {"test": 1},
         "progressTrace": "[progressTrace]"
       },
       "duration": "[duration]",
       "startedAt": "[date]",
       "finishedAt": "[date]",
-      "batchCreationComplete": "batched all enqueued tasks"
+      "batchCreationComplete": "batched all enqueued tasks for index `[uuid]`"
     }
     "###);
 
@@ -603,7 +599,7 @@ async fn test_summarized_delete_document_by_id() {
             ".finishedAt" => "[date]",
             ".stats.progressTrace" => "[progressTrace]",
             ".stats.writeChannelCongestion" => "[writeChannelCongestion]",
-            ".stats.indexUids" => "{\n\t\"test\": 1}"
+            ".stats.indexUids" => r#"""{"test": 1}"""#
         },
         @r###"
     {
@@ -621,9 +617,7 @@ async fn test_summarized_delete_document_by_id() {
         "types": {
           "documentDeletion": 1
         },
-        "indexUids": {
-          "test": 1
-        },
+        "indexUids": {"test": 1},
         "progressTrace": "[progressTrace]"
       },
       "duration": "[duration]",
@@ -705,7 +699,7 @@ async fn test_summarized_settings_update() {
             ".stats.progressTrace" => "[progressTrace]",
             ".stats.writeChannelCongestion" => "[writeChannelCongestion]",
             ".stats.internalDatabaseSizes" => "[internalDatabaseSizes]",
-            ".stats.indexUids" => "{\n\t\"test\": 1}",
+            ".stats.indexUids" => r#"""{"test": 1}"""#,
             ".batchCreationComplete" => "batched all enqueued tasks for index `[uuid]`"
         },
         @r###"
@@ -734,9 +728,7 @@ async fn test_summarized_settings_update() {
         "types": {
           "settingsUpdate": 1
         },
-        "indexUids": {
-          "test": 1
-        },
+        "indexUids": {"test": 1},
         "progressTrace": "[progressTrace]"
       },
       "duration": "[duration]",
@@ -763,7 +755,7 @@ async fn test_summarized_index_creation() {
             ".finishedAt" => "[date]",
             ".stats.progressTrace" => "[progressTrace]",
             ".stats.writeChannelCongestion" => "[writeChannelCongestion]",
-            ".stats.indexUids" => "{\n\t\"test\": 1}",
1}", + ".stats.indexUids" => r#"""{"test": 1}"""#, ".batchCreationComplete" => "task with id X of type `indexCreation` cannot be batched" }, @r###" @@ -779,9 +771,7 @@ async fn test_summarized_index_creation() { "types": { "indexCreation": 1 }, - "indexUids": { - "test": 1 - }, + "indexUids": {"test": 1}, "progressTrace": "[progressTrace]" }, "duration": "[duration]", @@ -896,7 +886,7 @@ async fn test_summarized_index_deletion() { { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "succeeded", "type": "indexDeletion", "canceledBy": null, @@ -919,7 +909,7 @@ async fn test_summarized_index_deletion() { { "uid": "[uid]", "batchUid": "[batch_uid]", - "indexUid": "test", + "indexUid": "[uuid]", "status": "failed", "type": "indexDeletion", "canceledBy": null, @@ -927,7 +917,7 @@ async fn test_summarized_index_deletion() { "deletedDocuments": 0 }, "error": { - "message": "Index `test` not found.", + "message": "Index `[uuid]` not found.", "code": "index_not_found", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#index_not_found" @@ -947,7 +937,7 @@ async fn test_summarized_index_update() { // If the index doesn't exist yet, we should get errors with or without the primary key. let (task, _status_code) = index.update(None).await; index.wait_task(task.uid()).await.failed(); - let (batch, _) = index.get_batch(task.uid() as u32).await; + let (batch, _) = index.get_batch(task.uid() as u32).await; assert_json_snapshot!(batch, { ".uid" => "[uid]", @@ -957,7 +947,7 @@ async fn test_summarized_index_update() { ".finishedAt" => "[date]", ".stats.progressTrace" => "[progressTrace]", ".stats.writeChannelCongestion" => "[writeChannelCongestion]", - ".stats.indexUids" => "{\n\t\"test\": 1}", + ".stats.indexUids" => r#"{"test": 1}"#, ".batchCreationComplete" => "task with id X of type `indexUpdate` cannot be batched" }, @r###" @@ -973,9 +963,7 @@ async fn test_summarized_index_update() { "types": { "indexUpdate": 1 }, - "indexUids": { - "test": 1 - }, + "indexUids": {"test": 1}, "progressTrace": "[progressTrace]" }, "duration": "[duration]", @@ -1177,7 +1165,7 @@ async fn test_summarized_index_swap() { ".finishedAt" => "[date]", ".stats.progressTrace" => "[progressTrace]", ".stats.writeChannelCongestion" => "[writeChannelCongestion]", - ".stats.indexUids" => "{\n\t\"doggos\": 1}" + ".stats.indexUids" => r#"""{"doggos": 1}"""# }, @r###" { @@ -1192,9 +1180,7 @@ async fn test_summarized_index_swap() { "types": { "indexCreation": 1 }, - "indexUids": { - "doggos": 1 - }, + "indexUids": {"doggos": 1}, "progressTrace": "[progressTrace]" }, "duration": "[duration]", @@ -1214,19 +1200,21 @@ async fn test_summarized_batch_cancelation() { index.wait_task(task.uid()).await.succeeded(); let (task, _status_code) = server.cancel_tasks(format!("uids={}", task.uid()).as_str()).await; index.wait_task(task.uid()).await.succeeded(); - let (batch, _) = index.get_batch(1).await; + let (batch, _) = index.get_batch(task.uid() as u32).await; assert_json_snapshot!(batch, { + ".uid" => "[uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".stats.progressTrace" => "[progressTrace]", - ".stats.writeChannelCongestion" => "[writeChannelCongestion]" + ".stats.writeChannelCongestion" => "[writeChannelCongestion]", + ".batchCreationComplete" => "task with id X of type `taskCancelation` cannot be batched" }, @r###" { - "uid": 1, + "uid": "[uid]", "progress": null, "details": { "matchedTasks": 1, @@ -1247,7 
@@ -1247,7 +1235,7 @@ async fn test_summarized_batch_cancelation() {
       "duration": "[duration]",
       "startedAt": "[date]",
       "finishedAt": "[date]",
-      "batchCreationComplete": "task with id 1 of type `taskCancelation` cannot be batched"
+      "batchCreationComplete": "task with id X of type `taskCancelation` cannot be batched"
     }
     "###);
 }
@@ -1316,7 +1304,8 @@ async fn test_summarized_dump_creation() {
             ".startedAt" => "[date]",
             ".finishedAt" => "[date]",
             ".stats.progressTrace" => "[progressTrace]",
-            ".stats.writeChannelCongestion" => "[writeChannelCongestion]"
+            ".stats.writeChannelCongestion" => "[writeChannelCongestion]",
+            ".batchCreationComplete" => "task with id X of type `dumpCreation` cannot be batched"
         },
         @r###"
     {
@@ -1339,7 +1328,7 @@ async fn test_summarized_dump_creation() {
       "duration": "[duration]",
       "startedAt": "[date]",
       "finishedAt": "[date]",
-      "batchCreationComplete": "task with id 0 of type `dumpCreation` cannot be batched"
+      "batchCreationComplete": "task with id X of type `dumpCreation` cannot be batched"
     }
     "###);
 }