3023: Update error codes related to tasks cancelation + add canceledBy filter r=Kerollmops a=Kerollmops

<details>This PR changes the error codes [to follow the specification](https://github.com/meilisearch/specifications/pull/195).

 - [x] The `missing_filters` error code is renamed to `missing_task_filters` to be more accurate and to follow the `invalid_task_*` convention.
 - [x] The error code `invalid_task_uids_filter` is added.
 - [x] The error code `invalid_task_canceled_by_filter` is added (see the test-style sketch just after this list).
 - [x] The error code `invalid_task_date_filter` is added.
      - The error message is the same as the one for `expires_at` in the API key, except that it does not explicitly mention that the date must be given in the future.
</details>
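
To illustrate the new `canceledBy` filter and its dedicated error code, here is a hedged, test-style sketch written in the spirit of the integration tests touched by this PR. The `crate::common::Server` import path, the `#[actix_rt::test]` attribute, and the `400` status code are assumptions, not assertions made by this PR.

```rust
use crate::common::Server; // assumed path of the shared test harness used by these tests

// Minimal sketch: a valid `canceledBy` value is accepted, while an invalid one
// is rejected with the new `invalid_task_canceled_by_filter` error code.
#[actix_rt::test]
async fn canceled_by_filter_is_validated() {
    let server = Server::new().await;
    let index = server.index("test");

    // A comma-separated list of task uids is a valid `canceledBy` value.
    let (response, code) = index.service.get("/tasks?canceledBy=1,2").await;
    assert_eq!(code, 200, "{:?}", response);

    // A value that does not parse as a task uid should be rejected.
    let (response, code) = index.service.get("/tasks?canceledBy=doggo").await;
    assert_eq!(code, 400, "{:?}", response);
    assert_eq!(response["code"], "invalid_task_canceled_by_filter");
}
```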

Edit by `@loiclec`:
I have added a few more changes to this PR. The related issues are:

- Fixes https://github.com/meilisearch/meilisearch/issues/3029
- Implements https://github.com/meilisearch/meilisearch/issues/3026
- Fixes https://github.com/meilisearch/meilisearch/issues/2940
- Fixes https://github.com/meilisearch/meilisearch/issues/2939

Additionally, this PR:
- Fixes a bug where global tasks were returned by `GET /tasks` queries even if the user did not have the `index.*` API key action.
- Renames `originalQuery` to `originalFilters` in the task details.
- Displays `error: null` and `canceledBy: null` in the task views.
- Allows using the star operator in the task filters of the `DELETE /tasks` and `POST /tasks/cancel` routes (see the sketch after this list).
- Makes sure that the index scheduler keeps making progress even when a grave error occurs.
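
The star operator, the `originalFilters` rename, and the always-present `error`/`canceledBy` fields are sketched below, reusing the helper names visible in this diff (`Server::new`, `server.index`, `server.cancel_task`, `server.delete_task`, `index.wait_task`, `index.get_task`); the `crate::common::Server` path and the `#[actix_rt::test]` attribute are assumptions rather than code from this PR.

```rust
use serde_json::json;

use crate::common::Server; // assumed path of the shared test harness

// Minimal sketch: the star operator is accepted by the cancel/delete task
// routes, and task views expose `canceledBy` and `error` even when null.
#[actix_rt::test]
async fn star_filters_on_cancel_and_delete() {
    let server = Server::new().await;
    let index = server.index("test");

    // Task 0: create an index so the cancelation below has something to match.
    index.create(None).await;
    index.wait_task(0).await;

    // Task 1: `POST /tasks/cancel` now accepts `*` in its filters.
    server.cancel_task(json!({ "types": ["*"], "statuses": ["*"] })).await;
    index.wait_task(1).await;

    // The task view always carries `canceledBy` and `error`, even when null,
    // and records the filters under `originalFilters` instead of `originalQuery`.
    let (task, _) = index.get_task(1).await;
    assert_eq!(task["type"], "taskCancelation");
    assert_eq!(task["canceledBy"], json!(null));
    assert_eq!(task["error"], json!(null));

    // Task 2: `DELETE /tasks` accepts the star operator as well.
    server.delete_task(json!({ "indexUids": ["*"] })).await;
    index.wait_task(2).await;
}
```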


Co-authored-by: Kerollmops <clement@meilisearch.com>
Co-authored-by: Clément Renault <clement@meilisearch.com>
Co-authored-by: Loïc Lecrenier <loic.lecrenier@me.com>
Authored by bors[bot] and committed by GitHub on 2022-11-10 10:51:41 +00:00.
90 changed files with 1129 additions and 362 deletions.

View File

@ -106,17 +106,17 @@ impl Index<'_> {
}
pub async fn list_tasks(&self) -> (Value, StatusCode) {
let url = format!("/tasks?indexUid={}", self.uid);
let url = format!("/tasks?indexUids={}", self.uid);
self.service.get(url).await
}
pub async fn filtered_tasks(&self, type_: &[&str], status: &[&str]) -> (Value, StatusCode) {
let mut url = format!("/tasks?indexUid={}", self.uid);
let mut url = format!("/tasks?indexUids={}", self.uid);
if !type_.is_empty() {
let _ = write!(url, "&type={}", type_.join(","));
let _ = write!(url, "&types={}", type_.join(","));
}
if !status.is_empty() {
let _ = write!(url, "&status={}", status.join(","));
let _ = write!(url, "&statuses={}", status.join(","));
}
self.service.get(url).await
}

View File

@ -59,7 +59,7 @@ async fn import_dump_v2_movie_raw() {
assert_eq!(code, 200);
assert_eq!(
tasks,
json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit": 20, "from": 0, "next": null })
json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "error": null, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit": 20, "from": 0, "next": null })
);
// finally we're just going to check that we can still get a few documents by id
@ -122,7 +122,7 @@ async fn import_dump_v2_movie_with_settings() {
assert_eq!(code, 200);
assert_eq!(
tasks,
json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "from": 1, "next": null })
json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "canceledBy": null, "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "error": null, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "error": null, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "from": 1, "next": null })
);
// finally we're just going to check that we can still get a few documents by id
@ -185,7 +185,7 @@ async fn import_dump_v2_rubygems_with_settings() {
assert_eq!(code, 200);
assert_eq!(
tasks["results"][0],
json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "error": null, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
);
// finally we're just going to check that we can still get a few documents by id
@ -246,7 +246,7 @@ async fn import_dump_v3_movie_raw() {
assert_eq!(code, 200);
assert_eq!(
tasks,
json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit": 20, "from": 0, "next": null })
json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "error": null, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit": 20, "from": 0, "next": null })
);
// finally we're just going to check that we can still get a few documents by id
@ -309,7 +309,7 @@ async fn import_dump_v3_movie_with_settings() {
assert_eq!(code, 200);
assert_eq!(
tasks,
json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "from": 1, "next": null })
json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "canceledBy": null, "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "error": null, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "error": null, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "from": 1, "next": null })
);
// finally we're just going to check that we can still get a few documents by id
@ -372,7 +372,7 @@ async fn import_dump_v3_rubygems_with_settings() {
assert_eq!(code, 200);
assert_eq!(
tasks["results"][0],
json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "error": null, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
);
// finally we're just going to check that we can still get a few documents by id
@ -433,7 +433,7 @@ async fn import_dump_v4_movie_raw() {
assert_eq!(code, 200);
assert_eq!(
tasks,
json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit" : 20, "from": 0, "next": null })
json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "error": null, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit" : 20, "from": 0, "next": null })
);
// finally we're just going to check that we can still get a few documents by id
@ -496,7 +496,7 @@ async fn import_dump_v4_movie_with_settings() {
assert_eq!(code, 200);
assert_eq!(
tasks,
json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "from": 1, "next": null })
json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "canceledBy": null, "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "error": null, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "error": null, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "from": 1, "next": null })
);
// finally we're just going to check that we can still get a few documents by id
@ -559,7 +559,7 @@ async fn import_dump_v4_rubygems_with_settings() {
assert_eq!(code, 200);
assert_eq!(
tasks["results"][0],
json!({ "uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
json!({ "uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "error": null, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
);
// finally we're just going to check that we can still get a few documents by id

View File

@ -67,37 +67,37 @@ async fn list_tasks_with_star_filters() {
index
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
let (response, code) = index.service.get("/tasks?indexUid=test").await;
let (response, code) = index.service.get("/tasks?indexUids=test").await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
let (response, code) = index.service.get("/tasks?indexUid=*").await;
let (response, code) = index.service.get("/tasks?indexUids=*").await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
let (response, code) = index.service.get("/tasks?indexUid=*,pasteque").await;
let (response, code) = index.service.get("/tasks?indexUids=*,pasteque").await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
let (response, code) = index.service.get("/tasks?type=*").await;
let (response, code) = index.service.get("/tasks?types=*").await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
let (response, code) =
index.service.get("/tasks?type=*,documentAdditionOrUpdate&status=*").await;
index.service.get("/tasks?types=*,documentAdditionOrUpdate&statuses=*").await;
assert_eq!(code, 200, "{:?}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
let (response, code) = index
.service
.get("/tasks?type=*,documentAdditionOrUpdate&status=*,failed&indexUid=test")
.get("/tasks?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids=test")
.await;
assert_eq!(code, 200, "{:?}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
let (response, code) = index
.service
.get("/tasks?type=*,documentAdditionOrUpdate&status=*,failed&indexUid=test,*")
.get("/tasks?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids=test,*")
.await;
assert_eq!(code, 200, "{:?}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
@ -231,10 +231,12 @@ async fn test_summarized_document_addition_or_update() {
"indexUid": "test",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
"details": {
"receivedDocuments": 1,
"indexedDocuments": 1
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
@ -253,10 +255,12 @@ async fn test_summarized_document_addition_or_update() {
"indexUid": "test",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
"details": {
"receivedDocuments": 1,
"indexedDocuments": 1
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
@ -280,6 +284,7 @@ async fn test_summarized_delete_batch() {
"indexUid": "test",
"status": "failed",
"type": "documentDeletion",
"canceledBy": null,
"details": {
"matchedDocuments": 3,
"deletedDocuments": null
@ -309,10 +314,12 @@ async fn test_summarized_delete_batch() {
"indexUid": "test",
"status": "succeeded",
"type": "documentDeletion",
"canceledBy": null,
"details": {
"matchedDocuments": 1,
"deletedDocuments": 0
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
@ -336,6 +343,7 @@ async fn test_summarized_delete_document() {
"indexUid": "test",
"status": "failed",
"type": "documentDeletion",
"canceledBy": null,
"details": {
"matchedDocuments": 1,
"deletedDocuments": null
@ -365,10 +373,12 @@ async fn test_summarized_delete_document() {
"indexUid": "test",
"status": "succeeded",
"type": "documentDeletion",
"canceledBy": null,
"details": {
"matchedDocuments": 1,
"deletedDocuments": 0
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
@ -394,6 +404,7 @@ async fn test_summarized_settings_update() {
"indexUid": "test",
"status": "failed",
"type": "settingsUpdate",
"canceledBy": null,
"details": {
"rankingRules": [
"custom"
@ -423,6 +434,7 @@ async fn test_summarized_settings_update() {
"indexUid": "test",
"status": "succeeded",
"type": "settingsUpdate",
"canceledBy": null,
"details": {
"displayedAttributes": [
"doggos",
@ -436,6 +448,7 @@ async fn test_summarized_settings_update() {
"iq"
]
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
@ -459,9 +472,11 @@ async fn test_summarized_index_creation() {
"indexUid": "test",
"status": "succeeded",
"type": "indexCreation",
"canceledBy": null,
"details": {
"primaryKey": null
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
@ -480,6 +495,7 @@ async fn test_summarized_index_creation() {
"indexUid": "test",
"status": "failed",
"type": "indexCreation",
"canceledBy": null,
"details": {
"primaryKey": "doggos"
},
@ -512,6 +528,7 @@ async fn test_summarized_index_deletion() {
"indexUid": "test",
"status": "failed",
"type": "indexDeletion",
"canceledBy": null,
"error": {
"message": "Index `test` not found.",
"code": "index_not_found",
@ -538,9 +555,11 @@ async fn test_summarized_index_deletion() {
"indexUid": "test",
"status": "succeeded",
"type": "indexDeletion",
"canceledBy": null,
"details": {
"deletedDocuments": 1
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
@ -560,9 +579,11 @@ async fn test_summarized_index_deletion() {
"indexUid": "test",
"status": "succeeded",
"type": "indexDeletion",
"canceledBy": null,
"details": {
"deletedDocuments": 1
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
@ -587,6 +608,7 @@ async fn test_summarized_index_update() {
"indexUid": "test",
"status": "failed",
"type": "indexUpdate",
"canceledBy": null,
"details": {
"primaryKey": null
},
@ -614,6 +636,7 @@ async fn test_summarized_index_update() {
"indexUid": "test",
"status": "failed",
"type": "indexUpdate",
"canceledBy": null,
"details": {
"primaryKey": "bones"
},
@ -644,9 +667,11 @@ async fn test_summarized_index_update() {
"indexUid": "test",
"status": "succeeded",
"type": "indexUpdate",
"canceledBy": null,
"details": {
"primaryKey": null
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
@ -665,9 +690,11 @@ async fn test_summarized_index_update() {
"indexUid": "test",
"status": "succeeded",
"type": "indexUpdate",
"canceledBy": null,
"details": {
"primaryKey": "bones"
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
@ -694,6 +721,7 @@ async fn test_summarized_index_swap() {
"indexUid": null,
"status": "failed",
"type": "indexSwap",
"canceledBy": null,
"details": {
"swaps": [
{
@ -734,6 +762,7 @@ async fn test_summarized_index_swap() {
"indexUid": null,
"status": "succeeded",
"type": "indexSwap",
"canceledBy": null,
"details": {
"swaps": [
{
@ -744,6 +773,7 @@ async fn test_summarized_index_swap() {
}
]
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
@ -759,7 +789,7 @@ async fn test_summarized_task_cancelation() {
// to avoid being flaky we're only going to cancel an already finished task :(
index.create(None).await;
index.wait_task(0).await;
server.cancel_task(json!({ "uid": [0] })).await;
server.cancel_task(json!({ "uids": [0] })).await;
index.wait_task(1).await;
let (task, _) = index.get_task(1).await;
assert_json_snapshot!(task,
@ -770,11 +800,13 @@ async fn test_summarized_task_cancelation() {
"indexUid": null,
"status": "succeeded",
"type": "taskCancelation",
"canceledBy": null,
"details": {
"matchedTasks": 1,
"canceledTasks": 0,
"originalQuery": "uid=0"
"originalFilters": "uids=0"
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
@ -790,7 +822,7 @@ async fn test_summarized_task_deletion() {
// to avoid being flaky we're only going to delete an already finished task :(
index.create(None).await;
index.wait_task(0).await;
server.delete_task(json!({ "uid": [0] })).await;
server.delete_task(json!({ "uids": [0] })).await;
index.wait_task(1).await;
let (task, _) = index.get_task(1).await;
assert_json_snapshot!(task,
@ -801,11 +833,13 @@ async fn test_summarized_task_deletion() {
"indexUid": null,
"status": "succeeded",
"type": "taskDeletion",
"canceledBy": null,
"details": {
"matchedTasks": 1,
"deletedTasks": 1,
"originalQuery": "uid=0"
"originalFilters": "uids=0"
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
@ -828,6 +862,8 @@ async fn test_summarized_dump_creation() {
"indexUid": null,
"status": "succeeded",
"type": "dumpCreation",
"canceledBy": null,
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",