Mirror of https://github.com/meilisearch/meilisearch.git (synced 2025-12-10 22:55:43 +00:00)

Compare commits: v1.12.0-rc...more-effic (18 commits)
Commits in this range:

- 1cf14e765f
- 4a082683df
- 26be5e0733
- bd5110a2fe
- fa8b9acdf6
- 2b74d1824b
- c77b00d3ac
- c77073efcc
- 1537323eb9
- a0a3b55700
- 214b51de87
- 95975944d7
- 7a2af06b1e
- cb0c3a5aad
- cbcf6c9ba3
- bf742d81cf
- fc1df5793c
- 953a82ca04
@@ -497,7 +497,6 @@ impl IndexScheduler {
         // 5. We make a batch from the unprioritised tasks. Start by taking the next enqueued task.
         let task_id = if let Some(task_id) = enqueued.min() { task_id } else { return Ok(None) };
         let mut task = self.get_task(rtxn, task_id)?.ok_or(Error::CorruptedTaskQueue)?;
-        current_batch.processing(Some(&mut task));

         // If the task is not associated with any index, verify that it is an index swap and
         // create the batch directly. Otherwise, get the index name associated with the task
@@ -507,6 +506,7 @@ impl IndexScheduler {
             index_name
         } else {
             assert!(matches!(&task.kind, KindWithContent::IndexSwap { swaps } if swaps.is_empty()));
+            current_batch.processing(Some(&mut task));
             return Ok(Some((Batch::IndexSwap { task }, current_batch)));
         };

@@ -4319,10 +4319,35 @@ mod tests {
         let proc = index_scheduler.processing_tasks.read().unwrap().clone();

         let query = Query { statuses: Some(vec![Status::Processing]), ..Default::default() };
-        let (batches, _) = index_scheduler
-            .get_batch_ids_from_authorized_indexes(&rtxn, &proc, &query, &AuthFilter::default())
+        let (mut batches, _) = index_scheduler
+            .get_batches_from_authorized_indexes(query.clone(), &AuthFilter::default())
             .unwrap();
-        snapshot!(snapshot_bitmap(&batches), @"[0,]"); // only the processing batch in the first tick
+        assert_eq!(batches.len(), 1);
+        batches[0].started_at = OffsetDateTime::UNIX_EPOCH;
+        // Insta cannot snapshot our batches because the batch stats contains an enum as key: https://github.com/mitsuhiko/insta/issues/689
+        let batch = serde_json::to_string_pretty(&batches[0]).unwrap();
+        snapshot!(batch, @r#"
+        {
+          "uid": 0,
+          "details": {
+            "primaryKey": "mouse"
+          },
+          "stats": {
+            "totalNbTasks": 1,
+            "status": {
+              "processing": 1
+            },
+            "types": {
+              "indexCreation": 1
+            },
+            "indexUids": {
+              "catto": 1
+            }
+          },
+          "startedAt": "1970-01-01T00:00:00Z",
+          "finishedAt": null
+        }
+        "#);

         let query = Query { statuses: Some(vec![Status::Enqueued]), ..Default::default() };
         let (batches, _) = index_scheduler
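
The workaround in this test (serialize with `serde_json::to_string_pretty`, then snapshot the resulting string) sidesteps the insta limitation linked in the comment: a stats map keyed by an enum cannot be snapshotted directly. A tiny standalone illustration of the same trick, using toy types rather than the scheduler's `Batch` and assuming `serde` (with the derive feature) and `serde_json` as dependencies:

```rust
use std::collections::BTreeMap;

use serde::Serialize;

// Toy stand-ins for the batch stats: the map key is an enum, not a string.
#[derive(Serialize, PartialEq, Eq, PartialOrd, Ord)]
#[serde(rename_all = "camelCase")]
enum Status {
    Processing,
}

#[derive(Serialize)]
struct Stats {
    status: BTreeMap<Status, u32>,
}

fn main() {
    let stats = Stats { status: BTreeMap::from([(Status::Processing, 1)]) };
    // Snapshot the pretty-printed JSON string instead of the value itself;
    // unit enum variants serialize as string map keys, so this round-trips fine.
    let pretty = serde_json::to_string_pretty(&stats).unwrap();
    println!("{pretty}");
}
```
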
@@ -5,7 +5,7 @@ snapshot_kind: text
 ### Autobatching Enabled = true
 ### Processing batch Some(1):
 [1,]
-{uid: 1, details: {"receivedDocuments":2,"indexedDocuments":null}, stats: {"totalNbTasks":2,"status":{"enqueued":2},"types":{"documentAdditionOrUpdate":2},"indexUids":{"beavero":2}}, }
+{uid: 1, details: {"receivedDocuments":1,"indexedDocuments":null}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"documentAdditionOrUpdate":1},"indexUids":{"beavero":1}}, }
 ----------------------------------------------------------------------
 ### All Tasks:
 0 {uid: 0, batch_uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
@@ -5,7 +5,7 @@ snapshot_kind: text
 ### Autobatching Enabled = true
 ### Processing batch Some(1):
 [1,]
-{uid: 1, details: {"receivedDocuments":2,"indexedDocuments":null}, stats: {"totalNbTasks":2,"status":{"enqueued":2},"types":{"documentAdditionOrUpdate":2},"indexUids":{"beavero":2}}, }
+{uid: 1, details: {"receivedDocuments":1,"indexedDocuments":null}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"documentAdditionOrUpdate":1},"indexUids":{"beavero":1}}, }
 ----------------------------------------------------------------------
 ### All Tasks:
 0 {uid: 0, batch_uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
@@ -5,7 +5,7 @@ snapshot_kind: text
 ### Autobatching Enabled = true
 ### Processing batch Some(0):
 [0,]
-{uid: 0, details: {"dumpUid":null}, stats: {"totalNbTasks":1,"status":{"enqueued":1},"types":{"dumpCreation":1},"indexUids":{}}, }
+{uid: 0, details: {"dumpUid":null}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"dumpCreation":1},"indexUids":{}}, }
 ----------------------------------------------------------------------
 ### All Tasks:
 0 {uid: 0, status: enqueued, details: { dump_uid: None }, kind: DumpCreation { keys: [], instance_uid: None }}
@@ -5,7 +5,7 @@ snapshot_kind: text
 ### Autobatching Enabled = true
 ### Processing batch Some(0):
 [0,]
-{uid: 0, details: {"receivedDocuments":2,"indexedDocuments":null}, stats: {"totalNbTasks":2,"status":{"enqueued":2},"types":{"documentAdditionOrUpdate":2},"indexUids":{"catto":2}}, }
+{uid: 0, details: {"receivedDocuments":1,"indexedDocuments":null}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"documentAdditionOrUpdate":1},"indexUids":{"catto":1}}, }
 ----------------------------------------------------------------------
 ### All Tasks:
 0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
@@ -5,7 +5,7 @@ snapshot_kind: text
 ### Autobatching Enabled = true
 ### Processing batch Some(0):
 [0,]
-{uid: 0, details: {"receivedDocuments":2,"indexedDocuments":null}, stats: {"totalNbTasks":2,"status":{"enqueued":2},"types":{"documentAdditionOrUpdate":2},"indexUids":{"catto":2}}, }
+{uid: 0, details: {"receivedDocuments":1,"indexedDocuments":null}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"documentAdditionOrUpdate":1},"indexUids":{"catto":1}}, }
 ----------------------------------------------------------------------
 ### All Tasks:
 0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
@@ -5,7 +5,7 @@ snapshot_kind: text
 ### Autobatching Enabled = true
 ### Processing batch Some(0):
 [0,]
-{uid: 0, details: {"receivedDocuments":2,"indexedDocuments":null}, stats: {"totalNbTasks":2,"status":{"enqueued":2},"types":{"documentAdditionOrUpdate":2},"indexUids":{"catto":2}}, }
+{uid: 0, details: {"receivedDocuments":1,"indexedDocuments":null}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"documentAdditionOrUpdate":1},"indexUids":{"catto":1}}, }
 ----------------------------------------------------------------------
 ### All Tasks:
 0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
@@ -5,7 +5,7 @@ snapshot_kind: text
 ### Autobatching Enabled = true
 ### Processing batch Some(0):
 [0,]
-{uid: 0, details: {"receivedDocuments":2,"indexedDocuments":null}, stats: {"totalNbTasks":2,"status":{"enqueued":2},"types":{"documentAdditionOrUpdate":2},"indexUids":{"doggos":2}}, }
+{uid: 0, details: {"receivedDocuments":1,"indexedDocuments":null}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"documentAdditionOrUpdate":1},"indexUids":{"doggos":1}}, }
 ----------------------------------------------------------------------
 ### All Tasks:
 0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
@@ -5,7 +5,7 @@ snapshot_kind: text
 ### Autobatching Enabled = true
 ### Processing batch Some(0):
 [0,]
-{uid: 0, details: {"receivedDocuments":2,"indexedDocuments":null}, stats: {"totalNbTasks":2,"status":{"enqueued":2},"types":{"documentAdditionOrUpdate":2},"indexUids":{"doggos":2}}, }
+{uid: 0, details: {"receivedDocuments":1,"indexedDocuments":null}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"documentAdditionOrUpdate":1},"indexUids":{"doggos":1}}, }
 ----------------------------------------------------------------------
 ### All Tasks:
 0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
@@ -5,7 +5,7 @@ snapshot_kind: text
 ### Autobatching Enabled = true
 ### Processing batch Some(0):
 [0,]
-{uid: 0, details: {"receivedDocuments":2,"indexedDocuments":null}, stats: {"totalNbTasks":2,"status":{"enqueued":2},"types":{"documentAdditionOrUpdate":2},"indexUids":{"doggos":2}}, }
+{uid: 0, details: {"receivedDocuments":1,"indexedDocuments":null}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"documentAdditionOrUpdate":1},"indexUids":{"doggos":1}}, }
 ----------------------------------------------------------------------
 ### All Tasks:
 0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
@@ -5,7 +5,7 @@ snapshot_kind: text
 ### Autobatching Enabled = true
 ### Processing batch Some(0):
 [0,]
-{uid: 0, details: {"receivedDocuments":2,"indexedDocuments":null}, stats: {"totalNbTasks":2,"status":{"enqueued":2},"types":{"documentAdditionOrUpdate":2},"indexUids":{"doggos":2}}, }
+{uid: 0, details: {"receivedDocuments":1,"indexedDocuments":null}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"documentAdditionOrUpdate":1},"indexUids":{"doggos":1}}, }
 ----------------------------------------------------------------------
 ### All Tasks:
 0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
@@ -5,7 +5,7 @@ snapshot_kind: text
 ### Autobatching Enabled = true
 ### Processing batch Some(0):
 [0,]
-{uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":2,"status":{"enqueued":2},"types":{"indexCreation":2},"indexUids":{"index_a":2}}, }
+{uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"indexCreation":1},"indexUids":{"index_a":1}}, }
 ----------------------------------------------------------------------
 ### All Tasks:
 0 {uid: 0, status: enqueued, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "index_a", primary_key: Some("id") }}
@@ -5,7 +5,7 @@ snapshot_kind: text
 ### Autobatching Enabled = true
 ### Processing batch Some(0):
 [0,]
-{uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":2,"status":{"enqueued":2},"types":{"indexCreation":2},"indexUids":{"index_a":2}}, }
+{uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"indexCreation":1},"indexUids":{"index_a":1}}, }
 ----------------------------------------------------------------------
 ### All Tasks:
 0 {uid: 0, status: enqueued, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "index_a", primary_key: Some("id") }}
@@ -5,7 +5,7 @@ snapshot_kind: text
 ### Autobatching Enabled = true
 ### Processing batch Some(0):
 [0,]
-{uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":2,"status":{"enqueued":2},"types":{"indexCreation":2},"indexUids":{"index_a":2}}, }
+{uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"indexCreation":1},"indexUids":{"index_a":1}}, }
 ----------------------------------------------------------------------
 ### All Tasks:
 0 {uid: 0, status: enqueued, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "index_a", primary_key: Some("id") }}
@@ -5,7 +5,7 @@ snapshot_kind: text
 ### Autobatching Enabled = true
 ### Processing batch Some(1):
 [1,]
-{uid: 1, details: {"primaryKey":"sheep"}, stats: {"totalNbTasks":2,"status":{"enqueued":2},"types":{"indexCreation":2},"indexUids":{"doggo":2}}, }
+{uid: 1, details: {"primaryKey":"sheep"}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, }
 ----------------------------------------------------------------------
 ### All Tasks:
 0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
@@ -5,7 +5,7 @@ snapshot_kind: text
 ### Autobatching Enabled = true
 ### Processing batch Some(0):
 [3,]
-{uid: 0, details: {"matchedTasks":2,"deletedTasks":null,"originalFilter":"test_query"}, stats: {"totalNbTasks":1,"status":{"enqueued":1},"types":{"taskDeletion":1},"indexUids":{}}, }
+{uid: 0, details: {"matchedTasks":2,"deletedTasks":null,"originalFilter":"test_query"}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"taskDeletion":1},"indexUids":{}}, }
 ----------------------------------------------------------------------
 ### All Tasks:
 0 {uid: 0, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
@@ -67,7 +67,7 @@ impl ProcessingBatch {
             task.batch_uid = Some(self.uid);
             // We don't store the statuses in the map since they're all enqueued but we must
             // still store them in the stats since that can be displayed.
-            *self.stats.status.entry(task.status).or_default() += 1;
+            *self.stats.status.entry(Status::Processing).or_default() += 1;

             self.kinds.insert(task.kind.as_kind());
             *self.stats.types.entry(task.kind.as_kind()).or_default() += 1;
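
This `ProcessingBatch` change is what flips the batch stats in the snapshot files above from `"enqueued"` to `"processing"`: every task picked into the processing batch is now counted under `Status::Processing` instead of under its stored status. A minimal standalone sketch of that counting logic, with illustrative types rather than the Meilisearch structs:

```rust
use std::collections::BTreeMap;

// Toy stand-in for the task status enum; BTreeMap keys need Ord.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
enum Status {
    Enqueued,
    Processing,
}

fn main() {
    // Tasks picked into the batch are still stored as enqueued on disk.
    let stored_statuses = [Status::Enqueued, Status::Enqueued];
    let mut stats: BTreeMap<Status, u32> = BTreeMap::new();
    for _task_status in stored_statuses {
        // Before the patch: *stats.entry(task_status).or_default() += 1  -> {"enqueued": 2}
        // After the patch:  count them under the Processing bucket       -> {"processing": 2}
        *stats.entry(Status::Processing).or_default() += 1;
    }
    assert_eq!(stats[&Status::Processing], 2);
    println!("{stats:?}");
}
```
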
@@ -279,6 +279,7 @@ InvalidSearchPage , InvalidRequest , BAD_REQUEST ;
 InvalidSearchQ , InvalidRequest , BAD_REQUEST ;
 InvalidFacetSearchQuery , InvalidRequest , BAD_REQUEST ;
 InvalidFacetSearchName , InvalidRequest , BAD_REQUEST ;
+FacetSearchDisabled , InvalidRequest , BAD_REQUEST ;
 InvalidSearchVector , InvalidRequest , BAD_REQUEST ;
 InvalidSearchShowMatchesPosition , InvalidRequest , BAD_REQUEST ;
 InvalidSearchShowRankingScore , InvalidRequest , BAD_REQUEST ;
@@ -1407,6 +1407,13 @@ pub fn perform_facet_search(
         None => TimeBudget::default(),
     };

+    if !index.facet_search(&rtxn)? {
+        return Err(ResponseError::from_msg(
+            "The facet search is disabled for this index".to_string(),
+            Code::FacetSearchDisabled,
+        ));
+    }
+
     // In the faceted search context, we want to use the intersection between the locales provided by the user
     // and the locales of the facet string.
     // If the facet string is not localized, we **ignore** the locales provided by the user because the facet data has no locale.
@@ -52,6 +52,25 @@ impl Value {
         }
         self
     }
+
+    /// Return `true` if the `status` field is set to `failed`.
+    /// Panic if the `status` field doesn't exist.
+    #[track_caller]
+    pub fn is_fail(&self) -> bool {
+        if !self["status"].is_string() {
+            panic!("Called `is_fail` on {}", serde_json::to_string_pretty(&self.0).unwrap());
+        }
+        self["status"] == serde_json::Value::String(String::from("failed"))
+    }
+
+    // Panic if the json doesn't contain the `status` field set to "failed"
+    #[track_caller]
+    pub fn failed(&self) -> &Self {
+        if !self.is_fail() {
+            panic!("Called failed on {}", serde_json::to_string_pretty(&self.0).unwrap());
+        }
+        self
+    }
 }

 impl From<serde_json::Value> for Value {
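
These `is_fail`/`failed` helpers mirror the existing `succeeded` assertion and are what lets the snapshot test further down write `index.wait_task(task.uid()).await.failed();`. A minimal standalone sketch of the pattern, using a hypothetical `Value` wrapper rather than the actual test harness, to show why `#[track_caller]` is used: the panic is reported at the test's call site instead of inside the helper.

```rust
use serde_json::json;

struct Value(serde_json::Value);

impl Value {
    /// Panics when the wrapped task response is not failed; #[track_caller]
    /// makes the panic point at the caller's line rather than this function.
    #[track_caller]
    fn failed(&self) -> &Self {
        if self.0["status"] != "failed" {
            panic!("Called failed on {}", serde_json::to_string_pretty(&self.0).unwrap());
        }
        self
    }
}

fn main() {
    let task = Value(json!({ "uid": 1, "status": "failed" }));
    task.failed(); // chains like `index.wait_task(task.uid()).await.failed()` in the tests
}
```
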
@@ -221,8 +221,15 @@ async fn add_documents_and_deactivate_facet_search() {
     let (response, code) =
         index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

-    assert_eq!(code, 200, "{}", response);
-    assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 0);
+    assert_eq!(code, 400, "{}", response);
+    snapshot!(response, @r###"
+    {
+      "message": "The facet search is disabled for this index",
+      "code": "facet_search_disabled",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#facet_search_disabled"
+    }
+    "###);
 }

 #[actix_rt::test]
@@ -245,8 +252,15 @@ async fn deactivate_facet_search_and_add_documents() {
     let (response, code) =
         index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

-    assert_eq!(code, 200, "{}", response);
-    assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 0);
+    assert_eq!(code, 400, "{}", response);
+    snapshot!(response, @r###"
+    {
+      "message": "The facet search is disabled for this index",
+      "code": "facet_search_disabled",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#facet_search_disabled"
+    }
+    "###);
 }

 #[actix_rt::test]
@@ -129,11 +129,11 @@ async fn perform_on_demand_snapshot() {

     index.load_test_set().await;

-    server.index("doggo").create(Some("bone")).await;
-    index.wait_task(2).await;
+    let (task, _) = server.index("doggo").create(Some("bone")).await;
+    index.wait_task(task.uid()).await.succeeded();

-    server.index("doggo").create(Some("bone")).await;
-    index.wait_task(2).await;
+    let (task, _) = server.index("doggo").create(Some("bone")).await;
+    index.wait_task(task.uid()).await.failed();

     let (task, code) = server.create_snapshot().await;
     snapshot!(code, @"202 Accepted");
@@ -1,5 +1,6 @@
 use std::collections::{BTreeMap, BTreeSet};

+use either::Either;
 use heed::RoTxn;
 use raw_collections::RawMap;
 use serde_json::value::RawValue;
@@ -209,29 +210,34 @@ impl<'d, 'doc: 'd, 't: 'd, Mapper: FieldIdMapper> Document<'d>
     for MergedDocument<'d, 'doc, 't, Mapper>
 {
     fn iter_top_level_fields(&self) -> impl Iterator<Item = Result<(&'d str, &'d RawValue)>> {
-        let mut new_doc_it = self.new_doc.iter_top_level_fields();
-        let mut db_it = self.db.iter().flat_map(|db| db.iter_top_level_fields());
-        let mut seen_fields = BTreeSet::new();
-
-        std::iter::from_fn(move || {
-            if let Some(next) = new_doc_it.next() {
-                if let Ok((name, _)) = next {
-                    seen_fields.insert(name);
-                }
-                return Some(next);
-            }
-            loop {
-                match db_it.next()? {
-                    Ok((name, value)) => {
-                        if seen_fields.contains(name) {
-                            continue;
-                        }
-                        return Some(Ok((name, value)));
-                    }
-                    Err(err) => return Some(Err(err)),
-                }
-            }
-        })
+        match &self.db {
+            Some(db) => {
+                let mut new_doc_it = self.new_doc.iter_top_level_fields();
+                let mut db_it = db.iter_top_level_fields();
+                let mut seen_fields = BTreeSet::new();
+
+                Either::Left(std::iter::from_fn(move || {
+                    if let Some(next) = new_doc_it.next() {
+                        if let Ok((name, _)) = next {
+                            seen_fields.insert(name);
+                        }
+                        return Some(next);
+                    }
+                    loop {
+                        match db_it.next()? {
+                            Ok((name, value)) => {
+                                if seen_fields.contains(name) {
+                                    continue;
+                                }
+                                return Some(Ok((name, value)));
+                            }
+                            Err(err) => return Some(Err(err)),
+                        }
+                    }
+                }))
+            }
+            None => Either::Right(self.new_doc.iter_top_level_fields()),
+        }
     }

     fn vectors_field(&self) -> Result<Option<&'d RawValue>> {
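
The `db` side of `MergedDocument` is now optional, so the two match arms return different iterator types; wrapping them in `either::Either` is what keeps the `impl Iterator` return type compiling, since `Either` implements `Iterator` whenever both of its sides do. A small self-contained sketch of that pattern, with toy data rather than the milli types:

```rust
// Requires the `either` crate: Either<L, R> is itself an Iterator when both L and R are,
// which lets one function return two different concrete iterator types.
use either::Either;

fn evens_or_all(only_even: bool) -> impl Iterator<Item = u32> {
    let data = [1u32, 2, 3, 4];
    if only_even {
        Either::Left(data.into_iter().filter(|n| n % 2 == 0))
    } else {
        Either::Right(data.into_iter())
    }
}

fn main() {
    assert_eq!(evens_or_all(true).collect::<Vec<_>>(), vec![2, 4]);
    assert_eq!(evens_or_all(false).collect::<Vec<_>>(), vec![1, 2, 3, 4]);
}
```
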
@@ -1,7 +1,10 @@
 use bumpalo::Bump;
 use heed::RoTxn;

-use super::document::{DocumentFromDb, DocumentFromVersions, MergedDocument, Versions};
+use super::document::{
+    Document as _, DocumentFromDb, DocumentFromVersions, MergedDocument, Versions,
+};
+use super::extract::perm_json_p;
 use super::vector_document::{
     MergedVectorDocument, VectorDocumentFromDb, VectorDocumentFromVersions,
 };
@@ -164,6 +167,80 @@ impl<'doc> Update<'doc> {
         }
     }

+    /// Returns whether the updated version of the document is different from the current version for the passed subset of fields.
+    ///
+    /// `true` if at least one top-level field that is exactly a member of `fields`, or a parent of a member of `fields`, changed.
+    /// Otherwise `false`.
+    pub fn has_changed_for_fields<'t, Mapper: FieldIdMapper>(
+        &self,
+        fields: Option<&[&str]>,
+        rtxn: &'t RoTxn,
+        index: &'t Index,
+        mapper: &'t Mapper,
+    ) -> Result<bool> {
+        let mut changed = false;
+        let mut cached_current = None;
+        let mut updated_selected_field_count = 0;
+
+        for entry in self.updated().iter_top_level_fields() {
+            let (key, updated_value) = entry?;
+
+            if perm_json_p::select_field(key, fields, &[]) == perm_json_p::Selection::Skip {
+                continue;
+            }
+
+            updated_selected_field_count += 1;
+            let current = match cached_current {
+                Some(current) => current,
+                None => self.current(rtxn, index, mapper)?,
+            };
+            let current_value = current.top_level_field(key)?;
+            let Some(current_value) = current_value else {
+                changed = true;
+                break;
+            };
+
+            if current_value.get() != updated_value.get() {
+                changed = true;
+                break;
+            }
+            cached_current = Some(current);
+        }
+
+        if !self.has_deletion {
+            // no field deletion, so fields that don't appear in `updated` cannot have changed
+            return Ok(changed);
+        }
+
+        if changed {
+            return Ok(true);
+        }
+
+        // we saw all updated fields, and set `changed` if any field wasn't in `current`.
+        // so if there are as many fields in `current` as in `updated`, then nothing changed.
+        // If there are any more fields in `current`, then they are missing in `updated`.
+        let has_deleted_fields = {
+            let current = match cached_current {
+                Some(current) => current,
+                None => self.current(rtxn, index, mapper)?,
+            };
+
+            let mut current_selected_field_count = 0;
+            for entry in current.iter_top_level_fields() {
+                let (key, _) = entry?;
+
+                if perm_json_p::select_field(key, fields, &[]) == perm_json_p::Selection::Skip {
+                    continue;
+                }
+                current_selected_field_count += 1;
+            }
+
+            current_selected_field_count != updated_selected_field_count
+        };

+        Ok(has_deleted_fields)
+    }
+
     pub fn updated_vectors(
         &self,
         doc_alloc: &'doc Bump,
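
The three extractor hunks below use this new helper to skip re-extraction when an update does not touch the fields they care about. A simplified, self-contained sketch of the detection logic above, using plain string maps and an exact field-name match instead of milli's `perm_json_p::select_field` (which also matches parents of nested fields) and dropping the `Result` plumbing:

```rust
use std::collections::BTreeMap;

// Count how many of the document's top-level fields are in the selected set.
fn count_selected(doc: &BTreeMap<&str, &str>, fields: &[&str]) -> usize {
    doc.keys().filter(|k| fields.contains(*k)).count()
}

// Simplified version of Update::has_changed_for_fields: a selected field with a
// different (or new) value means "changed"; when the update may delete fields,
// differing selected-field counts mean a selected field was removed.
fn has_changed_for_fields(
    current: &BTreeMap<&str, &str>,
    updated: &BTreeMap<&str, &str>,
    fields: &[&str],
    has_deletion: bool,
) -> bool {
    for (key, updated_value) in updated {
        if !fields.contains(key) {
            continue;
        }
        if current.get(key) != Some(updated_value) {
            return true;
        }
    }
    if !has_deletion {
        // no field deletion, so fields absent from `updated` cannot have changed
        return false;
    }
    count_selected(current, fields) != count_selected(updated, fields)
}

fn main() {
    let current = BTreeMap::from([("title", "a"), ("genre", "x")]);
    let updated = BTreeMap::from([("title", "a")]); // "genre" was deleted
    assert!(has_changed_for_fields(&current, &updated, &["genre"], true));
    assert!(!has_changed_for_fields(&current, &updated, &["title"], true));
}
```
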
@@ -97,6 +97,15 @@ impl FacetedDocidsExtractor {
                 },
             ),
             DocumentChange::Update(inner) => {
+                if !inner.has_changed_for_fields(
+                    Some(attributes_to_extract),
+                    rtxn,
+                    index,
+                    context.db_fields_ids_map,
+                )? {
+                    return Ok(());
+                }
+
                 extract_document_facets(
                     attributes_to_extract,
                     inner.current(rtxn, index, context.db_fields_ids_map)?,
@@ -351,6 +351,15 @@ impl WordDocidsExtractors {
                 )?;
             }
             DocumentChange::Update(inner) => {
+                if !inner.has_changed_for_fields(
+                    document_tokenizer.attribute_to_extract,
+                    &context.rtxn,
+                    context.index,
+                    context.db_fields_ids_map,
+                )? {
+                    return Ok(());
+                }
+
                 let mut token_fn = |fname: &str, fid, pos, word: &str| {
                     cached_sorter.insert_del_u32(
                         fid,
@@ -70,6 +70,15 @@ impl SearchableExtractor for WordPairProximityDocidsExtractor {
                 )?;
             }
             DocumentChange::Update(inner) => {
+                if !inner.has_changed_for_fields(
+                    document_tokenizer.attribute_to_extract,
+                    rtxn,
+                    index,
+                    context.db_fields_ids_map,
+                )? {
+                    return Ok(());
+                }
+
                 let document = inner.current(rtxn, index, context.db_fields_ids_map)?;
                 process_document_tokens(
                     document,