Mirror of https://github.com/meilisearch/meilisearch.git, synced 2025-11-22 04:36:32 +00:00
Merge pull request #5999 from meilisearch/fix-document-fetch-sort
Fix the Document Fetch pagination bug when Sort is applied
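In short: when a Document Fetch request combines offset/limit pagination with a sort, pages past the first could come back wrong, skipping or repeating documents. The first hunk below adds a regression test that pages through a sorted index two documents at a time; note that documents 5 through 49 all share the name "doc_00005", so the sort key has a long run of ties, presumably the scenario from the original bug report. The second hunk is the actual fix: an off-by-one in SortedDocumentsIterator::nth.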
@@ -1339,3 +1339,117 @@ async fn get_document_with_vectors() {
     }
     "###);
 }
+
+#[actix_rt::test]
+async fn test_fetch_documents_pagination_with_sorting() {
+    let server = Server::new_shared();
+    let index = server.unique_index();
+    let (task, _code) = index.create(None).await;
+    server.wait_task(task.uid()).await.succeeded();
+
+    // Set name as sortable attribute
+    let (task, code) = index.update_settings_sortable_attributes(json!(["name"])).await;
+    assert_eq!(code, 202);
+    server.wait_task(task.uid()).await.succeeded();
+
+    let documents = json!((0..50)
+        .map(|i| json!({"id": i, "name": format!("doc_{:05}", std::cmp::min(i, 5))}))
+        .collect::<Vec<_>>());
+
+    // Add documents as described in the bug report
+    let (task, code) = index.add_documents(documents, None).await;
+    assert_eq!(code, 202);
+    server.wait_task(task.uid()).await.succeeded();
+
+    // Request 1 (first page): offset 0, limit 2
+    let (response, code) = index
+        .fetch_documents(json!({
+            "offset": 0,
+            "limit": 2,
+            "sort": ["name:asc"]
+        }))
+        .await;
+    assert_eq!(code, 200);
+    let results = response["results"].as_array().unwrap();
+    snapshot!(json_string!(results), @r###"
+    [
+      {
+        "id": 0,
+        "name": "doc_00000"
+      },
+      {
+        "id": 1,
+        "name": "doc_00001"
+      }
+    ]
+    "###);
+
+    // Request 2 (second page): offset 2, limit 2
+    let (response, code) = index
+        .fetch_documents(json!({
+            "offset": 2,
+            "limit": 2,
+            "sort": ["name:asc"]
+        }))
+        .await;
+    assert_eq!(code, 200);
+    let results = response["results"].as_array().unwrap();
+    snapshot!(json_string!(results), @r###"
+    [
+      {
+        "id": 2,
+        "name": "doc_00002"
+      },
+      {
+        "id": 3,
+        "name": "doc_00003"
+      }
+    ]
+    "###);
+
+    // Request 3 (third page): offset 4, limit 2
+    let (response, code) = index
+        .fetch_documents(json!({
+            "offset": 4,
+            "limit": 2,
+            "sort": ["name:asc"]
+        }))
+        .await;
+    assert_eq!(code, 200);
+    let results = response["results"].as_array().unwrap();
+    snapshot!(json_string!(results), @r###"
+    [
+      {
+        "id": 4,
+        "name": "doc_00004"
+      },
+      {
+        "id": 5,
+        "name": "doc_00005"
+      }
+    ]
+    "###);
+
+    // Request 4 (fourth page): offset 6, limit 2
+    let (response, code) = index
+        .fetch_documents(json!({
+            "offset": 6,
+            "limit": 2,
+            "sort": ["name:asc"]
+        }))
+        .await;
+    assert_eq!(code, 200);
+    let results = response["results"].as_array().unwrap();
+    snapshot!(json_string!(results), @r###"
+    [
+      {
+        "id": 6,
+        "name": "doc_00005"
+      },
+      {
+        "id": 7,
+        "name": "doc_00005"
+      }
+    ]
+    "###);
+}
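Before reading the fix, it helps to recall the Iterator::nth contract that the corrected arithmetic relies on. This is plain Rust standard-library behavior, shown as a self-contained snippet (not Meilisearch code):

fn main() {
    // Iterator::nth(k) is zero-indexed: it consumes k + 1 items and
    // returns the item at index k, i.e. it skips exactly k items.
    let mut it = 0..10;
    assert_eq!(it.nth(0), Some(0)); // skips nothing, yields the first item
    assert_eq!(it.nth(2), Some(3)); // skips items 1 and 2, yields 3
    // So to skip `to_skip` remaining documents and yield the next one,
    // the forwarding call must be nth(to_skip), not nth(to_skip + 1).
}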
@@ -87,7 +87,7 @@ impl Iterator for SortedDocumentsIterator<'_> {
         };
 
         // Otherwise don't directly iterate over children, skip them if we know we will go further
-        let mut to_skip = n - 1;
+        let mut to_skip = n;
         while to_skip > 0 {
             if let Err(e) = SortedDocumentsIterator::update_current(
                 current_child,
@@ -108,7 +108,7 @@ impl Iterator for SortedDocumentsIterator<'_> {
             continue;
         } else {
             // The current iterator is large enough, so we can forward the call to it.
-            return inner.nth(to_skip + 1);
+            return inner.nth(to_skip);
         }
     }
 
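To make the corrected control flow concrete, here is a minimal sketch of the skip-whole-children-then-forward pattern the hunk above touches. The Nested type and its fields are invented for illustration (the real SortedDocumentsIterator walks sorted buckets of documents), but the counting argument is the same: start the skip counter at n and forward nth(to_skip), so every quantity stays zero-indexed.

// Illustrative stand-in, not Meilisearch's SortedDocumentsIterator.
struct Nested {
    children: Vec<std::vec::IntoIter<u32>>,
    current: usize,
}

impl Iterator for Nested {
    type Item = u32;

    fn next(&mut self) -> Option<u32> {
        // Drain the current child, then move on to the next one.
        while self.current < self.children.len() {
            if let Some(v) = self.children[self.current].next() {
                return Some(v);
            }
            self.current += 1;
        }
        None
    }

    fn nth(&mut self, n: usize) -> Option<u32> {
        let mut to_skip = n; // as in the fix: n, not n - 1
        while self.current < self.children.len() {
            let remaining = self.children[self.current].len();
            if to_skip >= remaining {
                // Skip this whole child without iterating its items.
                to_skip -= remaining;
                self.current += 1;
            } else {
                // The current child is large enough: forward the call.
                // nth(to_skip) skips exactly to_skip items (zero-indexed).
                return self.children[self.current].nth(to_skip);
            }
        }
        None
    }
}

fn main() {
    let mut it = Nested {
        children: vec![vec![0, 1].into_iter(), vec![2, 3, 4].into_iter()],
        current: 0,
    };
    assert_eq!(it.nth(3), Some(3)); // skips 0, 1, 2; yields 3
    assert_eq!(it.next(), Some(4)); // iteration continues correctly afterwards
}

The usage in main shows the invariant the fix restores: nth(k) on the nested iterator must behave exactly like nth(k) on a flat iterator over the same documents, even when the skip crosses a child boundary.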