Compare commits

...

17 Commits

Author SHA1 Message Date
Clément Renault
f4225164fa Merge pull request #6073 from meilisearch/update-version-v1.31.0
Update version for the next release (v1.31.0) in Cargo.toml
2025-12-22 13:33:28 +00:00
Kerollmops
ee9080280e Update version for the next release (v1.31.0) in Cargo.toml 2025-12-22 13:12:00 +00:00
Many the fish
92354d0e03 Merge pull request #6065 from meilisearch/tmp-release-v1.30.1
Bring changes from v1.30.1 into main
2025-12-22 11:27:44 +00:00
Kerollmops
dc7fca4b47 Skip search hugging face on Windows 2025-12-22 12:11:14 +01:00
Kerollmops
a2af104cd0 Fix flaky Ollama tests 2025-12-22 12:11:14 +01:00
Kerollmops
4c666e9774 Fix duplicate import 2025-12-18 18:17:34 +01:00
Kerollmops
7a7e75fc41 Merge remote-tracking branch 'origin/main' into tmp-release-v1.30.1 2025-12-18 17:57:48 +01:00
Clément Renault
9db2b16eed Merge pull request #6063 from meilisearch/bump-hannoy-with-fix
Bump hannoy to v0.1.2
2025-12-18 16:54:20 +00:00
Clément Renault
7d0633df22 Merge pull request #6064 from meilisearch/hotfix-weightmap-missing-entry 2025-12-18 17:28:33 +01:00
Clément Renault
055c65211f Merge pull request #6059 from meilisearch/exp-806-index-deletion-removed-more-tasks-that-it-should-have-after 2025-12-18 17:21:47 +01:00
ManyTheFish
ae77ca9033 Update Meilisearch version to v1.30.1 2025-12-18 17:00:41 +01:00
ManyTheFish
2138504df9 Hotfix: log a warning instead of returning an internal error when FieldidsWeightsMap is missing an entry 2025-12-18 16:57:40 +01:00
Kerollmops
2ba3fafcc3 Bump hannoy to v0.1.2 2025-12-18 16:55:30 +01:00
YoEight
614affd0b1 Fix task attribution during index swap to prevent cross-index task loss 2025-12-18 10:52:17 -05:00
Clémentine
8582975fc5 Make CI test title more human friendly 2025-12-18 16:34:03 +01:00
Clément Renault
14db3dbcc4 Merge pull request #6062 from meilisearch/curquiza-patch-1
Make CI test titles more human friendly
2025-12-18 15:32:23 +00:00
Clémentine
a61ef955fc Make CI test title more human friendly 2025-12-18 14:58:06 +01:00
13 changed files with 214 additions and 110 deletions

View File

@@ -15,7 +15,7 @@ env:
jobs:
  test-linux:
-    name: Tests on Ubuntu
+    name: Tests on ${{ matrix.runner }} ${{ matrix.features }}
    runs-on: ${{ matrix.runner }}
    strategy:
      matrix:
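Previously the job title was hardcoded to "Tests on Ubuntu" even though the job already ran on a matrix of runners (note the pre-existing runs-on: ${{ matrix.runner }} context line); templating the name makes each matrix entry report under the runner and feature set it actually uses.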

Cargo.lock generated
View File

@@ -580,7 +580,7 @@ source = "git+https://github.com/meilisearch/bbqueue#e8af4a4bccc8eb36b2b0442c4a9
[[package]]
name = "benchmarks"
version = "1.30.0"
version = "1.31.0"
dependencies = [
"anyhow",
"bumpalo",
@@ -790,7 +790,7 @@ dependencies = [
[[package]]
name = "build-info"
version = "1.30.0"
version = "1.31.0"
dependencies = [
"anyhow",
"time",
@@ -1786,7 +1786,7 @@ dependencies = [
[[package]]
name = "dump"
version = "1.30.0"
version = "1.31.0"
dependencies = [
"anyhow",
"big_s",
@@ -2018,7 +2018,7 @@ checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]]
name = "file-store"
version = "1.30.0"
version = "1.31.0"
dependencies = [
"tempfile",
"thiserror 2.0.17",
@@ -2040,7 +2040,7 @@ dependencies = [
[[package]]
name = "filter-parser"
version = "1.30.0"
version = "1.31.0"
dependencies = [
"insta",
"levenshtein_automata",
@@ -2068,7 +2068,7 @@ dependencies = [
[[package]]
name = "flatten-serde-json"
version = "1.30.0"
version = "1.31.0"
dependencies = [
"criterion",
"serde_json",
@@ -2231,7 +2231,7 @@ dependencies = [
[[package]]
name = "fuzzers"
version = "1.30.0"
version = "1.31.0"
dependencies = [
"arbitrary",
"bumpalo",
@@ -2698,9 +2698,9 @@ dependencies = [
[[package]]
name = "hannoy"
version = "0.1.0-nested-rtxns"
version = "0.1.2-nested-rtxns"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be82bf3f2108ddc8885e3d306fcd7f4692066bfe26065ca8b42ba417f3c26dd1"
checksum = "533c952127a7e73448f26af313ac7b98012516561e48e953781cd6b30e573436"
dependencies = [
"bytemuck",
"byteorder",
@@ -3185,7 +3185,7 @@ dependencies = [
[[package]]
name = "index-scheduler"
version = "1.30.0"
version = "1.31.0"
dependencies = [
"anyhow",
"backoff",
@@ -3449,7 +3449,7 @@ dependencies = [
[[package]]
name = "json-depth-checker"
version = "1.30.0"
version = "1.31.0"
dependencies = [
"criterion",
"serde_json",
@@ -3939,7 +3939,7 @@ checksum = "ae960838283323069879657ca3de837e9f7bbb4c7bf6ea7f1b290d5e9476d2e0"
[[package]]
name = "meili-snap"
version = "1.30.0"
version = "1.31.0"
dependencies = [
"insta",
"md5 0.8.0",
@@ -3950,7 +3950,7 @@ dependencies = [
[[package]]
name = "meilisearch"
version = "1.30.0"
version = "1.31.0"
dependencies = [
"actix-cors",
"actix-http",
@@ -4048,7 +4048,7 @@ dependencies = [
[[package]]
name = "meilisearch-auth"
version = "1.30.0"
version = "1.31.0"
dependencies = [
"base64 0.22.1",
"enum-iterator",
@@ -4067,7 +4067,7 @@ dependencies = [
[[package]]
name = "meilisearch-types"
version = "1.30.0"
version = "1.31.0"
dependencies = [
"actix-web",
"anyhow",
@@ -4105,7 +4105,7 @@ dependencies = [
[[package]]
name = "meilitool"
version = "1.30.0"
version = "1.31.0"
dependencies = [
"anyhow",
"clap",
@@ -4139,7 +4139,7 @@ dependencies = [
[[package]]
name = "milli"
version = "1.30.0"
version = "1.31.0"
dependencies = [
"arroy",
"bbqueue",
@@ -4718,7 +4718,7 @@ checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"
[[package]]
name = "permissive-json-pointer"
version = "1.30.0"
version = "1.31.0"
dependencies = [
"big_s",
"serde_json",
@@ -7758,7 +7758,7 @@ dependencies = [
[[package]]
name = "xtask"
version = "1.30.0"
version = "1.31.0"
dependencies = [
"anyhow",
"build-info",

View File

@@ -23,7 +23,7 @@ members = [
]
[workspace.package]
version = "1.30.0"
version = "1.31.0"
authors = [
"Quentin de Quelen <quentin@dequelen.me>",
"Clément Renault <clement@meilisearch.com>",

View File

@@ -662,13 +662,8 @@ impl IndexScheduler {
        // 2. Get the task set for index = name that appeared before the index swap task
        let mut index_lhs_task_ids = self.queue.tasks.index_tasks(wtxn, lhs)?;
        index_lhs_task_ids.remove_range(task_id..);
-        let index_rhs_task_ids = if rename {
-            let mut index_rhs_task_ids = self.queue.tasks.index_tasks(wtxn, rhs)?;
-            index_rhs_task_ids.remove_range(task_id..);
-            index_rhs_task_ids
-        } else {
-            RoaringBitmap::new()
-        };
+        let mut index_rhs_task_ids = self.queue.tasks.index_tasks(wtxn, rhs)?;
+        index_rhs_task_ids.remove_range(task_id..);

        // 3. before_name -> new_name in the task's KindWithContent
        progress.update_progress(InnerSwappingTwoIndexes::UpdateTheTasks);
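The regression fixed by this change: for a plain swap (rename: false), the rhs task set was replaced with an empty RoaringBitmap, so tasks that had only ever targeted rhs lost their attribution when the two indexes traded places. The new code collects both sides unconditionally. Below is a minimal sketch of the intended re-attribution, using std collections in place of RoaringBitmap and hypothetical names rather than the scheduler's actual code:

```rust
use std::collections::{BTreeMap, BTreeSet};

// Hypothetical, simplified model of the fix: tasks registered before the
// swap task cross over to the opposite index on *both* sides, whether or
// not the swap is a rename; later tasks keep their current attribution.
fn swap_task_attribution(
    index_tasks: &mut BTreeMap<String, BTreeSet<u32>>,
    lhs: &str,
    rhs: &str,
    swap_task_id: u32,
) {
    let mut split = |name: &str| -> (BTreeSet<u32>, BTreeSet<u32>) {
        let all = index_tasks.remove(name).unwrap_or_default();
        // Only tasks enqueued before the swap task are re-attributed.
        all.into_iter().partition(|&id| id < swap_task_id)
    };
    let (lhs_before, lhs_after) = split(lhs);
    let (rhs_before, rhs_after) = split(rhs);
    index_tasks.insert(lhs.to_owned(), &rhs_before | &lhs_after);
    index_tasks.insert(rhs.to_owned(), &lhs_before | &rhs_after);
}

fn main() {
    let mut tasks = BTreeMap::new();
    tasks.insert("a".to_owned(), BTreeSet::from([0, 2]));
    tasks.insert("b".to_owned(), BTreeSet::from([1, 3]));
    swap_task_attribution(&mut tasks, "a", "b", 4);
    // "a" now owns the tasks that historically targeted "b", and vice versa.
    assert_eq!(tasks["a"], BTreeSet::from([1, 3]));
    assert_eq!(tasks["b"], BTreeSet::from([0, 2]));
}
```

The new test swap_indexes_with_correct_task_allocations further down exercises exactly this property through the public task queries.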

View File

@@ -7,9 +7,9 @@ source: crates/index-scheduler/src/scheduler/test.rs
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: Some("id"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "b", primary_key: Some("id") }}
-1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: Some("id"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "b", primary_key: Some("id") }}
+1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: Some("id"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "a", primary_key: Some("id") }}
2 {uid: 2, batch_uid: 2, status: succeeded, details: { primary_key: Some("id"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "d", primary_key: Some("id") }}
-3 {uid: 3, batch_uid: 3, status: succeeded, details: { primary_key: Some("id"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "d", primary_key: Some("id") }}
+3 {uid: 3, batch_uid: 3, status: succeeded, details: { primary_key: Some("id"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "c", primary_key: Some("id") }}
4 {uid: 4, batch_uid: 4, status: succeeded, details: { swaps: [IndexSwap { indexes: ("a", "b"), rename: false }, IndexSwap { indexes: ("c", "d"), rename: false }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("a", "b"), rename: false }, IndexSwap { indexes: ("c", "d"), rename: false }] }}
5 {uid: 5, status: enqueued, details: { swaps: [IndexSwap { indexes: ("a", "c"), rename: false }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("a", "c"), rename: false }] }}
----------------------------------------------------------------------
@@ -22,10 +22,10 @@ succeeded [0,1,2,3,4,]
"indexSwap" [4,5,]
----------------------------------------------------------------------
### Index Tasks:
-a [4,5,]
-b [0,1,4,]
-c [4,5,]
-d [2,3,4,]
+a [1,4,5,]
+b [0,4,]
+c [3,4,5,]
+d [2,4,]
----------------------------------------------------------------------
### Index Mapper:
a: { number_of_documents: 0, field_distribution: {} }

View File

@@ -7,9 +7,9 @@ source: crates/index-scheduler/src/scheduler/test.rs
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: Some("id"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "b", primary_key: Some("id") }}
-1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: Some("id"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "b", primary_key: Some("id") }}
+1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: Some("id"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "c", primary_key: Some("id") }}
2 {uid: 2, batch_uid: 2, status: succeeded, details: { primary_key: Some("id"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "d", primary_key: Some("id") }}
-3 {uid: 3, batch_uid: 3, status: succeeded, details: { primary_key: Some("id"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "d", primary_key: Some("id") }}
+3 {uid: 3, batch_uid: 3, status: succeeded, details: { primary_key: Some("id"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "a", primary_key: Some("id") }}
4 {uid: 4, batch_uid: 4, status: succeeded, details: { swaps: [IndexSwap { indexes: ("c", "b"), rename: false }, IndexSwap { indexes: ("a", "d"), rename: false }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("c", "b"), rename: false }, IndexSwap { indexes: ("a", "d"), rename: false }] }}
5 {uid: 5, batch_uid: 5, status: succeeded, details: { swaps: [IndexSwap { indexes: ("a", "c"), rename: false }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("a", "c"), rename: false }] }}
----------------------------------------------------------------------
@@ -22,10 +22,10 @@ succeeded [0,1,2,3,4,5,]
"indexSwap" [4,5,]
----------------------------------------------------------------------
### Index Tasks:
-a [5,]
-b [0,1,4,]
-c [4,5,]
-d [2,3,4,]
+a [3,4,5,]
+b [0,4,]
+c [1,4,5,]
+d [2,4,]
----------------------------------------------------------------------
### Index Mapper:
a: { number_of_documents: 0, field_distribution: {} }

View File

@@ -7,9 +7,9 @@ source: crates/index-scheduler/src/scheduler/test.rs
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: Some("id"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "b", primary_key: Some("id") }}
-1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: Some("id"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "b", primary_key: Some("id") }}
+1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: Some("id"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "c", primary_key: Some("id") }}
2 {uid: 2, batch_uid: 2, status: succeeded, details: { primary_key: Some("id"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "d", primary_key: Some("id") }}
-3 {uid: 3, batch_uid: 3, status: succeeded, details: { primary_key: Some("id"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "d", primary_key: Some("id") }}
+3 {uid: 3, batch_uid: 3, status: succeeded, details: { primary_key: Some("id"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "a", primary_key: Some("id") }}
4 {uid: 4, batch_uid: 4, status: succeeded, details: { swaps: [IndexSwap { indexes: ("c", "b"), rename: false }, IndexSwap { indexes: ("a", "d"), rename: false }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("c", "b"), rename: false }, IndexSwap { indexes: ("a", "d"), rename: false }] }}
5 {uid: 5, batch_uid: 5, status: succeeded, details: { swaps: [IndexSwap { indexes: ("a", "c"), rename: false }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("a", "c"), rename: false }] }}
6 {uid: 6, batch_uid: 6, status: succeeded, details: { swaps: [] }, kind: IndexSwap { swaps: [] }}
@@ -23,10 +23,10 @@ succeeded [0,1,2,3,4,5,6,]
"indexSwap" [4,5,6,]
----------------------------------------------------------------------
### Index Tasks:
-a [5,]
-b [0,1,4,]
-c [4,5,]
-d [2,3,4,]
+a [3,4,5,]
+b [0,4,]
+c [1,4,5,]
+d [2,4,]
----------------------------------------------------------------------
### Index Mapper:
a: { number_of_documents: 0, field_distribution: {} }

View File

@@ -5,7 +5,7 @@ use crate::test_utils::Breakpoint::*;
use crate::test_utils::{
index_creation_task, read_json, replace_document_import_task, sample_documents,
};
-use crate::IndexScheduler;
+use crate::{IndexScheduler, Query};
use big_s::S;
use meili_snap::{json_string, snapshot};
use meilisearch_auth::AuthFilter;
@@ -404,6 +404,103 @@ fn swap_indexes() {
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "third_empty_swap_processed");
}
+
+#[test]
+fn swap_indexes_with_correct_task_allocations() {
+    let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
+
+    let to_enqueue = [index_creation_task("a", "id"), index_creation_task("b", "id")];
+
+    for task in to_enqueue {
+        let _ = index_scheduler.register(task, None, false).unwrap();
+        index_scheduler.assert_internally_consistent();
+    }
+    handle.advance_n_successful_batches(2);
+
+    let (file0, count0) = sample_documents(&index_scheduler, 1, 1);
+    let (file1, count1) = sample_documents(&index_scheduler, 2, 2);
+    let (file2, count2) = sample_documents(&index_scheduler, 3, 3);
+    let (file3, count3) = sample_documents(&index_scheduler, 4, 4);
+
+    file0.persist().unwrap();
+    file1.persist().unwrap();
+    file2.persist().unwrap();
+    file3.persist().unwrap();
+
+    index_scheduler
+        .register(replace_document_import_task("a", Some("id"), 1, count0), None, false)
+        .unwrap();
+    index_scheduler
+        .register(replace_document_import_task("a", Some("id"), 2, count1), None, false)
+        .unwrap();
+    index_scheduler
+        .register(replace_document_import_task("b", Some("id"), 3, count2), None, false)
+        .unwrap();
+    index_scheduler
+        .register(replace_document_import_task("b", Some("id"), 4, count3), None, false)
+        .unwrap();
+    handle.advance_n_successful_batches(2);
+
+    let (a_tasks, _) = index_scheduler
+        .get_tasks_from_authorized_indexes(
+            &Query { index_uids: Some(vec!["a".to_string()]), ..Default::default() },
+            &AuthFilter::default(),
+        )
+        .unwrap();
+    assert_eq!(a_tasks.len(), 3);
+
+    let (b_tasks, _) = index_scheduler
+        .get_tasks_from_authorized_indexes(
+            &Query { index_uids: Some(vec!["b".to_string()]), ..Default::default() },
+            &AuthFilter::default(),
+        )
+        .unwrap();
+    assert_eq!(b_tasks.len(), 3);
+
+    index_scheduler
+        .register(
+            KindWithContent::IndexSwap {
+                swaps: vec![IndexSwap { indexes: ("a".to_owned(), "b".to_owned()), rename: false }],
+            },
+            None,
+            false,
+        )
+        .unwrap();
+    handle.advance_one_successful_batch();
+
+    let (a_after_tasks, _) = index_scheduler
+        .get_tasks_from_authorized_indexes(
+            &Query { index_uids: Some(vec!["a".to_string()]), ..Default::default() },
+            &AuthFilter::default(),
+        )
+        .unwrap();
+    let (b_after_tasks, _) = index_scheduler
+        .get_tasks_from_authorized_indexes(
+            &Query { index_uids: Some(vec!["b".to_string()]), ..Default::default() },
+            &AuthFilter::default(),
+        )
+        .unwrap();
+
+    assert_eq!(a_after_tasks.len(), 3);
+    assert_eq!(a_after_tasks.len(), b_after_tasks.len());
+
+    for (a, b) in a_tasks.iter().zip(b_after_tasks.iter()) {
+        assert_eq!(a.uid, b.uid);
+    }
+    for (b, a) in b_tasks.iter().zip(a_after_tasks.iter()) {
+        assert_eq!(b.uid, a.uid);
+    }
+}
#[test]
fn swap_indexes_errors() {
let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);

View File

@@ -452,6 +452,7 @@ async fn limit_offset() {
}
#[actix_rt::test]
+#[cfg(not(windows))]
async fn simple_search_hf() {
let server = Server::new_shared();
let index = index_with_documents_hf(server, &SIMPLE_SEARCH_DOCUMENTS).await;
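The added #[cfg(not(windows))] attribute compiles simple_search_hf out entirely on Windows builds; this is the "Skip search hugging face on Windows" change from the commit list above.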

View File

@@ -103,7 +103,7 @@ async fn swap_indexes() {
{
"uid": 1,
"batchUid": 1,
"indexUid": "b",
"indexUid": "a",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
@@ -266,7 +266,7 @@ async fn swap_indexes() {
{
"uid": 4,
"batchUid": 4,
"indexUid": "d",
"indexUid": "c",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
@@ -341,7 +341,7 @@ async fn swap_indexes() {
{
"uid": 0,
"batchUid": 0,
"indexUid": "b",
"indexUid": "a",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,

View File

@@ -274,19 +274,19 @@ async fn test_both_apis() {
"birthyear": 2011,
"breed": "Beagle"
},
-{
-"id": 3,
-"name": "Max",
-"gender": "M",
-"birthyear": 1995,
-"breed": "Labrador Retriever"
-},
{
"id": 2,
"name": "Vénus",
"gender": "F",
"birthyear": 2003,
"breed": "Jack Russel Terrier"
-}
+},
+{
+"id": 3,
+"name": "Max",
+"gender": "M",
+"birthyear": 1995,
+"breed": "Labrador Retriever"
+}
]
"###);
@@ -314,19 +314,19 @@ async fn test_both_apis() {
"birthyear": 2011,
"breed": "Beagle"
},
-{
-"id": 3,
-"name": "Max",
-"gender": "M",
-"birthyear": 1995,
-"breed": "Labrador Retriever"
-},
{
"id": 2,
"name": "Vénus",
"gender": "F",
"birthyear": 2003,
"breed": "Jack Russel Terrier"
-}
+},
+{
+"id": 3,
+"name": "Max",
+"gender": "M",
+"birthyear": 1995,
+"breed": "Labrador Retriever"
+}
]
"###);
@@ -354,19 +354,19 @@ async fn test_both_apis() {
"birthyear": 2011,
"breed": "Beagle"
},
-{
-"id": 0,
-"name": "kefir",
-"gender": "M",
-"birthyear": 2023,
-"breed": "Patou"
-},
{
"id": 2,
"name": "Vénus",
"gender": "F",
"birthyear": 2003,
"breed": "Jack Russel Terrier"
-}
+},
+{
+"id": 0,
+"name": "kefir",
+"gender": "M",
+"birthyear": 2023,
+"breed": "Patou"
+}
]
"###);
@@ -394,19 +394,19 @@ async fn test_both_apis() {
"birthyear": 2011,
"breed": "Beagle"
},
-{
-"id": 0,
-"name": "kefir",
-"gender": "M",
-"birthyear": 2023,
-"breed": "Patou"
-},
{
"id": 2,
"name": "Vénus",
"gender": "F",
"birthyear": 2003,
"breed": "Jack Russel Terrier"
-}
+},
+{
+"id": 0,
+"name": "kefir",
+"gender": "M",
+"birthyear": 2023,
+"breed": "Patou"
+}
]
"###);
@@ -420,13 +420,6 @@ async fn test_both_apis() {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]), @r###"
[
-{
-"id": 0,
-"name": "kefir",
-"gender": "M",
-"birthyear": 2023,
-"breed": "Patou"
-},
{
"id": 1,
"name": "Intel",
@@ -435,11 +428,11 @@ async fn test_both_apis() {
"breed": "Beagle"
},
{
"id": 3,
"name": "Max",
"id": 0,
"name": "kefir",
"gender": "M",
"birthyear": 1995,
"breed": "Labrador Retriever"
"birthyear": 2023,
"breed": "Patou"
},
{
"id": 2,
@@ -447,6 +440,13 @@ async fn test_both_apis() {
"gender": "F",
"birthyear": 2003,
"breed": "Jack Russel Terrier"
-}
+},
+{
+"id": 3,
+"name": "Max",
+"gender": "M",
+"birthyear": 1995,
+"breed": "Labrador Retriever"
+}
]
"###);
@@ -460,13 +460,6 @@ async fn test_both_apis() {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]), @r###"
[
-{
-"id": 0,
-"name": "kefir",
-"gender": "M",
-"birthyear": 2023,
-"breed": "Patou"
-},
{
"id": 1,
"name": "Intel",
@@ -475,11 +468,11 @@ async fn test_both_apis() {
"breed": "Beagle"
},
{
"id": 3,
"name": "Max",
"id": 0,
"name": "kefir",
"gender": "M",
"birthyear": 1995,
"breed": "Labrador Retriever"
"birthyear": 2023,
"breed": "Patou"
},
{
"id": 2,
@@ -487,6 +480,13 @@ async fn test_both_apis() {
"gender": "F",
"birthyear": 2003,
"breed": "Jack Russel Terrier"
-}
+},
+{
+"id": 3,
+"name": "Max",
+"gender": "M",
+"birthyear": 1995,
+"breed": "Labrador Retriever"
+}
]
"###);

View File

@@ -91,7 +91,7 @@ rhai = { version = "1.23.6", features = [
"sync",
] }
arroy = "0.6.4-nested-rtxns"
hannoy = { version = "0.1.0-nested-rtxns", features = ["arroy"] }
hannoy = { version = "0.1.2-nested-rtxns", features = ["arroy"] }
rand = "0.8.5"
tracing = "0.1.41"
ureq = { version = "2.12.1", features = ["json"] }

View File

@@ -79,16 +79,27 @@ impl RankingRuleGraphTrait for FidGraph {
let mut edges = vec![];
for fid in all_fields.iter().copied() {
-            let weight = weights_map
-                .weight(fid)
-                .ok_or(InternalError::FieldidsWeightsMapMissingEntry { key: fid })?;
-            if weight > current_max_weight {
-                current_max_weight = weight;
+            match weights_map.weight(fid) {
+                Some(weight) => {
+                    if weight > current_max_weight {
+                        current_max_weight = weight;
+                    }
+                    edges.push((
+                        weight as u32 * term.term_ids.len() as u32,
+                        conditions_interner
+                            .insert(FidCondition { term: term.clone(), fid: Some(fid) }),
+                    ));
+                }
+                None => {
+                    // Hotfix: this is a temporary solution to handle the case where the weight is not found in the weights map.
+                    // This is due to a database corruption in word_fid_docids database.
+                    tracing::warn!(
+                        "{:?}",
+                        InternalError::FieldidsWeightsMapMissingEntry { key: fid }
+                    );
+                }
            }
-            edges.push((
-                weight as u32 * term.term_ids.len() as u32,
-                conditions_interner.insert(FidCondition { term: term.clone(), fid: Some(fid) }),
-            ));
}
// always lookup the max_fid if we don't already and add an artificial condition for max scoring
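The shape of this hotfix, in isolation: rather than propagating a hard InternalError when the derived weights map has no entry for a field (which failed the whole search), the missing field is logged and skipped, so it simply contributes no ranking edge. A minimal sketch of the degrade-to-warning pattern, with hypothetical stand-in types rather than milli's actual API:

```rust
use std::collections::HashMap;

type FieldId = u16;

// Hypothetical stand-ins for the real types; illustrates the
// degrade-to-warning pattern from the hotfix above.
fn collect_edges(
    weights: &HashMap<FieldId, u16>,
    fields: &[FieldId],
    term_count: u32,
) -> Vec<(u32, FieldId)> {
    let mut edges = Vec::new();
    for &fid in fields {
        match weights.get(&fid) {
            Some(&weight) => edges.push((weight as u32 * term_count, fid)),
            // Before the hotfix this was a hard error; now the corrupted
            // entry is logged and skipped so the query can still complete.
            None => eprintln!("warning: missing weight for field {fid}"),
        }
    }
    edges
}

fn main() {
    let weights = HashMap::from([(0, 1), (2, 3)]);
    // Field 1 is "corrupted": listed among the fields but absent from the map.
    let edges = collect_edges(&weights, &[0, 1, 2], 2);
    assert_eq!(edges, vec![(2, 0), (6, 2)]);
}
```

The trade-off: a document matching on the corrupted field loses that field's contribution to attribute ranking, but the search no longer errors out.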