Compare commits


1 commit

Author: ManyTheFish
SHA1: 920348ffa8
Message: Kind word
Date: 2024-09-19 14:57:56 +02:00
56 changed files with 3734 additions and 4995 deletions

Cargo.lock (generated)

@@ -386,16 +386,15 @@ checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711"

 [[package]]
 name = "arroy"
-version = "0.5.0"
+version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dfc5f272f38fa063bbff0a7ab5219404e221493de005e2b4078c62d626ef567e"
+checksum = "2ece9e5347e7fdaaea3181dec7f916677ad5f3fcbac183648ce1924eb4aeef9a"
 dependencies = [
  "bytemuck",
  "byteorder",
  "heed",
  "log",
  "memmap2",
- "nohash",
  "ordered-float",
  "rand",
  "rayon",
@@ -934,9 +933,9 @@ dependencies = [

 [[package]]
 name = "charabia"
-version = "0.9.1"
+version = "0.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "55ff52497324e7d168505a16949ae836c14595606fab94687238d2f6c8d4c798"
+checksum = "03cd8f290cae94934cdd0103c14c2de9faf2d7d85be0d24d511af2bf1b14119d"
 dependencies = [
  "aho-corasick",
  "csv",
@@ -2839,7 +2838,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e310b3a6b5907f99202fcdb4960ff45b93735d7c7d96b760fcff8db2dc0e103d"
 dependencies = [
  "cfg-if",
- "windows-targets 0.52.4",
+ "windows-targets 0.48.1",
 ]

 [[package]]
@@ -3687,12 +3686,6 @@ version = "0.0.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6d02c0b00610773bb7fc61d85e13d86c7858cbdf00e1a120bfc41bc055dbaa0e"

-[[package]]
-name = "nohash"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a0f889fb66f7acdf83442c35775764b51fed3c606ab9cee51500dbde2cf528ca"
-
 [[package]]
 name = "nom"
 version = "7.1.3"
@@ -4582,8 +4575,9 @@ dependencies = [

 [[package]]
 name = "rhai"
-version = "1.20.0"
-source = "git+https://github.com/rhaiscript/rhai?rev=ef3df63121d27aacd838f366f2b83fd65f20a1e4#ef3df63121d27aacd838f366f2b83fd65f20a1e4"
+version = "1.19.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "61797318be89b1a268a018a92a7657096d83f3ecb31418b9e9c16dcbb043b702"
 dependencies = [
  "ahash 0.8.11",
  "bitflags 2.6.0",
@@ -4600,7 +4594,8 @@ dependencies = [
 [[package]]
 name = "rhai_codegen"
 version = "2.2.0"
-source = "git+https://github.com/rhaiscript/rhai?rev=ef3df63121d27aacd838f366f2b83fd65f20a1e4#ef3df63121d27aacd838f366f2b83fd65f20a1e4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a5a11a05ee1ce44058fa3d5961d05194fdbe3ad6b40f904af764d81b86450e6b"
 dependencies = [
  "proc-macro2",
  "quote",

README.md

@@ -45,14 +45,14 @@ See the list of all our example apps in our [demos repository](https://github.co
 ## ✨ Features
 - **Hybrid search:** Combine the best of both [semantic](https://www.meilisearch.com/docs/learn/experimental/vector_search?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features) & full-text search to get the most relevant results
 - **Search-as-you-type:** Find & display results in less than 50 milliseconds to provide an intuitive experience
-- **[Typo tolerance](https://www.meilisearch.com/docs/learn/relevancy/typo_tolerance_settings?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** get relevant matches even when queries contain typos and misspellings
+- **[Typo tolerance](https://www.meilisearch.com/docs/learn/configuration/typo_tolerance?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** get relevant matches even when queries contain typos and misspellings
 - **[Filtering](https://www.meilisearch.com/docs/learn/fine_tuning_results/filtering?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features) and [faceted search](https://www.meilisearch.com/docs/learn/fine_tuning_results/faceted_search?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** enhance your users' search experience with custom filters and build a faceted search interface in a few lines of code
 - **[Sorting](https://www.meilisearch.com/docs/learn/fine_tuning_results/sorting?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** sort results based on price, date, or pretty much anything else your users need
-- **[Synonym support](https://www.meilisearch.com/docs/learn/relevancy/synonyms?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** configure synonyms to include more relevant content in your search results
+- **[Synonym support](https://www.meilisearch.com/docs/learn/configuration/synonyms?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** configure synonyms to include more relevant content in your search results
 - **[Geosearch](https://www.meilisearch.com/docs/learn/fine_tuning_results/geosearch?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** filter and sort documents based on geographic data
 - **[Extensive language support](https://www.meilisearch.com/docs/learn/what_is_meilisearch/language?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** search datasets in any language, with optimized support for Chinese, Japanese, Hebrew, and languages using the Latin alphabet
 - **[Security management](https://www.meilisearch.com/docs/learn/security/master_api_keys?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** control which users can access what data with API keys that allow fine-grained permissions handling
-- **[Multi-Tenancy](https://www.meilisearch.com/docs/learn/security/multitenancy_tenant_tokens?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** personalize search results for any number of application tenants
+- **[Multi-Tenancy](https://www.meilisearch.com/docs/learn/security/tenant_tokens?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** personalize search results for any number of application tenants
 - **Highly Customizable:** customize Meilisearch to your specific needs or use our out-of-the-box and hassle-free presets
 - **[RESTful API](https://www.meilisearch.com/docs/reference/api/overview?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** integrate Meilisearch in your technical stack with our plugins and SDKs
 - **Easy to install, deploy, and maintain**

dump/src/reader/mod.rs

@@ -255,8 +255,6 @@ pub(crate) mod test {
        }
        "###);

-       insta::assert_json_snapshot!(vector_index.settings().unwrap());
-
        {
            let documents: Result<Vec<_>> = vector_index.documents().unwrap().collect();
            let mut documents = documents.unwrap();
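
For orientation: the removed call is an insta snapshot assertion, which is what generated the two .snap files changed below. A minimal, hypothetical sketch of the mechanism (not the actual Meilisearch test; assumes insta with its "json" feature and serde_json as dev-dependencies):

    #[test]
    fn settings_snapshot() {
        // Any serde_json::Value works; insta serializes it to JSON.
        let settings = serde_json::json!({ "displayedAttributes": ["*"] });
        // The first run writes snapshots/<module>__settings_snapshot.snap;
        // later runs fail if the value no longer matches the stored file.
        insta::assert_json_snapshot!(settings);
    }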

Snapshot file (name not shown)

@@ -1,56 +1,783 @@
 ---
 source: dump/src/reader/mod.rs
-expression: vector_index.settings().unwrap()
+expression: document
 ---
-{
-  "displayedAttributes": [
-    "*"
-  ],
-  "searchableAttributes": [
-    "*"
-  ],
-  "filterableAttributes": [],
-  "sortableAttributes": [],
-  "rankingRules": [
-    "words",
-    "typo",
-    "proximity",
-    "attribute",
-    "sort",
-    "exactness"
-  ],
-  "stopWords": [],
-  "nonSeparatorTokens": [],
-  "separatorTokens": [],
-  "dictionary": [],
-  "synonyms": {},
-  "distinctAttribute": null,
-  "proximityPrecision": "byWord",
-  "typoTolerance": {
-    "enabled": true,
-    "minWordSizeForTypos": {
-      "oneTypo": 5,
-      "twoTypos": 9
-    },
-    "disableOnWords": [],
-    "disableOnAttributes": []
-  },
-  "faceting": {
-    "maxValuesPerFacet": 100,
-    "sortFacetValuesBy": {
-      "*": "alpha"
-    }
-  },
-  "pagination": {
-    "maxTotalHits": 1000
-  },
-  "embedders": {
-    "default": {
-      "source": "huggingFace",
-      "model": "BAAI/bge-base-en-v1.5",
-      "revision": "617ca489d9e86b49b8167676d8220688b99db36e",
-      "documentTemplate": "{% for field in fields %} {{ field.name }}: {{ field.value }}\n{% endfor %}"
-    }
-  },
-  "searchCutoffMs": null
-}
+{
+  "id": "e3",
+  "desc": "overriden vector + map",
+  "_vectors": {
+    "default": [
+      0.1,
+      [… 766 more identical "0.1," lines elided; the vector holds 768 values in total …]
+      0.1
+    ],
+    "toto": [
+      0.1
+    ]
+  }
+}
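
The new snapshot above captures a document carrying user-provided vectors for two embedders ("default" and "toto"). A minimal sketch of building such a payload (hypothetical values; assumes serde_json, and that vectors travel in the documents' reserved `_vectors` field as the snapshot shows):

    // A document with explicit vectors for two embedders; the 768-length
    // "default" vector matches the dimension of the BAAI/bge-base-en-v1.5
    // embedder configured in the old settings snapshot.
    let document = serde_json::json!({
        "id": "e3",
        "desc": "overriden vector + map",
        "_vectors": {
            "default": vec![0.1f32; 768],
            "toto": [0.1]
        }
    });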

Snapshot file (name not shown)

@@ -1,780 +0,0 @@
----
-source: dump/src/reader/mod.rs
-expression: document
----
-{
-  "id": "e0",
-  "desc": "overriden vector",
-  "_vectors": {
-    "default": [
-      0.1,
-      [… 766 more identical "0.1," lines elided; the vector holds 768 values in total …]
-      0.1
-    ]
-  }
-}

Cargo.toml (crate name not shown)

@@ -40,7 +40,7 @@ ureq = "2.10.0"
 uuid = { version = "1.10.0", features = ["serde", "v4"] }

 [dev-dependencies]
-arroy = "0.5.0"
+arroy = "0.4.0"
 big_s = "1.0.2"
 crossbeam = "0.8.4"
 insta = { version = "1.39.0", features = ["json", "redactions"] }

index-scheduler/src/lib.rs

@@ -1263,7 +1263,7 @@ impl IndexScheduler {
                #[cfg(test)]
                self.maybe_fail(tests::FailureLocation::UpdatingTaskAfterProcessBatchFailure)?;

-               tracing::error!("Batch failed {}", error);
+               tracing::info!("Batch failed {}", error);

                self.update_task(&mut wtxn, &task)
                    .map_err(|e| Error::TaskDatabaseUpdate(Box::new(e)))?;
@@ -1477,7 +1477,7 @@
                .map(
                    |IndexEmbeddingConfig {
                         name,
-                        config: milli::vector::EmbeddingConfig { embedder_options, prompt, quantized },
+                        config: milli::vector::EmbeddingConfig { embedder_options, prompt },
                         ..
                     }| {
                        let prompt =
@@ -1486,10 +1486,7 @@
                        {
                            let embedders = self.embedders.read().unwrap();
                            if let Some(embedder) = embedders.get(&embedder_options) {
-                               return Ok((
-                                   name,
-                                   (embedder.clone(), prompt, quantized.unwrap_or_default()),
-                               ));
+                               return Ok((name, (embedder.clone(), prompt)));
                            }
                        }
@@ -1503,7 +1500,7 @@
                            let mut embedders = self.embedders.write().unwrap();
                            embedders.insert(embedder_options, embedder.clone());
                        }
-                       Ok((name, (embedder, prompt, quantized.unwrap_or_default())))
+                       Ok((name, (embedder, prompt)))
                    },
                )
                .collect();
@@ -5200,7 +5197,7 @@
        let simple_hf_name = name.clone();

        let configs = index_scheduler.embedders(configs).unwrap();
-       let (hf_embedder, _, _) = configs.get(&simple_hf_name).unwrap();
+       let (hf_embedder, _) = configs.get(&simple_hf_name).unwrap();
        let beagle_embed = hf_embedder.embed_one(S("Intel the beagle best doggo")).unwrap();
        let lab_embed = hf_embedder.embed_one(S("Max the lab best doggo")).unwrap();
        let patou_embed = hf_embedder.embed_one(S("kefir the patou best doggo")).unwrap();
@@ -5522,7 +5519,6 @@
                                    400,
                                ),
                            },
-                           quantized: None,
                        },
                        user_provided: RoaringBitmap<[1, 2]>,
                    },
@@ -5535,8 +5531,28 @@
        // the document with the id 3 should keep its original embedding
        let docid = index.external_documents_ids.get(&rtxn, "3").unwrap().unwrap();
-       let embeddings = index.embeddings(&rtxn, docid).unwrap();
-       let embeddings = &embeddings["my_doggo_embedder"];
+       let mut embeddings = Vec::new();
+
+       'vectors: for i in 0..=u8::MAX {
+           let reader = arroy::Reader::open(&rtxn, i as u16, index.vector_arroy)
+               .map(Some)
+               .or_else(|e| match e {
+                   arroy::Error::MissingMetadata(_) => Ok(None),
+                   e => Err(e),
+               })
+               .transpose();
+
+           let Some(reader) = reader else {
+               break 'vectors;
+           };
+
+           let embedding = reader.unwrap().item_vector(&rtxn, docid).unwrap();
+
+           if let Some(embedding) = embedding {
+               embeddings.push(embedding)
+           } else {
+               break 'vectors;
+           }
+       }

        snapshot!(embeddings.len(), @"1");
        assert!(embeddings[0].iter().all(|i| *i == 3.0), "{:?}", embeddings[0]);
@@ -5721,7 +5737,6 @@
                                    400,
                                ),
                            },
-                           quantized: None,
                        },
                        user_provided: RoaringBitmap<[0]>,
                    },
@@ -5765,7 +5780,6 @@
                                    400,
                                ),
                            },
-                           quantized: None,
                        },
                        user_provided: RoaringBitmap<[]>,
                    },
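
The replacement test code in the +5531,28 hunk walks arroy readers directly instead of going through the removed index.embeddings API. Here is the same logic as a commented sketch (taken from the added lines above; assumes arroy 0.4, where each embedder has its own u16-indexed reader over the index's vector database):

    let mut embeddings = Vec::new();

    'vectors: for i in 0..=u8::MAX {
        // Opening a reader fails with MissingMetadata once `i` passes the
        // last populated embedder index; treat that as "no more readers"
        // rather than as a hard error.
        let reader = arroy::Reader::open(&rtxn, i as u16, index.vector_arroy)
            .map(Some)
            .or_else(|e| match e {
                arroy::Error::MissingMetadata(_) => Ok(None),
                e => Err(e),
            })
            .transpose();

        // `transpose` turned Result<Option<_>, _> into Option<Result<_, _>>,
        // so `None` here means the scan is complete.
        let Some(reader) = reader else {
            break 'vectors;
        };

        // Collect this document's vector from the current embedder, if any.
        match reader.unwrap().item_vector(&rtxn, docid).unwrap() {
            Some(embedding) => embeddings.push(embedding),
            None => break 'vectors,
        }
    }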

Snapshot file (name not shown)

@@ -6,7 +6,7 @@ source: index-scheduler/src/lib.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
+0 {uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
 1 {uid: 1, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
 2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
 ----------------------------------------------------------------------

Snapshot file (name not shown)

@@ -6,7 +6,7 @@ source: index-scheduler/src/lib.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: succeeded, […same SettingsUpdate task line as in the first snapshot above, with "binary_quantized: NotSet" on both embedders…]}}
+0 {uid: 0, status: succeeded, […same line with the "binary_quantized: NotSet" fields removed…]}}
 1 {uid: 1, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
 2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
 ----------------------------------------------------------------------

Snapshot file (name not shown)

@@ -6,7 +6,7 @@ source: index-scheduler/src/lib.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: succeeded, […same SettingsUpdate task line as in the first snapshot above, with "binary_quantized: NotSet" on both embedders…]}}
+0 {uid: 0, status: succeeded, […same line with the "binary_quantized: NotSet" fields removed…]}}
 1 {uid: 1, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
 ----------------------------------------------------------------------
 ### Status:

Snapshot file (name not shown)

@@ -6,7 +6,7 @@ source: index-scheduler/src/lib.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: succeeded, […same SettingsUpdate task line as in the first snapshot above, with "binary_quantized: NotSet" on both embedders…]}}
+0 {uid: 0, status: succeeded, […same line with the "binary_quantized: NotSet" fields removed…]}}
 1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
 ----------------------------------------------------------------------
 ### Status:

Snapshot file (name not shown)

@@ -6,7 +6,7 @@ source: index-scheduler/src/lib.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, […same SettingsUpdate task line as in the first snapshot above, with "binary_quantized: NotSet" on both embedders…]}}
+0 {uid: 0, status: enqueued, […same line with the "binary_quantized: NotSet" fields removed…]}}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,]


@@ -6,7 +6,7 @@ source: index-scheduler/src/lib.rs
[] []
---------------------------------------------------------------------- ----------------------------------------------------------------------
### All Tasks: ### All Tasks:
0 {uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }} 0 {uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: 
Set(384), document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
---------------------------------------------------------------------- ----------------------------------------------------------------------
### Status: ### Status:
enqueued [] enqueued []


@@ -6,7 +6,7 @@ source: index-scheduler/src/lib.rs
[] []
---------------------------------------------------------------------- ----------------------------------------------------------------------
### All Tasks: ### All Tasks:
0 {uid: 0, status: enqueued, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }} 0 {uid: 0, status: enqueued, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(4), document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), 
model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(4), document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
---------------------------------------------------------------------- ----------------------------------------------------------------------
### Status: ### Status:
enqueued [0,] enqueued [0,]


@@ -6,7 +6,7 @@ source: index-scheduler/src/lib.rs
[] []
---------------------------------------------------------------------- ----------------------------------------------------------------------
### All Tasks: ### All Tasks:
0 {uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }} 0 {uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(4), document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), 
model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(4), document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
---------------------------------------------------------------------- ----------------------------------------------------------------------
### Status: ### Status:
enqueued [] enqueued []


@@ -66,8 +66,3 @@ khmer = ["milli/khmer"]
vietnamese = ["milli/vietnamese"] vietnamese = ["milli/vietnamese"]
# force swedish character recomposition # force swedish character recomposition
swedish-recomposition = ["milli/swedish-recomposition"] swedish-recomposition = ["milli/swedish-recomposition"]
# allow german tokenization
german = ["milli/german"]
# allow turkish normalization
turkish = ["milli/turkish"]


@@ -395,10 +395,7 @@ impl ErrorCode for milli::Error {
| UserError::InvalidSettingsDimensions { .. } | UserError::InvalidSettingsDimensions { .. }
| UserError::InvalidUrl { .. } | UserError::InvalidUrl { .. }
| UserError::InvalidSettingsDocumentTemplateMaxBytes { .. } | UserError::InvalidSettingsDocumentTemplateMaxBytes { .. }
| UserError::InvalidPrompt(_) | UserError::InvalidPrompt(_) => Code::InvalidSettingsEmbedders,
| UserError::InvalidDisableBinaryQuantization { .. } => {
Code::InvalidSettingsEmbedders
}
UserError::TooManyEmbedders(_) => Code::InvalidSettingsEmbedders, UserError::TooManyEmbedders(_) => Code::InvalidSettingsEmbedders,
UserError::InvalidPromptForEmbeddings(..) => Code::InvalidSettingsEmbedders, UserError::InvalidPromptForEmbeddings(..) => Code::InvalidSettingsEmbedders,
UserError::NoPrimaryKeyCandidateFound => Code::IndexPrimaryKeyNoCandidateFound, UserError::NoPrimaryKeyCandidateFound => Code::IndexPrimaryKeyNoCandidateFound,


@@ -39,14 +39,12 @@ macro_rules! make_locale {
pub enum Locale { pub enum Locale {
$($iso_639_1,)+ $($iso_639_1,)+
$($iso_639_3,)+ $($iso_639_3,)+
Cmn,
} }
impl From<milli::tokenizer::Language> for Locale { impl From<milli::tokenizer::Language> for Locale {
fn from(other: milli::tokenizer::Language) -> Locale { fn from(other: milli::tokenizer::Language) -> Locale {
match other { match other {
$(milli::tokenizer::Language::$iso_639_3 => Locale::$iso_639_3,)+ $(milli::tokenizer::Language::$iso_639_3 => Locale::$iso_639_3,)+
milli::tokenizer::Language::Cmn => Locale::Cmn,
} }
} }
} }
@@ -56,7 +54,6 @@ macro_rules! make_locale {
match other { match other {
$(Locale::$iso_639_1 => milli::tokenizer::Language::$iso_639_3,)+ $(Locale::$iso_639_1 => milli::tokenizer::Language::$iso_639_3,)+
$(Locale::$iso_639_3 => milli::tokenizer::Language::$iso_639_3,)+ $(Locale::$iso_639_3 => milli::tokenizer::Language::$iso_639_3,)+
Locale::Cmn => milli::tokenizer::Language::Cmn,
} }
} }
} }
@@ -68,7 +65,6 @@ macro_rules! make_locale {
let locale = match s { let locale = match s {
$($iso_639_1_str => Locale::$iso_639_1,)+ $($iso_639_1_str => Locale::$iso_639_1,)+
$($iso_639_3_str => Locale::$iso_639_3,)+ $($iso_639_3_str => Locale::$iso_639_3,)+
"cmn" => Locale::Cmn,
_ => return Err(LocaleFormatError { invalid_locale: s.to_string() }), _ => return Err(LocaleFormatError { invalid_locale: s.to_string() }),
}; };
@@ -83,9 +79,8 @@ macro_rules! make_locale {
impl std::fmt::Display for LocaleFormatError { impl std::fmt::Display for LocaleFormatError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut valid_locales = [$($iso_639_1_str),+,$($iso_639_3_str),+,"cmn"]; let valid_locales = [$($iso_639_1_str),+,$($iso_639_3_str),+].join(", ");
valid_locales.sort_by(|left, right| left.len().cmp(&right.len()).then(left.cmp(right))); write!(f, "Unsupported locale `{}`, expected one of {}", self.invalid_locale, valid_locales)
write!(f, "Unsupported locale `{}`, expected one of {}", self.invalid_locale, valid_locales.join(", "))
} }
} }
@@ -104,6 +99,7 @@ make_locale!(
(Bg, "bg") => (Bul, "bul"), (Bg, "bg") => (Bul, "bul"),
(Ca, "ca") => (Cat, "cat"), (Ca, "ca") => (Cat, "cat"),
(Cs, "cs") => (Ces, "ces"), (Cs, "cs") => (Ces, "ces"),
(Zh, "zh") => (Cmn, "cmn"),
(Da, "da") => (Dan, "dan"), (Da, "da") => (Dan, "dan"),
(De, "de") => (Deu, "deu"), (De, "de") => (Deu, "deu"),
(El, "el") => (Ell, "ell"), (El, "el") => (Ell, "ell"),
@@ -161,6 +157,5 @@ make_locale!(
(Uz, "uz") => (Uzb, "uzb"), (Uz, "uz") => (Uzb, "uzb"),
(Vi, "vi") => (Vie, "vie"), (Vi, "vi") => (Vie, "vie"),
(Yi, "yi") => (Yid, "yid"), (Yi, "yi") => (Yid, "yid"),
(Zh, "zh") => (Zho, "zho"),
(Zu, "zu") => (Zul, "zul"), (Zu, "zu") => (Zul, "zul"),
); );
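For reference, the removed `Display` implementation on the left orders the locale list by code length before joining it, so short ISO 639-1 codes are listed ahead of ISO 639-3 ones. A minimal, self-contained sketch of that ordering, with a hardcoded list standing in for the macro-generated one:

```rust
fn main() {
    // Stand-in for the macro-generated ISO 639-1/639-3 lists; the real
    // array comes from `make_locale!` and also appends "cmn".
    let mut valid_locales = ["en", "eng", "zh", "cmn", "de", "deu"];

    // Shorter codes first, then lexicographic, exactly as the removed
    // `Display` impl sorted before joining with ", ".
    valid_locales.sort_by(|left, right| left.len().cmp(&right.len()).then(left.cmp(right)));

    // Prints: Unsupported locale `xx`, expected one of de, en, zh, cmn, deu, eng
    println!(
        "Unsupported locale `{}`, expected one of {}",
        "xx",
        valid_locales.join(", ")
    );
}
```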


@@ -153,9 +153,7 @@ greek = ["meilisearch-types/greek"]
khmer = ["meilisearch-types/khmer"] khmer = ["meilisearch-types/khmer"]
vietnamese = ["meilisearch-types/vietnamese"] vietnamese = ["meilisearch-types/vietnamese"]
swedish-recomposition = ["meilisearch-types/swedish-recomposition"] swedish-recomposition = ["meilisearch-types/swedish-recomposition"]
german = ["meilisearch-types/german"]
turkish = ["meilisearch-types/turkish"]
[package.metadata.mini-dashboard] [package.metadata.mini-dashboard]
assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.15/build.zip" assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.14/build.zip"
sha1 = "d057600b4a839a2e0c0be7a372cd1b2683f3ca7e" sha1 = "592d1b5a3459d621d0aae1dded8fe3154f5c38fe"


@@ -265,8 +265,6 @@ struct Infos {
experimental_contains_filter: bool, experimental_contains_filter: bool,
experimental_enable_metrics: bool, experimental_enable_metrics: bool,
experimental_search_queue_size: usize, experimental_search_queue_size: usize,
experimental_drop_search_after: usize,
experimental_nb_searches_per_core: usize,
experimental_logs_mode: LogMode, experimental_logs_mode: LogMode,
experimental_replication_parameters: bool, experimental_replication_parameters: bool,
experimental_enable_logs_route: bool, experimental_enable_logs_route: bool,
@@ -310,8 +308,6 @@ impl From<Opt> for Infos {
experimental_contains_filter, experimental_contains_filter,
experimental_enable_metrics, experimental_enable_metrics,
experimental_search_queue_size, experimental_search_queue_size,
experimental_drop_search_after,
experimental_nb_searches_per_core,
experimental_logs_mode, experimental_logs_mode,
experimental_replication_parameters, experimental_replication_parameters,
experimental_enable_logs_route, experimental_enable_logs_route,
@@ -363,8 +359,6 @@ impl From<Opt> for Infos {
experimental_contains_filter, experimental_contains_filter,
experimental_enable_metrics, experimental_enable_metrics,
experimental_search_queue_size, experimental_search_queue_size,
experimental_drop_search_after: experimental_drop_search_after.into(),
experimental_nb_searches_per_core: experimental_nb_searches_per_core.into(),
experimental_logs_mode, experimental_logs_mode,
experimental_replication_parameters, experimental_replication_parameters,
experimental_enable_logs_route, experimental_enable_logs_route,
@@ -652,6 +646,8 @@ pub struct SearchAggregator {
max_vector_size: usize, max_vector_size: usize,
// Whether the semantic ratio passed to a hybrid search equals the default ratio. // Whether the semantic ratio passed to a hybrid search equals the default ratio.
semantic_ratio: bool, semantic_ratio: bool,
// Whether a non-default embedder was specified
embedder: bool,
hybrid: bool, hybrid: bool,
retrieve_vectors: bool, retrieve_vectors: bool,
@@ -799,6 +795,7 @@ impl SearchAggregator {
if let Some(hybrid) = hybrid { if let Some(hybrid) = hybrid {
ret.semantic_ratio = hybrid.semantic_ratio != DEFAULT_SEMANTIC_RATIO(); ret.semantic_ratio = hybrid.semantic_ratio != DEFAULT_SEMANTIC_RATIO();
ret.embedder = hybrid.embedder.is_some();
ret.hybrid = true; ret.hybrid = true;
} }
@@ -866,6 +863,7 @@ impl SearchAggregator {
show_ranking_score, show_ranking_score,
show_ranking_score_details, show_ranking_score_details,
semantic_ratio, semantic_ratio,
embedder,
hybrid, hybrid,
total_degraded, total_degraded,
total_used_negative_operator, total_used_negative_operator,
@@ -925,6 +923,7 @@ impl SearchAggregator {
self.retrieve_vectors |= retrieve_vectors; self.retrieve_vectors |= retrieve_vectors;
self.semantic_ratio |= semantic_ratio; self.semantic_ratio |= semantic_ratio;
self.hybrid |= hybrid; self.hybrid |= hybrid;
self.embedder |= embedder;
// pagination // pagination
self.max_limit = self.max_limit.max(max_limit); self.max_limit = self.max_limit.max(max_limit);
@@ -1000,6 +999,7 @@ impl SearchAggregator {
show_ranking_score, show_ranking_score,
show_ranking_score_details, show_ranking_score_details,
semantic_ratio, semantic_ratio,
embedder,
hybrid, hybrid,
total_degraded, total_degraded,
total_used_negative_operator, total_used_negative_operator,
@@ -1051,6 +1051,7 @@ impl SearchAggregator {
"hybrid": { "hybrid": {
"enabled": hybrid, "enabled": hybrid,
"semantic_ratio": semantic_ratio, "semantic_ratio": semantic_ratio,
"embedder": embedder,
}, },
"pagination": { "pagination": {
"max_limit": max_limit, "max_limit": max_limit,
@@ -1578,10 +1579,6 @@ impl EditDocumentsByFunctionAggregator {
pub fn into_event(self, user: &User, event_name: &str) -> Option<Track> { pub fn into_event(self, user: &User, event_name: &str) -> Option<Track> {
let Self { timestamp, user_agents, index_creation, filtered, with_context } = self; let Self { timestamp, user_agents, index_creation, filtered, with_context } = self;
// if we had no timestamp it means we never encountered any events and
// thus we don't need to send this event.
let timestamp = timestamp?;
let properties = json!({ let properties = json!({
"user-agent": user_agents, "user-agent": user_agents,
"filtered": filtered, "filtered": filtered,
@@ -1590,7 +1587,7 @@ impl EditDocumentsByFunctionAggregator {
}); });
Some(Track { Some(Track {
timestamp: Some(timestamp), timestamp,
user: user.clone(), user: user.clone(),
event: event_name.to_string(), event: event_name.to_string(),
properties, properties,
@@ -1785,6 +1782,7 @@ pub struct SimilarAggregator {
used_syntax: HashMap<String, usize>, used_syntax: HashMap<String, usize>,
// Whether a non-default embedder was specified // Whether a non-default embedder was specified
embedder: bool,
retrieve_vectors: bool, retrieve_vectors: bool,
// pagination // pagination
@@ -1805,7 +1803,7 @@ impl SimilarAggregator {
pub fn from_query(query: &SimilarQuery, request: &HttpRequest) -> Self { pub fn from_query(query: &SimilarQuery, request: &HttpRequest) -> Self {
let SimilarQuery { let SimilarQuery {
id: _, id: _,
embedder: _, embedder,
offset, offset,
limit, limit,
attributes_to_retrieve: _, attributes_to_retrieve: _,
@@ -1853,6 +1851,7 @@ impl SimilarAggregator {
ret.show_ranking_score_details = *show_ranking_score_details; ret.show_ranking_score_details = *show_ranking_score_details;
ret.ranking_score_threshold = ranking_score_threshold.is_some(); ret.ranking_score_threshold = ranking_score_threshold.is_some();
ret.embedder = embedder.is_some();
ret.retrieve_vectors = *retrieve_vectors; ret.retrieve_vectors = *retrieve_vectors;
ret ret
@@ -1884,6 +1883,7 @@ impl SimilarAggregator {
max_attributes_to_retrieve, max_attributes_to_retrieve,
show_ranking_score, show_ranking_score,
show_ranking_score_details, show_ranking_score_details,
embedder,
ranking_score_threshold, ranking_score_threshold,
retrieve_vectors, retrieve_vectors,
} = other; } = other;
@@ -1914,6 +1914,7 @@ impl SimilarAggregator {
*used_syntax = used_syntax.saturating_add(value); *used_syntax = used_syntax.saturating_add(value);
} }
self.embedder |= embedder;
self.retrieve_vectors |= retrieve_vectors; self.retrieve_vectors |= retrieve_vectors;
// pagination // pagination
@@ -1947,6 +1948,7 @@ impl SimilarAggregator {
max_attributes_to_retrieve, max_attributes_to_retrieve,
show_ranking_score, show_ranking_score,
show_ranking_score_details, show_ranking_score_details,
embedder,
ranking_score_threshold, ranking_score_threshold,
retrieve_vectors, retrieve_vectors,
} = self; } = self;
@@ -1978,6 +1980,9 @@ impl SimilarAggregator {
"vector": { "vector": {
"retrieve_vectors": retrieve_vectors, "retrieve_vectors": retrieve_vectors,
}, },
"hybrid": {
"embedder": embedder,
},
"pagination": { "pagination": {
"max_limit": max_limit, "max_limit": max_limit,
"max_offset": max_offset, "max_offset": max_offset,


@@ -72,7 +72,7 @@ pub enum MeilisearchHttpError {
DocumentFormat(#[from] DocumentFormatError), DocumentFormat(#[from] DocumentFormatError),
#[error(transparent)] #[error(transparent)]
Join(#[from] JoinError), Join(#[from] JoinError),
#[error("Invalid request: missing `hybrid` parameter when `vector` is present.")] #[error("Invalid request: missing `hybrid` parameter when both `q` and `vector` are present.")]
MissingSearchHybrid, MissingSearchHybrid,
} }


@@ -5,7 +5,6 @@ use std::path::PathBuf;
use std::str::FromStr; use std::str::FromStr;
use std::sync::Arc; use std::sync::Arc;
use std::thread::available_parallelism; use std::thread::available_parallelism;
use std::time::Duration;
use actix_web::http::KeepAlive; use actix_web::http::KeepAlive;
use actix_web::web::Data; use actix_web::web::Data;
@@ -154,14 +153,8 @@ async fn run_http(
let auth_controller = Data::from(auth_controller); let auth_controller = Data::from(auth_controller);
let search_queue = SearchQueue::new( let search_queue = SearchQueue::new(
opt.experimental_search_queue_size, opt.experimental_search_queue_size,
available_parallelism() available_parallelism().unwrap_or(NonZeroUsize::new(2).unwrap()),
.unwrap_or(NonZeroUsize::new(2).unwrap()) );
.checked_mul(opt.experimental_nb_searches_per_core)
.unwrap_or(NonZeroUsize::MAX),
)
.with_time_to_abort(Duration::from_secs(
usize::from(opt.experimental_drop_search_after) as u64
));
let search_queue = Data::new(search_queue); let search_queue = Data::new(search_queue);
let http_server = HttpServer::new(move || { let http_server = HttpServer::new(move || {
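The removed queue setup on the left sizes the search queue from the detected parallelism and attaches a time-to-abort. A standalone sketch of that arithmetic, with hardcoded stand-ins for the two removed options (4 searches per core and 60 seconds, per the defaults in the opt.rs hunk below):

```rust
use std::num::NonZeroUsize;
use std::thread::available_parallelism;
use std::time::Duration;

fn main() {
    // Hardcoded stand-ins for `experimental_nb_searches_per_core` and
    // `experimental_drop_search_after`.
    let nb_searches_per_core = NonZeroUsize::new(4).unwrap();
    let drop_search_after = NonZeroUsize::new(60).unwrap();

    // Fall back to 2 cores when detection fails; saturate to
    // NonZeroUsize::MAX instead of overflowing on the multiplication.
    let queue_parallelism = available_parallelism()
        .unwrap_or(NonZeroUsize::new(2).unwrap())
        .checked_mul(nb_searches_per_core)
        .unwrap_or(NonZeroUsize::MAX);

    // Searches that wait longer than this are dropped rather than answered.
    let time_to_abort = Duration::from_secs(usize::from(drop_search_after) as u64);

    println!("parallelism: {queue_parallelism}, abort after: {time_to_abort:?}");
}
```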


@@ -2,7 +2,7 @@ use std::env::VarError;
use std::ffi::OsStr; use std::ffi::OsStr;
use std::fmt::Display; use std::fmt::Display;
use std::io::{BufReader, Read}; use std::io::{BufReader, Read};
use std::num::{NonZeroUsize, ParseIntError}; use std::num::ParseIntError;
use std::ops::Deref; use std::ops::Deref;
use std::path::PathBuf; use std::path::PathBuf;
use std::str::FromStr; use std::str::FromStr;
@@ -55,8 +55,6 @@ const MEILI_EXPERIMENTAL_ENABLE_LOGS_ROUTE: &str = "MEILI_EXPERIMENTAL_ENABLE_LO
const MEILI_EXPERIMENTAL_CONTAINS_FILTER: &str = "MEILI_EXPERIMENTAL_CONTAINS_FILTER"; const MEILI_EXPERIMENTAL_CONTAINS_FILTER: &str = "MEILI_EXPERIMENTAL_CONTAINS_FILTER";
const MEILI_EXPERIMENTAL_ENABLE_METRICS: &str = "MEILI_EXPERIMENTAL_ENABLE_METRICS"; const MEILI_EXPERIMENTAL_ENABLE_METRICS: &str = "MEILI_EXPERIMENTAL_ENABLE_METRICS";
const MEILI_EXPERIMENTAL_SEARCH_QUEUE_SIZE: &str = "MEILI_EXPERIMENTAL_SEARCH_QUEUE_SIZE"; const MEILI_EXPERIMENTAL_SEARCH_QUEUE_SIZE: &str = "MEILI_EXPERIMENTAL_SEARCH_QUEUE_SIZE";
const MEILI_EXPERIMENTAL_DROP_SEARCH_AFTER: &str = "MEILI_EXPERIMENTAL_DROP_SEARCH_AFTER";
const MEILI_EXPERIMENTAL_NB_SEARCHES_PER_CORE: &str = "MEILI_EXPERIMENTAL_NB_SEARCHES_PER_CORE";
const MEILI_EXPERIMENTAL_REDUCE_INDEXING_MEMORY_USAGE: &str = const MEILI_EXPERIMENTAL_REDUCE_INDEXING_MEMORY_USAGE: &str =
"MEILI_EXPERIMENTAL_REDUCE_INDEXING_MEMORY_USAGE"; "MEILI_EXPERIMENTAL_REDUCE_INDEXING_MEMORY_USAGE";
const MEILI_EXPERIMENTAL_MAX_NUMBER_OF_BATCHED_TASKS: &str = const MEILI_EXPERIMENTAL_MAX_NUMBER_OF_BATCHED_TASKS: &str =
@@ -359,26 +357,10 @@ pub struct Opt {
/// Lets you customize the size of the search queue. Meilisearch processes your search requests as fast as possible but once the /// Lets you customize the size of the search queue. Meilisearch processes your search requests as fast as possible but once the
/// queue is full it starts returning HTTP 503, Service Unavailable. /// queue is full it starts returning HTTP 503, Service Unavailable.
/// The default value is 1000. /// The default value is 1000.
#[clap(long, env = MEILI_EXPERIMENTAL_SEARCH_QUEUE_SIZE, default_value_t = default_experimental_search_queue_size())] #[clap(long, env = MEILI_EXPERIMENTAL_SEARCH_QUEUE_SIZE, default_value_t = 1000)]
#[serde(default = "default_experimental_search_queue_size")] #[serde(default)]
pub experimental_search_queue_size: usize, pub experimental_search_queue_size: usize,
/// Experimental drop search after. For more information, see: <https://github.com/orgs/meilisearch/discussions/783>
///
/// Lets you customize after how many seconds Meilisearch should consider a search request irrelevant and drop it.
/// The default value is 60.
#[clap(long, env = MEILI_EXPERIMENTAL_DROP_SEARCH_AFTER, default_value_t = default_drop_search_after())]
#[serde(default = "default_drop_search_after")]
pub experimental_drop_search_after: NonZeroUsize,
/// Experimental number of searches per core. For more information, see: <https://github.com/orgs/meilisearch/discussions/784>
///
/// Lets you customize how many search requests can run on each core concurrently.
/// The default value is 4.
#[clap(long, env = MEILI_EXPERIMENTAL_NB_SEARCHES_PER_CORE, default_value_t = default_nb_searches_per_core())]
#[serde(default = "default_nb_searches_per_core")]
pub experimental_nb_searches_per_core: NonZeroUsize,
/// Experimental logs mode feature. For more information, see: <https://github.com/orgs/meilisearch/discussions/723> /// Experimental logs mode feature. For more information, see: <https://github.com/orgs/meilisearch/discussions/723>
/// ///
/// Change the mode of the logs on the console. /// Change the mode of the logs on the console.
@@ -510,8 +492,6 @@ impl Opt {
experimental_contains_filter, experimental_contains_filter,
experimental_enable_metrics, experimental_enable_metrics,
experimental_search_queue_size, experimental_search_queue_size,
experimental_drop_search_after,
experimental_nb_searches_per_core,
experimental_logs_mode, experimental_logs_mode,
experimental_enable_logs_route, experimental_enable_logs_route,
experimental_replication_parameters, experimental_replication_parameters,
@@ -579,14 +559,6 @@ impl Opt {
MEILI_EXPERIMENTAL_SEARCH_QUEUE_SIZE, MEILI_EXPERIMENTAL_SEARCH_QUEUE_SIZE,
experimental_search_queue_size.to_string(), experimental_search_queue_size.to_string(),
); );
export_to_env_if_not_present(
MEILI_EXPERIMENTAL_DROP_SEARCH_AFTER,
experimental_drop_search_after.to_string(),
);
export_to_env_if_not_present(
MEILI_EXPERIMENTAL_NB_SEARCHES_PER_CORE,
experimental_nb_searches_per_core.to_string(),
);
export_to_env_if_not_present( export_to_env_if_not_present(
MEILI_EXPERIMENTAL_LOGS_MODE, MEILI_EXPERIMENTAL_LOGS_MODE,
experimental_logs_mode.to_string(), experimental_logs_mode.to_string(),
@@ -918,18 +890,6 @@ fn default_dump_dir() -> PathBuf {
PathBuf::from(DEFAULT_DUMP_DIR) PathBuf::from(DEFAULT_DUMP_DIR)
} }
fn default_experimental_search_queue_size() -> usize {
1000
}
fn default_drop_search_after() -> NonZeroUsize {
NonZeroUsize::new(60).unwrap()
}
fn default_nb_searches_per_core() -> NonZeroUsize {
NonZeroUsize::new(4).unwrap()
}
/// Indicates if a snapshot was scheduled, and if yes with which interval. /// Indicates if a snapshot was scheduled, and if yes with which interval.
#[derive(Debug, Default, Copy, Clone, Deserialize, Serialize)] #[derive(Debug, Default, Copy, Clone, Deserialize, Serialize)]
pub enum ScheduleSnapshot { pub enum ScheduleSnapshot {
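The three removed default functions, reconstructed verbatim from this hunk as a compilable unit; the `Opt` plumbing and the `export_to_env_if_not_present` calls are elided:

```rust
use std::num::NonZeroUsize;

fn default_experimental_search_queue_size() -> usize {
    1000
}

fn default_drop_search_after() -> NonZeroUsize {
    NonZeroUsize::new(60).unwrap() // seconds before a queued search is dropped
}

fn default_nb_searches_per_core() -> NonZeroUsize {
    NonZeroUsize::new(4).unwrap() // concurrent searches allowed per core
}

fn main() {
    // These are the values exported to the MEILI_EXPERIMENTAL_* variables
    // when the user has not set them explicitly.
    println!("search queue size : {}", default_experimental_search_queue_size());
    println!("drop search after : {}s", default_drop_search_after());
    println!("searches per core : {}", default_nb_searches_per_core());
}
```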


@@ -128,10 +128,8 @@ impl std::ops::Deref for SemanticRatioGet {
} }
} }
impl TryFrom<SearchQueryGet> for SearchQuery { impl From<SearchQueryGet> for SearchQuery {
type Error = ResponseError; fn from(other: SearchQueryGet) -> Self {
fn try_from(other: SearchQueryGet) -> Result<Self, Self::Error> {
let filter = match other.filter { let filter = match other.filter {
Some(f) => match serde_json::from_str(&f) { Some(f) => match serde_json::from_str(&f) {
Ok(v) => Some(v), Ok(v) => Some(v),
@@ -142,28 +140,19 @@ impl TryFrom<SearchQueryGet> for SearchQuery {
let hybrid = match (other.hybrid_embedder, other.hybrid_semantic_ratio) { let hybrid = match (other.hybrid_embedder, other.hybrid_semantic_ratio) {
(None, None) => None, (None, None) => None,
(None, Some(_)) => { (None, Some(semantic_ratio)) => {
return Err(ResponseError::from_msg( Some(HybridQuery { semantic_ratio: *semantic_ratio, embedder: None })
"`hybridEmbedder` is mandatory when `hybridSemanticRatio` is present".into(),
meilisearch_types::error::Code::InvalidHybridQuery,
));
}
(Some(embedder), None) => {
Some(HybridQuery { semantic_ratio: DEFAULT_SEMANTIC_RATIO(), embedder })
} }
(Some(embedder), None) => Some(HybridQuery {
semantic_ratio: DEFAULT_SEMANTIC_RATIO(),
embedder: Some(embedder),
}),
(Some(embedder), Some(semantic_ratio)) => { (Some(embedder), Some(semantic_ratio)) => {
Some(HybridQuery { semantic_ratio: *semantic_ratio, embedder }) Some(HybridQuery { semantic_ratio: *semantic_ratio, embedder: Some(embedder) })
} }
}; };
if other.vector.is_some() && hybrid.is_none() { Self {
return Err(ResponseError::from_msg(
"`hybridEmbedder` is mandatory when `vector` is present".into(),
meilisearch_types::error::Code::MissingSearchHybrid,
));
}
Ok(Self {
q: other.q, q: other.q,
vector: other.vector.map(CS::into_inner), vector: other.vector.map(CS::into_inner),
offset: other.offset.0, offset: other.offset.0,
@@ -190,7 +179,7 @@ impl TryFrom<SearchQueryGet> for SearchQuery {
hybrid, hybrid,
ranking_score_threshold: other.ranking_score_threshold.map(|o| o.0), ranking_score_threshold: other.ranking_score_threshold.map(|o| o.0),
locales: other.locales.map(|o| o.into_iter().collect()), locales: other.locales.map(|o| o.into_iter().collect()),
}) }
} }
} }
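On the left, the removed `TryFrom` impl made `hybridEmbedder` mandatory whenever `hybridSemanticRatio` or `vector` was supplied; the replacement `From` impl accepts an optional embedder instead. A compilable restatement of the removed validation, with plain `String`/`f32` standing in for the deserr wrapper types and 0.5 assumed here as the value of `DEFAULT_SEMANTIC_RATIO()`:

```rust
// Plain-types restatement of the removed validation; 0.5 is an assumed
// stand-in for DEFAULT_SEMANTIC_RATIO().
const DEFAULT_SEMANTIC_RATIO: f32 = 0.5;

fn hybrid_from_query(
    hybrid_embedder: Option<String>,
    hybrid_semantic_ratio: Option<f32>,
    vector: Option<&[f32]>,
) -> Result<Option<(String, f32)>, String> {
    let hybrid = match (hybrid_embedder, hybrid_semantic_ratio) {
        (None, None) => None,
        (None, Some(_)) => {
            return Err("`hybridEmbedder` is mandatory when `hybridSemanticRatio` is present".into())
        }
        (Some(embedder), None) => Some((embedder, DEFAULT_SEMANTIC_RATIO)),
        (Some(embedder), Some(ratio)) => Some((embedder, ratio)),
    };
    // A raw `vector` is meaningless without an embedder to interpret it.
    if vector.is_some() && hybrid.is_none() {
        return Err("`hybridEmbedder` is mandatory when `vector` is present".into());
    }
    Ok(hybrid)
}

fn main() {
    assert!(hybrid_from_query(None, Some(0.2), None).is_err());
    assert!(hybrid_from_query(None, None, Some(&[1.0, 1.0])).is_err());
    let ok = hybrid_from_query(Some("default".into()), None, None).unwrap();
    assert_eq!(ok, Some(("default".to_string(), DEFAULT_SEMANTIC_RATIO)));
}
```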
@@ -230,7 +219,7 @@ pub async fn search_with_url_query(
debug!(parameters = ?params, "Search get"); debug!(parameters = ?params, "Search get");
let index_uid = IndexUid::try_from(index_uid.into_inner())?; let index_uid = IndexUid::try_from(index_uid.into_inner())?;
let mut query: SearchQuery = params.into_inner().try_into()?; let mut query: SearchQuery = params.into_inner().into();
// Tenant token search_rules. // Tenant token search_rules.
if let Some(search_rules) = index_scheduler.filters().get_index_search_rules(&index_uid) { if let Some(search_rules) = index_scheduler.filters().get_index_search_rules(&index_uid) {
@@ -323,36 +312,44 @@ pub fn search_kind(
features.check_vector("Passing `hybrid` as a parameter")?; features.check_vector("Passing `hybrid` as a parameter")?;
} }
// handle with care, the order of cases matters, the semantics is subtle // regardless of anything, always do a keyword search when we don't have a vector and the query is whitespace or missing
match (query.q.as_deref(), &query.hybrid, query.vector.as_deref()) { if query.vector.is_none() {
// empty query, no vector => placeholder search match &query.q {
(Some(q), _, None) if q.trim().is_empty() => Ok(SearchKind::KeywordOnly), Some(q) if q.trim().is_empty() => return Ok(SearchKind::KeywordOnly),
// no query, no vector => placeholder search None => return Ok(SearchKind::KeywordOnly),
(None, _, None) => Ok(SearchKind::KeywordOnly), _ => {}
// hybrid.semantic_ratio == 1.0 => vector
(_, Some(HybridQuery { semantic_ratio, embedder }), v) if **semantic_ratio == 1.0 => {
SearchKind::semantic(index_scheduler, index, embedder, v.map(|v| v.len()))
} }
// hybrid.semantic_ratio == 0.0 => keyword }
(_, Some(HybridQuery { semantic_ratio, embedder: _ }), _) if **semantic_ratio == 0.0 => {
match &query.hybrid {
Some(HybridQuery { semantic_ratio, embedder }) if **semantic_ratio == 1.0 => {
Ok(SearchKind::semantic(
index_scheduler,
index,
embedder.as_deref(),
query.vector.as_ref().map(Vec::len),
)?)
}
Some(HybridQuery { semantic_ratio, embedder: _ }) if **semantic_ratio == 0.0 => {
Ok(SearchKind::KeywordOnly) Ok(SearchKind::KeywordOnly)
} }
// no query, hybrid, vector => semantic Some(HybridQuery { semantic_ratio, embedder }) => Ok(SearchKind::hybrid(
(None, Some(HybridQuery { semantic_ratio: _, embedder }), Some(v)) => {
SearchKind::semantic(index_scheduler, index, embedder, Some(v.len()))
}
// query, no hybrid, no vector => keyword
(Some(_), None, None) => Ok(SearchKind::KeywordOnly),
// query, hybrid, maybe vector => hybrid
(Some(_), Some(HybridQuery { semantic_ratio, embedder }), v) => SearchKind::hybrid(
index_scheduler, index_scheduler,
index, index,
embedder, embedder.as_deref(),
**semantic_ratio, **semantic_ratio,
v.map(|v| v.len()), query.vector.as_ref().map(Vec::len),
), )?),
None => match (query.q.as_deref(), query.vector.as_deref()) {
(_, None, Some(_)) => Err(MeilisearchHttpError::MissingSearchHybrid.into()), (_query, None) => Ok(SearchKind::KeywordOnly),
(None, Some(_vector)) => Ok(SearchKind::semantic(
index_scheduler,
index,
None,
query.vector.as_ref().map(Vec::len),
)?),
(Some(_), Some(_)) => Err(MeilisearchHttpError::MissingSearchHybrid.into()),
},
} }
} }
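As the removed comment warned, case order in the old match was load-bearing. A self-contained restatement of that dispatch with embedder resolution and index access elided; `Kind` is a stand-in for `SearchKind`:

```rust
// Restatement of the removed `search_kind` match; guards are checked
// top to bottom, so the placeholder and ratio-extreme cases win first.
#[derive(Debug, PartialEq)]
enum Kind {
    Keyword,
    Semantic,
    Hybrid,
}

fn kind(q: Option<&str>, hybrid_ratio: Option<f32>, vector: Option<&[f32]>) -> Result<Kind, &'static str> {
    match (q, hybrid_ratio, vector) {
        // empty or missing query without a vector => placeholder/keyword search
        (Some(q), _, None) if q.trim().is_empty() => Ok(Kind::Keyword),
        (None, _, None) => Ok(Kind::Keyword),
        // the ratio extremes short-circuit to a single-sided search
        (_, Some(ratio), _) if ratio == 1.0 => Ok(Kind::Semantic),
        (_, Some(ratio), _) if ratio == 0.0 => Ok(Kind::Keyword),
        (None, Some(_), Some(_)) => Ok(Kind::Semantic),
        (Some(_), None, None) => Ok(Kind::Keyword),
        (Some(_), Some(_), _) => Ok(Kind::Hybrid),
        (_, None, Some(_)) => Err("missing `hybrid` parameter when `vector` is present"),
    }
}

fn main() {
    assert_eq!(kind(Some("Captain"), None, None), Ok(Kind::Keyword));
    assert_eq!(kind(Some("Captain"), Some(0.8), Some(&[1.0, 1.0])), Ok(Kind::Hybrid));
    assert_eq!(kind(None, Some(0.5), Some(&[1.0, 1.0])), Ok(Kind::Semantic));
    assert!(kind(Some("Captain"), None, Some(&[1.0, 1.0])).is_err());
}
```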


@@ -643,19 +643,12 @@ fn embedder_analytics(
.max() .max()
}); });
let binary_quantization_used = setting.as_ref().map(|map| {
map.values()
.filter_map(|config| config.clone().set())
.any(|config| config.binary_quantized.set().is_some())
});
json!( json!(
{ {
"total": setting.as_ref().map(|s| s.len()), "total": setting.as_ref().map(|s| s.len()),
"sources": sources, "sources": sources,
"document_template_used": document_template_used, "document_template_used": document_template_used,
"document_template_max_bytes": document_template_max_bytes, "document_template_max_bytes": document_template_max_bytes
"binary_quantization_used": binary_quantization_used,
} }
) )
} }
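The removed `binary_quantization_used` metric flips to true as soon as any configured embedder has `binary_quantized` explicitly set. A sketch of that computation, with a simplified two-variant stand-in for the real three-state `Setting` type:

```rust
use std::collections::BTreeMap;

// Trimmed stand-in for meilisearch's `Setting<T>` (the real type also
// has a `Reset` variant).
#[derive(Clone)]
enum Setting<T> {
    Set(T),
    NotSet,
}

impl<T> Setting<T> {
    fn set(self) -> Option<T> {
        match self {
            Setting::Set(value) => Some(value),
            Setting::NotSet => None,
        }
    }
}

#[derive(Clone)]
struct EmbedderConfig {
    binary_quantized: Setting<bool>,
}

// Mirrors the removed analytics computation: true as soon as any
// embedder has `binary_quantized` explicitly set.
fn binary_quantization_used(setting: Option<&BTreeMap<String, Setting<EmbedderConfig>>>) -> Option<bool> {
    setting.map(|map| {
        map.values()
            .filter_map(|config| config.clone().set())
            .any(|config| config.binary_quantized.set().is_some())
    })
}

fn main() {
    let mut map = BTreeMap::new();
    map.insert("default".to_string(), Setting::Set(EmbedderConfig { binary_quantized: Setting::Set(true) }));
    map.insert("tiny".to_string(), Setting::Set(EmbedderConfig { binary_quantized: Setting::NotSet }));
    assert_eq!(binary_quantization_used(Some(&map)), Some(true));
    assert_eq!(binary_quantization_used(None), None);
}
```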


@@ -102,8 +102,8 @@ async fn similar(
let index = index_scheduler.index(&index_uid)?; let index = index_scheduler.index(&index_uid)?;
let (embedder_name, embedder, quantized) = let (embedder_name, embedder) =
SearchKind::embedder(&index_scheduler, &index, &query.embedder, None)?; SearchKind::embedder(&index_scheduler, &index, query.embedder.as_deref(), None)?;
tokio::task::spawn_blocking(move || { tokio::task::spawn_blocking(move || {
perform_similar( perform_similar(
@@ -111,7 +111,6 @@ async fn similar(
query, query,
embedder_name, embedder_name,
embedder, embedder,
quantized,
retrieve_vectors, retrieve_vectors,
index_scheduler.features(), index_scheduler.features(),
) )
@@ -140,8 +139,8 @@ pub struct SimilarQueryGet {
show_ranking_score_details: Param<bool>, show_ranking_score_details: Param<bool>,
#[deserr(default, error = DeserrQueryParamError<InvalidSimilarRankingScoreThreshold>, default)] #[deserr(default, error = DeserrQueryParamError<InvalidSimilarRankingScoreThreshold>, default)]
pub ranking_score_threshold: Option<RankingScoreThresholdGet>, pub ranking_score_threshold: Option<RankingScoreThresholdGet>,
#[deserr(error = DeserrQueryParamError<InvalidEmbedder>)] #[deserr(default, error = DeserrQueryParamError<InvalidEmbedder>)]
pub embedder: String, pub embedder: Option<String>,
} }
#[derive(Debug, Clone, Copy, PartialEq, deserr::Deserr)] #[derive(Debug, Clone, Copy, PartialEq, deserr::Deserr)]


@@ -267,54 +267,58 @@ impl fmt::Debug for SearchQuery {
pub struct HybridQuery { pub struct HybridQuery {
#[deserr(default, error = DeserrJsonError<InvalidSearchSemanticRatio>, default)] #[deserr(default, error = DeserrJsonError<InvalidSearchSemanticRatio>, default)]
pub semantic_ratio: SemanticRatio, pub semantic_ratio: SemanticRatio,
#[deserr(error = DeserrJsonError<InvalidEmbedder>)] #[deserr(default, error = DeserrJsonError<InvalidEmbedder>, default)]
pub embedder: String, pub embedder: Option<String>,
} }
#[derive(Clone)] #[derive(Clone)]
pub enum SearchKind { pub enum SearchKind {
KeywordOnly, KeywordOnly,
SemanticOnly { embedder_name: String, embedder: Arc<Embedder>, quantized: bool }, SemanticOnly { embedder_name: String, embedder: Arc<Embedder> },
Hybrid { embedder_name: String, embedder: Arc<Embedder>, quantized: bool, semantic_ratio: f32 }, Hybrid { embedder_name: String, embedder: Arc<Embedder>, semantic_ratio: f32 },
} }
impl SearchKind { impl SearchKind {
pub(crate) fn semantic( pub(crate) fn semantic(
index_scheduler: &index_scheduler::IndexScheduler, index_scheduler: &index_scheduler::IndexScheduler,
index: &Index, index: &Index,
embedder_name: &str, embedder_name: Option<&str>,
vector_len: Option<usize>, vector_len: Option<usize>,
) -> Result<Self, ResponseError> { ) -> Result<Self, ResponseError> {
let (embedder_name, embedder, quantized) = let (embedder_name, embedder) =
Self::embedder(index_scheduler, index, embedder_name, vector_len)?; Self::embedder(index_scheduler, index, embedder_name, vector_len)?;
Ok(Self::SemanticOnly { embedder_name, embedder, quantized }) Ok(Self::SemanticOnly { embedder_name, embedder })
} }
pub(crate) fn hybrid( pub(crate) fn hybrid(
index_scheduler: &index_scheduler::IndexScheduler, index_scheduler: &index_scheduler::IndexScheduler,
index: &Index, index: &Index,
embedder_name: &str, embedder_name: Option<&str>,
semantic_ratio: f32, semantic_ratio: f32,
vector_len: Option<usize>, vector_len: Option<usize>,
) -> Result<Self, ResponseError> { ) -> Result<Self, ResponseError> {
let (embedder_name, embedder, quantized) = let (embedder_name, embedder) =
Self::embedder(index_scheduler, index, embedder_name, vector_len)?; Self::embedder(index_scheduler, index, embedder_name, vector_len)?;
Ok(Self::Hybrid { embedder_name, embedder, quantized, semantic_ratio }) Ok(Self::Hybrid { embedder_name, embedder, semantic_ratio })
} }
pub(crate) fn embedder( pub(crate) fn embedder(
index_scheduler: &index_scheduler::IndexScheduler, index_scheduler: &index_scheduler::IndexScheduler,
index: &Index, index: &Index,
embedder_name: &str, embedder_name: Option<&str>,
vector_len: Option<usize>, vector_len: Option<usize>,
) -> Result<(String, Arc<Embedder>, bool), ResponseError> { ) -> Result<(String, Arc<Embedder>), ResponseError> {
let embedder_configs = index.embedding_configs(&index.read_txn()?)?; let embedder_configs = index.embedding_configs(&index.read_txn()?)?;
let embedders = index_scheduler.embedders(embedder_configs)?; let embedders = index_scheduler.embedders(embedder_configs)?;
let (embedder, _, quantized) = embedders let embedder_name = embedder_name.unwrap_or_else(|| embedders.get_default_embedder_name());
.get(embedder_name)
let embedder = embedders.get(embedder_name);
let embedder = embedder
.ok_or(milli::UserError::InvalidEmbedder(embedder_name.to_owned())) .ok_or(milli::UserError::InvalidEmbedder(embedder_name.to_owned()))
.map_err(milli::Error::from)?; .map_err(milli::Error::from)?
.0;
if let Some(vector_len) = vector_len { if let Some(vector_len) = vector_len {
if vector_len != embedder.dimensions() { if vector_len != embedder.dimensions() {
@@ -328,7 +332,7 @@ impl SearchKind {
} }
} }
Ok((embedder_name.to_owned(), embedder, quantized)) Ok((embedder_name.to_owned(), embedder))
} }
} }
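The removed `SearchKind::embedder` returned the `quantized` flag alongside the embedder and rejected a user-provided vector whose length differs from the embedder's dimensions (the length check itself survives the revert). A reduced model of that lookup; the real code returns `milli` errors and reads the embedder configs from the index:

```rust
use std::collections::HashMap;
use std::sync::Arc;

// Reduced model: each named embedder carries its output dimension count,
// paired with whether its index is binary-quantized.
struct Embedder {
    dimensions: usize,
}

fn resolve(
    embedders: &HashMap<String, (Arc<Embedder>, bool)>,
    embedder_name: &str,
    vector_len: Option<usize>,
) -> Result<(String, Arc<Embedder>, bool), String> {
    let (embedder, quantized) = embedders
        .get(embedder_name)
        .map(|(e, q)| (e.clone(), *q))
        .ok_or_else(|| format!("invalid embedder `{embedder_name}`"))?;

    // A user-provided `vector` must match the embedder's dimensions.
    if let Some(vector_len) = vector_len {
        if vector_len != embedder.dimensions {
            return Err(format!(
                "expected a vector of {} dimensions, got {vector_len}",
                embedder.dimensions
            ));
        }
    }

    Ok((embedder_name.to_owned(), embedder, quantized))
}

fn main() {
    let mut embedders = HashMap::new();
    embedders.insert("default".to_string(), (Arc::new(Embedder { dimensions: 384 }), false));
    assert!(resolve(&embedders, "default", Some(384)).is_ok());
    assert!(resolve(&embedders, "default", Some(2)).is_err());
    assert!(resolve(&embedders, "missing", None).is_err());
}
```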
@@ -534,8 +538,8 @@ pub struct SimilarQuery {
pub limit: usize, pub limit: usize,
#[deserr(default, error = DeserrJsonError<InvalidSimilarFilter>)] #[deserr(default, error = DeserrJsonError<InvalidSimilarFilter>)]
pub filter: Option<Value>, pub filter: Option<Value>,
#[deserr(error = DeserrJsonError<InvalidEmbedder>)] #[deserr(default, error = DeserrJsonError<InvalidEmbedder>, default)]
pub embedder: String, pub embedder: Option<String>,
#[deserr(default, error = DeserrJsonError<InvalidSimilarAttributesToRetrieve>)] #[deserr(default, error = DeserrJsonError<InvalidSimilarAttributesToRetrieve>)]
pub attributes_to_retrieve: Option<BTreeSet<String>>, pub attributes_to_retrieve: Option<BTreeSet<String>>,
#[deserr(default, error = DeserrJsonError<InvalidSimilarRetrieveVectors>)] #[deserr(default, error = DeserrJsonError<InvalidSimilarRetrieveVectors>)]
@@ -789,7 +793,7 @@ fn prepare_search<'t>(
search.query(q); search.query(q);
} }
} }
SearchKind::SemanticOnly { embedder_name, embedder, quantized } => { SearchKind::SemanticOnly { embedder_name, embedder } => {
let vector = match query.vector.clone() { let vector = match query.vector.clone() {
Some(vector) => vector, Some(vector) => vector,
None => { None => {
@@ -803,19 +807,14 @@ fn prepare_search<'t>(
} }
}; };
search.semantic(embedder_name.clone(), embedder.clone(), *quantized, Some(vector)); search.semantic(embedder_name.clone(), embedder.clone(), Some(vector));
} }
SearchKind::Hybrid { embedder_name, embedder, quantized, semantic_ratio: _ } => { SearchKind::Hybrid { embedder_name, embedder, semantic_ratio: _ } => {
if let Some(q) = &query.q { if let Some(q) = &query.q {
search.query(q); search.query(q);
} }
// will be embedded in hybrid search if necessary // will be embedded in hybrid search if necessary
search.semantic( search.semantic(embedder_name.clone(), embedder.clone(), query.vector.clone());
embedder_name.clone(),
embedder.clone(),
*quantized,
query.vector.clone(),
);
} }
} }
@@ -1195,13 +1194,8 @@ impl<'a> HitMaker<'a> {
let vectors_is_hidden = match (&displayed_ids, vectors_fid) { let vectors_is_hidden = match (&displayed_ids, vectors_fid) {
// displayed_ids is a wildcard, so `_vectors` can be displayed regardless of its fid // displayed_ids is a wildcard, so `_vectors` can be displayed regardless of its fid
(None, _) => false, (None, _) => false,
// vectors has no fid, so check its explicit name // displayed_ids is a finite list, and `_vectors` cannot be part of it because it is not an existing field
(Some(_), None) => { (Some(_), None) => true,
// unwrap as otherwise we'd go to the first one
let displayed_names = index.displayed_fields(rtxn)?.unwrap();
!displayed_names
.contains(&milli::vector::parsed_vectors::RESERVED_VECTORS_FIELD_NAME)
}
// displayed_ids is a finite list, so hide if `_vectors` is not part of it
(Some(map), Some(vectors_fid)) => map.contains(&vectors_fid), (Some(map), Some(vectors_fid)) => map.contains(&vectors_fid),
}; };
@@ -1449,7 +1443,6 @@ pub fn perform_similar(
query: SimilarQuery, query: SimilarQuery,
embedder_name: String, embedder_name: String,
embedder: Arc<Embedder>, embedder: Arc<Embedder>,
quantized: bool,
retrieve_vectors: RetrieveVectors, retrieve_vectors: RetrieveVectors,
features: RoFeatures, features: RoFeatures,
) -> Result<SimilarResult, ResponseError> { ) -> Result<SimilarResult, ResponseError> {
@@ -1478,16 +1471,8 @@ pub fn perform_similar(
)); ));
}; };
let mut similar = milli::Similar::new( let mut similar =
internal_id, milli::Similar::new(internal_id, offset, limit, index, &rtxn, embedder_name, embedder);
offset,
limit,
index,
&rtxn,
embedder_name,
embedder,
quantized,
);
if let Some(ref filter) = query.filter { if let Some(ref filter) = query.filter {
if let Some(facets) = parse_filter(filter, Code::InvalidSimilarFilter, features)? { if let Some(facets) = parse_filter(filter, Code::InvalidSimilarFilter, features)? {


@@ -128,7 +128,7 @@ async fn simple_search() {
let (response, code) = index let (response, code) = index
.search_post( .search_post(
json!({"q": "Captain", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.2, "embedder": "default"}, "retrieveVectors": true}), json!({"q": "Captain", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.2}, "retrieveVectors": true}),
) )
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -137,7 +137,7 @@ async fn simple_search() {
let (response, code) = index let (response, code) = index
.search_post( .search_post(
json!({"q": "Captain", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.5, "embedder": "default"}, "showRankingScore": true, "retrieveVectors": true}), json!({"q": "Captain", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.5}, "showRankingScore": true, "retrieveVectors": true}),
) )
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -146,7 +146,7 @@ async fn simple_search() {
let (response, code) = index let (response, code) = index
.search_post( .search_post(
json!({"q": "Captain", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.8, "embedder": "default"}, "showRankingScore": true, "retrieveVectors": true}), json!({"q": "Captain", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.8}, "showRankingScore": true, "retrieveVectors": true}),
) )
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -161,7 +161,7 @@ async fn limit_offset() {
let (response, code) = index let (response, code) = index
.search_post( .search_post(
json!({"q": "Captain", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.2, "embedder": "default"}, "retrieveVectors": true, "offset": 1, "limit": 1}), json!({"q": "Captain", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.2}, "retrieveVectors": true, "offset": 1, "limit": 1}),
) )
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -174,7 +174,7 @@ async fn limit_offset() {
let (response, code) = index let (response, code) = index
.search_post( .search_post(
json!({"q": "Captain", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.9, "embedder": "default"}, "retrieveVectors": true, "offset": 1, "limit": 1}), json!({"q": "Captain", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.9}, "retrieveVectors": true, "offset": 1, "limit": 1}),
) )
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -188,11 +188,8 @@ async fn simple_search_hf() {
let server = Server::new().await; let server = Server::new().await;
let index = index_with_documents_hf(&server, &SIMPLE_SEARCH_DOCUMENTS).await; let index = index_with_documents_hf(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
let (response, code) = index let (response, code) =
.search_post( index.search_post(json!({"q": "Captain", "hybrid": {"semanticRatio": 0.2}})).await;
json!({"q": "Captain", "hybrid": {"semanticRatio": 0.2, "embedder": "default"}}),
)
.await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
snapshot!(response["hits"], @r###"[{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2"},{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3"},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1"}]"###); snapshot!(response["hits"], @r###"[{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2"},{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3"},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1"}]"###);
snapshot!(response["semanticHitCount"], @"0"); snapshot!(response["semanticHitCount"], @"0");
@@ -200,7 +197,7 @@ async fn simple_search_hf() {
let (response, code) = index let (response, code) = index
.search_post( .search_post(
// disable ranking score as the vectors between architectures are not equal // disable ranking score as the vectors between architectures are not equal
json!({"q": "Captain", "hybrid": {"embedder": "default", "semanticRatio": 0.55}, "showRankingScore": false}), json!({"q": "Captain", "hybrid": {"semanticRatio": 0.55}, "showRankingScore": false}),
) )
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -209,7 +206,7 @@ async fn simple_search_hf() {
let (response, code) = index let (response, code) = index
.search_post( .search_post(
json!({"q": "Captain", "hybrid": {"embedder": "default", "semanticRatio": 0.8}, "showRankingScore": false}), json!({"q": "Captain", "hybrid": {"semanticRatio": 0.8}, "showRankingScore": false}),
) )
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -218,7 +215,7 @@ async fn simple_search_hf() {
let (response, code) = index let (response, code) = index
.search_post( .search_post(
json!({"q": "Movie World", "hybrid": {"embedder": "default", "semanticRatio": 0.2}, "showRankingScore": false}), json!({"q": "Movie World", "hybrid": {"semanticRatio": 0.2}, "showRankingScore": false}),
) )
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -227,7 +224,7 @@ async fn simple_search_hf() {
let (response, code) = index let (response, code) = index
.search_post( .search_post(
json!({"q": "Wonder replacement", "hybrid": {"embedder": "default", "semanticRatio": 0.2}, "showRankingScore": false}), json!({"q": "Wonder replacement", "hybrid": {"semanticRatio": 0.2}, "showRankingScore": false}),
) )
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -240,7 +237,7 @@ async fn distribution_shift() {
let server = Server::new().await; let server = Server::new().await;
let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await; let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
let search = json!({"q": "Captain", "vector": [1.0, 1.0], "showRankingScore": true, "hybrid": {"embedder": "default", "semanticRatio": 1.0}, "retrieveVectors": true}); let search = json!({"q": "Captain", "vector": [1.0, 1.0], "showRankingScore": true, "hybrid": {"semanticRatio": 1.0}, "retrieveVectors": true});
let (response, code) = index.search_post(search.clone()).await; let (response, code) = index.search_post(search.clone()).await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
snapshot!(response["hits"], @r###"[{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":{"embeddings":[[2.0,3.0]],"regenerate":false}},"_rankingScore":0.990290343761444},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":{"embeddings":[[1.0,2.0]],"regenerate":false}},"_rankingScore":0.974341630935669},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":{"embeddings":[[1.0,3.0]],"regenerate":false}},"_rankingScore":0.9472135901451112}]"###); snapshot!(response["hits"], @r###"[{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":{"embeddings":[[2.0,3.0]],"regenerate":false}},"_rankingScore":0.990290343761444},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":{"embeddings":[[1.0,2.0]],"regenerate":false}},"_rankingScore":0.974341630935669},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":{"embeddings":[[1.0,3.0]],"regenerate":false}},"_rankingScore":0.9472135901451112}]"###);
@@ -274,7 +271,7 @@ async fn highlighter() {
let (response, code) = index let (response, code) = index
.search_post(json!({"q": "Captain Marvel", "vector": [1.0, 1.0], .search_post(json!({"q": "Captain Marvel", "vector": [1.0, 1.0],
"hybrid": {"embedder": "default", "semanticRatio": 0.2}, "hybrid": {"semanticRatio": 0.2},
"retrieveVectors": true, "retrieveVectors": true,
"attributesToHighlight": [ "attributesToHighlight": [
"desc", "desc",
@@ -290,7 +287,7 @@ async fn highlighter() {
let (response, code) = index let (response, code) = index
.search_post(json!({"q": "Captain Marvel", "vector": [1.0, 1.0], .search_post(json!({"q": "Captain Marvel", "vector": [1.0, 1.0],
"hybrid": {"embedder": "default", "semanticRatio": 0.8}, "hybrid": {"semanticRatio": 0.8},
"retrieveVectors": true, "retrieveVectors": true,
"showRankingScore": true, "showRankingScore": true,
"attributesToHighlight": [ "attributesToHighlight": [
@@ -307,7 +304,7 @@ async fn highlighter() {
// no highlighting on full semantic // no highlighting on full semantic
let (response, code) = index let (response, code) = index
.search_post(json!({"q": "Captain Marvel", "vector": [1.0, 1.0], .search_post(json!({"q": "Captain Marvel", "vector": [1.0, 1.0],
"hybrid": {"embedder": "default", "semanticRatio": 1.0}, "hybrid": {"semanticRatio": 1.0},
"retrieveVectors": true, "retrieveVectors": true,
"showRankingScore": true, "showRankingScore": true,
"attributesToHighlight": [ "attributesToHighlight": [
@@ -329,7 +326,7 @@ async fn invalid_semantic_ratio() {
let (response, code) = index let (response, code) = index
.search_post( .search_post(
json!({"q": "Captain", "vector": [1.0, 1.0], "hybrid": {"embedder": "default", "semanticRatio": 1.2}}), json!({"q": "Captain", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 1.2}}),
) )
.await; .await;
snapshot!(code, @"400 Bad Request"); snapshot!(code, @"400 Bad Request");
@@ -344,7 +341,7 @@ async fn invalid_semantic_ratio() {
let (response, code) = index let (response, code) = index
.search_post( .search_post(
json!({"q": "Captain", "vector": [1.0, 1.0], "hybrid": {"embedder": "default", "semanticRatio": -0.8}}), json!({"q": "Captain", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": -0.8}}),
) )
.await; .await;
snapshot!(code, @"400 Bad Request"); snapshot!(code, @"400 Bad Request");
@@ -360,7 +357,7 @@ async fn invalid_semantic_ratio() {
let (response, code) = index let (response, code) = index
.search_get( .search_get(
&yaup::to_string( &yaup::to_string(
&json!({"q": "Captain", "vector": [1.0, 1.0], "hybridEmbedder": "default", "hybridSemanticRatio": 1.2}), &json!({"q": "Captain", "vector": [1.0, 1.0], "hybridSemanticRatio": 1.2}),
) )
.unwrap(), .unwrap(),
) )
@@ -378,7 +375,7 @@ async fn invalid_semantic_ratio() {
let (response, code) = index let (response, code) = index
.search_get( .search_get(
&yaup::to_string( &yaup::to_string(
&json!({"q": "Captain", "vector": [1.0, 1.0], "hybridEmbedder": "default", "hybridSemanticRatio": -0.2}), &json!({"q": "Captain", "vector": [1.0, 1.0], "hybridSemanticRatio": -0.2}),
) )
.unwrap(), .unwrap(),
) )
@@ -401,7 +398,7 @@ async fn single_document() {
let (response, code) = index let (response, code) = index
.search_post( .search_post(
json!({"vector": [1.0, 3.0], "hybrid": {"semanticRatio": 1.0, "embedder": "default"}, "showRankingScore": true, "retrieveVectors": true}), json!({"vector": [1.0, 3.0], "hybrid": {"semanticRatio": 1.0}, "showRankingScore": true, "retrieveVectors": true}),
) )
.await; .await;
@@ -417,7 +414,7 @@ async fn query_combination() {
// search without query and vector, but with hybrid => still placeholder // search without query and vector, but with hybrid => still placeholder
let (response, code) = index let (response, code) = index
.search_post(json!({"hybrid": {"embedder": "default", "semanticRatio": 1.0}, "showRankingScore": true, "retrieveVectors": true})) .search_post(json!({"hybrid": {"semanticRatio": 1.0}, "showRankingScore": true, "retrieveVectors": true}))
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -426,7 +423,7 @@ async fn query_combination() {
// same with a different semantic ratio // same with a different semantic ratio
let (response, code) = index let (response, code) = index
.search_post(json!({"hybrid": {"embedder": "default", "semanticRatio": 0.76}, "showRankingScore": true, "retrieveVectors": true})) .search_post(json!({"hybrid": {"semanticRatio": 0.76}, "showRankingScore": true, "retrieveVectors": true}))
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -435,7 +432,7 @@ async fn query_combination() {
// wrong vector dimensions // wrong vector dimensions
let (response, code) = index let (response, code) = index
.search_post(json!({"vector": [1.0, 0.0, 1.0], "hybrid": {"embedder": "default", "semanticRatio": 1.0}, "showRankingScore": true, "retrieveVectors": true})) .search_post(json!({"vector": [1.0, 0.0, 1.0], "hybrid": {"semanticRatio": 1.0}, "showRankingScore": true, "retrieveVectors": true}))
.await; .await;
snapshot!(code, @"400 Bad Request"); snapshot!(code, @"400 Bad Request");
@@ -450,7 +447,7 @@ async fn query_combination() {
// full vector // full vector
let (response, code) = index let (response, code) = index
.search_post(json!({"vector": [1.0, 0.0], "hybrid": {"embedder": "default", "semanticRatio": 1.0}, "showRankingScore": true, "retrieveVectors": true})) .search_post(json!({"vector": [1.0, 0.0], "hybrid": {"semanticRatio": 1.0}, "showRankingScore": true, "retrieveVectors": true}))
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -459,7 +456,7 @@ async fn query_combination() {
// full keyword, without a query // full keyword, without a query
let (response, code) = index let (response, code) = index
.search_post(json!({"vector": [1.0, 0.0], "hybrid": {"embedder": "default", "semanticRatio": 0.0}, "showRankingScore": true, "retrieveVectors": true})) .search_post(json!({"vector": [1.0, 0.0], "hybrid": {"semanticRatio": 0.0}, "showRankingScore": true, "retrieveVectors": true}))
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -468,7 +465,7 @@ async fn query_combination() {
// query + vector, full keyword => keyword // query + vector, full keyword => keyword
let (response, code) = index let (response, code) = index
.search_post(json!({"q": "Captain", "vector": [1.0, 0.0], "hybrid": {"embedder": "default", "semanticRatio": 0.0}, "showRankingScore": true, "retrieveVectors": true})) .search_post(json!({"q": "Captain", "vector": [1.0, 0.0], "hybrid": {"semanticRatio": 0.0}, "showRankingScore": true, "retrieveVectors": true}))
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -483,7 +480,7 @@ async fn query_combination() {
snapshot!(code, @"400 Bad Request"); snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###" snapshot!(response, @r###"
{ {
"message": "Invalid request: missing `hybrid` parameter when `vector` is present.", "message": "Invalid request: missing `hybrid` parameter when both `q` and `vector` are present.",
"code": "missing_search_hybrid", "code": "missing_search_hybrid",
"type": "invalid_request", "type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#missing_search_hybrid" "link": "https://docs.meilisearch.com/errors#missing_search_hybrid"
@@ -493,7 +490,7 @@ async fn query_combination() {
// full vector, without a vector => error // full vector, without a vector => error
let (response, code) = index let (response, code) = index
.search_post( .search_post(
json!({"q": "Captain", "hybrid": {"semanticRatio": 1.0, "embedder": "default"}, "showRankingScore": true, "retrieveVectors": true}), json!({"q": "Captain", "hybrid": {"semanticRatio": 1.0}, "showRankingScore": true, "retrieveVectors": true}),
) )
.await; .await;
@@ -510,7 +507,7 @@ async fn query_combination() {
// hybrid without a vector => full keyword // hybrid without a vector => full keyword
let (response, code) = index let (response, code) = index
.search_post( .search_post(
json!({"q": "Planet", "hybrid": {"semanticRatio": 0.99, "embedder": "default"}, "showRankingScore": true, "retrieveVectors": true}), json!({"q": "Planet", "hybrid": {"semanticRatio": 0.99}, "showRankingScore": true, "retrieveVectors": true}),
) )
.await; .await;
@@ -526,58 +523,7 @@ async fn retrieve_vectors() {
let (response, code) = index let (response, code) = index
.search_post( .search_post(
json!({"q": "Captain", "hybrid": {"embedder": "default", "semanticRatio": 0.2}, "retrieveVectors": true}), json!({"q": "Captain", "hybrid": {"semanticRatio": 0.2}, "retrieveVectors": true}),
)
.await;
snapshot!(code, @"200 OK");
insta::assert_json_snapshot!(response["hits"], {"[]._vectors.default.embeddings" => "[vectors]"}, @r###"
[
{
"title": "Captain Planet",
"desc": "He's not part of the Marvel Cinematic Universe",
"id": "2",
"_vectors": {
"default": {
"embeddings": "[vectors]",
"regenerate": true
}
}
},
{
"title": "Captain Marvel",
"desc": "a Shazam ersatz",
"id": "3",
"_vectors": {
"default": {
"embeddings": "[vectors]",
"regenerate": true
}
}
},
{
"title": "Shazam!",
"desc": "a Captain Marvel ersatz",
"id": "1",
"_vectors": {
"default": {
"embeddings": "[vectors]",
"regenerate": true
}
}
}
]
"###);
// use explicit `_vectors` in displayed attributes
let (response, code) = index
.update_settings(json!({ "displayedAttributes": ["id", "title", "desc", "_vectors"]} ))
.await;
assert_eq!(202, code, "{:?}", response);
index.wait_task(response.uid()).await;
let (response, code) = index
.search_post(
json!({"q": "Captain", "hybrid": {"embedder": "default", "semanticRatio": 0.2}, "retrieveVectors": true}),
) )
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -627,7 +573,7 @@ async fn retrieve_vectors() {
let (response, code) = index let (response, code) = index
.search_post( .search_post(
json!({"q": "Captain", "hybrid": {"embedder": "default", "semanticRatio": 0.2}, "retrieveVectors": true}), json!({"q": "Captain", "hybrid": {"semanticRatio": 0.2}, "retrieveVectors": true}),
) )
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");


@@ -922,7 +922,7 @@ async fn invalid_locales() {
snapshot!(code, @"400 Bad Request"); snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###" snapshot!(json_string!(response), @r###"
{ {
"message": "Unknown value `invalid` at `.locales[0]`: expected one of `af`, `ak`, `am`, `ar`, `az`, `be`, `bn`, `bg`, `ca`, `cs`, `da`, `de`, `el`, `en`, `eo`, `et`, `fi`, `fr`, `gu`, `he`, `hi`, `hr`, `hu`, `hy`, `id`, `it`, `jv`, `ja`, `kn`, `ka`, `km`, `ko`, `la`, `lv`, `lt`, `ml`, `mr`, `mk`, `my`, `ne`, `nl`, `nb`, `or`, `pa`, `fa`, `pl`, `pt`, `ro`, `ru`, `si`, `sk`, `sl`, `sn`, `es`, `sr`, `sv`, `ta`, `te`, `tl`, `th`, `tk`, `tr`, `uk`, `ur`, `uz`, `vi`, `yi`, `zh`, `zu`, `afr`, `aka`, `amh`, `ara`, `aze`, `bel`, `ben`, `bul`, `cat`, `ces`, `dan`, `deu`, `ell`, `eng`, `epo`, `est`, `fin`, `fra`, `guj`, `heb`, `hin`, `hrv`, `hun`, `hye`, `ind`, `ita`, `jav`, `jpn`, `kan`, `kat`, `khm`, `kor`, `lat`, `lav`, `lit`, `mal`, `mar`, `mkd`, `mya`, `nep`, `nld`, `nob`, `ori`, `pan`, `pes`, `pol`, `por`, `ron`, `rus`, `sin`, `slk`, `slv`, `sna`, `spa`, `srp`, `swe`, `tam`, `tel`, `tgl`, `tha`, `tuk`, `tur`, `ukr`, `urd`, `uzb`, `vie`, `yid`, `zho`, `zul`, `cmn`", "message": "Unknown value `invalid` at `.locales[0]`: expected one of `af`, `ak`, `am`, `ar`, `az`, `be`, `bn`, `bg`, `ca`, `cs`, `zh`, `da`, `de`, `el`, `en`, `eo`, `et`, `fi`, `fr`, `gu`, `he`, `hi`, `hr`, `hu`, `hy`, `id`, `it`, `jv`, `ja`, `kn`, `ka`, `km`, `ko`, `la`, `lv`, `lt`, `ml`, `mr`, `mk`, `my`, `ne`, `nl`, `nb`, `or`, `pa`, `fa`, `pl`, `pt`, `ro`, `ru`, `si`, `sk`, `sl`, `sn`, `es`, `sr`, `sv`, `ta`, `te`, `tl`, `th`, `tk`, `tr`, `uk`, `ur`, `uz`, `vi`, `yi`, `zu`, `afr`, `aka`, `amh`, `ara`, `aze`, `bel`, `ben`, `bul`, `cat`, `ces`, `cmn`, `dan`, `deu`, `ell`, `eng`, `epo`, `est`, `fin`, `fra`, `guj`, `heb`, `hin`, `hrv`, `hun`, `hye`, `ind`, `ita`, `jav`, `jpn`, `kan`, `kat`, `khm`, `kor`, `lat`, `lav`, `lit`, `mal`, `mar`, `mkd`, `mya`, `nep`, `nld`, `nob`, `ori`, `pan`, `pes`, `pol`, `por`, `ron`, `rus`, `sin`, `slk`, `slv`, `sna`, `spa`, `srp`, `swe`, `tam`, `tel`, `tgl`, `tha`, `tuk`, `tur`, `ukr`, `urd`, `uzb`, `vie`, `yid`, `zul`",
"code": "invalid_search_locales", "code": "invalid_search_locales",
"type": "invalid_request", "type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_locales" "link": "https://docs.meilisearch.com/errors#invalid_search_locales"
@@ -935,7 +935,7 @@ async fn invalid_locales() {
snapshot!(code, @"400 Bad Request"); snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###" snapshot!(json_string!(response), @r###"
{ {
"message": "Invalid value in parameter `locales`: Unsupported locale `invalid`, expected one of af, ak, am, ar, az, be, bg, bn, ca, cs, da, de, el, en, eo, es, et, fa, fi, fr, gu, he, hi, hr, hu, hy, id, it, ja, jv, ka, km, kn, ko, la, lt, lv, mk, ml, mr, my, nb, ne, nl, or, pa, pl, pt, ro, ru, si, sk, sl, sn, sr, sv, ta, te, th, tk, tl, tr, uk, ur, uz, vi, yi, zh, zu, afr, aka, amh, ara, aze, bel, ben, bul, cat, ces, cmn, dan, deu, ell, eng, epo, est, fin, fra, guj, heb, hin, hrv, hun, hye, ind, ita, jav, jpn, kan, kat, khm, kor, lat, lav, lit, mal, mar, mkd, mya, nep, nld, nob, ori, pan, pes, pol, por, ron, rus, sin, slk, slv, sna, spa, srp, swe, tam, tel, tgl, tha, tuk, tur, ukr, urd, uzb, vie, yid, zho, zul", "message": "Invalid value in parameter `locales`: Unsupported locale `invalid`, expected one of af, ak, am, ar, az, be, bn, bg, ca, cs, zh, da, de, el, en, eo, et, fi, fr, gu, he, hi, hr, hu, hy, id, it, jv, ja, kn, ka, km, ko, la, lv, lt, ml, mr, mk, my, ne, nl, nb, or, pa, fa, pl, pt, ro, ru, si, sk, sl, sn, es, sr, sv, ta, te, tl, th, tk, tr, uk, ur, uz, vi, yi, zu, afr, aka, amh, ara, aze, bel, ben, bul, cat, ces, cmn, dan, deu, ell, eng, epo, est, fin, fra, guj, heb, hin, hrv, hun, hye, ind, ita, jav, jpn, kan, kat, khm, kor, lat, lav, lit, mal, mar, mkd, mya, nep, nld, nob, ori, pan, pes, pol, por, ron, rus, sin, slk, slv, sna, spa, srp, swe, tam, tel, tgl, tha, tuk, tur, ukr, urd, uzb, vie, yid, zul",
"code": "invalid_search_locales", "code": "invalid_search_locales",
"type": "invalid_request", "type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_locales" "link": "https://docs.meilisearch.com/errors#invalid_search_locales"
@@ -957,7 +957,7 @@ async fn invalid_localized_attributes_rules() {
.await; .await;
snapshot!(response, @r###" snapshot!(response, @r###"
{ {
"message": "Unknown value `japan` at `.localizedAttributes[0].locales[0]`: expected one of `af`, `ak`, `am`, `ar`, `az`, `be`, `bn`, `bg`, `ca`, `cs`, `da`, `de`, `el`, `en`, `eo`, `et`, `fi`, `fr`, `gu`, `he`, `hi`, `hr`, `hu`, `hy`, `id`, `it`, `jv`, `ja`, `kn`, `ka`, `km`, `ko`, `la`, `lv`, `lt`, `ml`, `mr`, `mk`, `my`, `ne`, `nl`, `nb`, `or`, `pa`, `fa`, `pl`, `pt`, `ro`, `ru`, `si`, `sk`, `sl`, `sn`, `es`, `sr`, `sv`, `ta`, `te`, `tl`, `th`, `tk`, `tr`, `uk`, `ur`, `uz`, `vi`, `yi`, `zh`, `zu`, `afr`, `aka`, `amh`, `ara`, `aze`, `bel`, `ben`, `bul`, `cat`, `ces`, `dan`, `deu`, `ell`, `eng`, `epo`, `est`, `fin`, `fra`, `guj`, `heb`, `hin`, `hrv`, `hun`, `hye`, `ind`, `ita`, `jav`, `jpn`, `kan`, `kat`, `khm`, `kor`, `lat`, `lav`, `lit`, `mal`, `mar`, `mkd`, `mya`, `nep`, `nld`, `nob`, `ori`, `pan`, `pes`, `pol`, `por`, `ron`, `rus`, `sin`, `slk`, `slv`, `sna`, `spa`, `srp`, `swe`, `tam`, `tel`, `tgl`, `tha`, `tuk`, `tur`, `ukr`, `urd`, `uzb`, `vie`, `yid`, `zho`, `zul`, `cmn`", "message": "Unknown value `japan` at `.localizedAttributes[0].locales[0]`: expected one of `af`, `ak`, `am`, `ar`, `az`, `be`, `bn`, `bg`, `ca`, `cs`, `zh`, `da`, `de`, `el`, `en`, `eo`, `et`, `fi`, `fr`, `gu`, `he`, `hi`, `hr`, `hu`, `hy`, `id`, `it`, `jv`, `ja`, `kn`, `ka`, `km`, `ko`, `la`, `lv`, `lt`, `ml`, `mr`, `mk`, `my`, `ne`, `nl`, `nb`, `or`, `pa`, `fa`, `pl`, `pt`, `ro`, `ru`, `si`, `sk`, `sl`, `sn`, `es`, `sr`, `sv`, `ta`, `te`, `tl`, `th`, `tk`, `tr`, `uk`, `ur`, `uz`, `vi`, `yi`, `zu`, `afr`, `aka`, `amh`, `ara`, `aze`, `bel`, `ben`, `bul`, `cat`, `ces`, `cmn`, `dan`, `deu`, `ell`, `eng`, `epo`, `est`, `fin`, `fra`, `guj`, `heb`, `hin`, `hrv`, `hun`, `hye`, `ind`, `ita`, `jav`, `jpn`, `kan`, `kat`, `khm`, `kor`, `lat`, `lav`, `lit`, `mal`, `mar`, `mkd`, `mya`, `nep`, `nld`, `nob`, `ori`, `pan`, `pes`, `pol`, `por`, `ron`, `rus`, `sin`, `slk`, `slv`, `sna`, `spa`, `srp`, `swe`, `tam`, `tel`, `tgl`, `tha`, `tuk`, `tur`, `ukr`, `urd`, `uzb`, `vie`, `yid`, `zul`",
"code": "invalid_settings_localized_attributes", "code": "invalid_settings_localized_attributes",
"type": "invalid_request", "type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_settings_localized_attributes" "link": "https://docs.meilisearch.com/errors#invalid_settings_localized_attributes"
@@ -1143,195 +1143,3 @@ async fn facet_search_with_localized_attributes() {
} }
"###); "###);
} }
#[actix_rt::test]
async fn swedish_search() {
let server = Server::new().await;
let index = server.index("test");
let documents = json!([
{"id": "tra1-1", "product": "trä"},
{"id": "tra2-1", "product": "traktor"},
{"id": "tra1-2", "product": "träbjälke"},
{"id": "tra2-2", "product": "trafiksignal"},
]);
index.add_documents(documents, None).await;
let (_response, _) = index
.update_settings(json!({
"searchableAttributes": ["product"],
"localizedAttributes": [
// force swedish
{"attributePatterns": ["product"], "locales": ["swe"]}
]
}))
.await;
index.wait_task(1).await;
// infer swedish
index
.search(json!({"q": "trä", "attributesToRetrieve": ["product"]}), |response, code| {
snapshot!(response, @r###"
{
"hits": [
{
"product": "trä"
},
{
"product": "träbjälke"
}
],
"query": "trä",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2
}
"###);
snapshot!(code, @"200 OK");
})
.await;
index
.search(json!({"q": "tra", "attributesToRetrieve": ["product"]}), |response, code| {
snapshot!(response, @r###"
{
"hits": [
{
"product": "traktor"
},
{
"product": "trafiksignal"
}
],
"query": "tra",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2
}
"###);
snapshot!(code, @"200 OK");
})
.await;
// force swedish
index
.search(
json!({"q": "trä", "locales": ["swe"], "attributesToRetrieve": ["product"]}),
|response, code| {
snapshot!(response, @r###"
{
"hits": [
{
"product": "trä"
},
{
"product": "träbjälke"
}
],
"query": "trä",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2
}
"###);
snapshot!(code, @"200 OK");
},
)
.await;
index
.search(
json!({"q": "tra", "locales": ["swe"], "attributesToRetrieve": ["product"]}),
|response, code| {
snapshot!(response, @r###"
{
"hits": [
{
"product": "traktor"
},
{
"product": "trafiksignal"
}
],
"query": "tra",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2
}
"###);
snapshot!(code, @"200 OK");
},
)
.await;
}
#[actix_rt::test]
async fn german_search() {
let server = Server::new().await;
let index = server.index("test");
let documents = json!([
{"id": 1, "product": "Interkulturalität"},
{"id": 2, "product": "Wissensorganisation"},
]);
index.add_documents(documents, None).await;
let (_response, _) = index
.update_settings(json!({
"searchableAttributes": ["product"],
"localizedAttributes": [
// force german
{"attributePatterns": ["product"], "locales": ["deu"]}
]
}))
.await;
index.wait_task(1).await;
// infer german
index
.search(
json!({"q": "kulturalität", "attributesToRetrieve": ["product"]}),
|response, code| {
snapshot!(response, @r###"
{
"hits": [
{
"product": "Interkulturalität"
}
],
"query": "kulturalität",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
}
"###);
snapshot!(code, @"200 OK");
},
)
.await;
index
.search(
json!({"q": "organisation", "attributesToRetrieve": ["product"]}),
|response, code| {
snapshot!(response, @r###"
{
"hits": [
{
"product": "Wissensorganisation"
}
],
"query": "organisation",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
}
"###);
snapshot!(code, @"200 OK");
},
)
.await;
}
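
The two deleted tests above pin down the locale mechanics from both ends: a locale can be fixed per attribute at settings time through `localizedAttributes`, or per request through `locales`, and under Swedish tokenization `trä` and `tra` match disjoint document sets. A compact sketch of both payloads, mirroring the tests:

use serde_json::json;

fn main() {
    // Settings-time rule: pin the `product` attribute to Swedish.
    let settings = json!({
        "localizedAttributes": [
            { "attributePatterns": ["product"], "locales": ["swe"] }
        ]
    });
    // Query-time override: force Swedish for a single search.
    let query = json!({ "q": "trä", "locales": ["swe"] });
    println!("{settings}\n{query}");
}
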


@@ -1099,28 +1099,22 @@ async fn experimental_feature_vector_store() {
index.add_documents(json!(documents), None).await; index.add_documents(json!(documents), None).await;
index.wait_task(0).await; index.wait_task(0).await;
let (response, code) = index index
.search_post(json!({ .search(json!({
"vector": [1.0, 2.0, 3.0], "vector": [1.0, 2.0, 3.0],
"hybrid": {
"embedder": "manual",
},
"showRankingScore": true "showRankingScore": true
})) }), |response, code|{
meili_snap::snapshot!(code, @"400 Bad Request");
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
{
"message": "Passing `vector` as a parameter requires enabling the `vector store` experimental feature. See https://github.com/meilisearch/product/discussions/677",
"code": "feature_not_enabled",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#feature_not_enabled"
}
"###);
})
.await; .await;
{
meili_snap::snapshot!(code, @"400 Bad Request");
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
{
"message": "Passing `vector` as a parameter requires enabling the `vector store` experimental feature. See https://github.com/meilisearch/product/discussions/677",
"code": "feature_not_enabled",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#feature_not_enabled"
}
"###);
}
index index
.search(json!({ .search(json!({
"retrieveVectors": true, "retrieveVectors": true,
@@ -1168,9 +1162,6 @@ async fn experimental_feature_vector_store() {
let (response, code) = index let (response, code) = index
.search_post(json!({ .search_post(json!({
"vector": [1.0, 2.0, 3.0], "vector": [1.0, 2.0, 3.0],
"hybrid": {
"embedder": "manual",
},
"showRankingScore": true, "showRankingScore": true,
"retrieveVectors": true, "retrieveVectors": true,
})) }))
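
Both versions of this test hinge on the same precondition: `vector` is rejected with `feature_not_enabled` until the vector store flag is switched on. In the harness used throughout these tests that is a single call; a fragment rather than a standalone program, since `server` and `json!` come from the surrounding harness:

// Sketch: flip the experimental flag before any vector search.
let (response, code) = server.set_features(json!({ "vectorStore": true })).await;
assert_eq!(200, code, "{:?}", response);
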


@@ -18,7 +18,7 @@ async fn similar_unexisting_index() {
}); });
index index
.similar(json!({"id": 287947, "embedder": "manual"}), |response, code| { .similar(json!({"id": 287947}), |response, code| {
assert_eq!(code, 404); assert_eq!(code, 404);
assert_eq!(response, expected_response); assert_eq!(response, expected_response);
}) })
@@ -44,7 +44,7 @@ async fn similar_feature_not_enabled() {
let server = Server::new().await; let server = Server::new().await;
let index = server.index("test"); let index = server.index("test");
let (response, code) = index.similar_post(json!({"id": 287947, "embedder": "manual"})).await; let (response, code) = index.similar_post(json!({"id": 287947})).await;
snapshot!(code, @"400 Bad Request"); snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###" snapshot!(json_string!(response), @r###"
{ {
@@ -199,8 +199,7 @@ async fn similar_not_found_id() {
snapshot!(code, @"202 Accepted"); snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await; server.wait_task(response.uid()).await;
let (response, code) = let (response, code) = index.similar_post(json!({"id": "definitely-doesnt-exist"})).await;
index.similar_post(json!({"id": "definitely-doesnt-exist", "embedder": "manual"})).await;
snapshot!(code, @"400 Bad Request"); snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###" snapshot!(json_string!(response), @r###"
{ {
@@ -231,8 +230,7 @@ async fn similar_bad_offset() {
snapshot!(code, @"202 Accepted"); snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await; server.wait_task(response.uid()).await;
let (response, code) = let (response, code) = index.similar_post(json!({"id": 287947, "offset": "doggo"})).await;
index.similar_post(json!({"id": 287947, "offset": "doggo", "embedder": "manual"})).await;
snapshot!(code, @"400 Bad Request"); snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###" snapshot!(json_string!(response), @r###"
{ {
@@ -243,7 +241,7 @@ async fn similar_bad_offset() {
} }
"###); "###);
let (response, code) = index.similar_get("?id=287947&offset=doggo&embedder=manual").await; let (response, code) = index.similar_get("?id=287947&offset=doggo").await;
snapshot!(code, @"400 Bad Request"); snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###" snapshot!(json_string!(response), @r###"
{ {
@@ -274,8 +272,7 @@ async fn similar_bad_limit() {
snapshot!(code, @"202 Accepted"); snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await; server.wait_task(response.uid()).await;
let (response, code) = let (response, code) = index.similar_post(json!({"id": 287947, "limit": "doggo"})).await;
index.similar_post(json!({"id": 287947, "limit": "doggo", "embedder": "manual"})).await;
snapshot!(code, @"400 Bad Request"); snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###" snapshot!(json_string!(response), @r###"
{ {
@@ -286,7 +283,7 @@ async fn similar_bad_limit() {
} }
"###); "###);
let (response, code) = index.similar_get("?id=287946&limit=doggo&embedder=manual").await; let (response, code) = index.similar_get("?id=287946&limit=doggo").await;
snapshot!(code, @"400 Bad Request"); snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###" snapshot!(json_string!(response), @r###"
{ {
@@ -326,8 +323,7 @@ async fn similar_bad_filter() {
snapshot!(code, @"202 Accepted"); snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await; index.wait_task(value.uid()).await;
let (response, code) = let (response, code) = index.similar_post(json!({ "id": 287947, "filter": true })).await;
index.similar_post(json!({ "id": 287947, "filter": true, "embedder": "manual" })).await;
snapshot!(code, @"400 Bad Request"); snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###" snapshot!(json_string!(response), @r###"
{ {
@@ -365,7 +361,7 @@ async fn filter_invalid_syntax_object() {
index.wait_task(value.uid()).await; index.wait_task(value.uid()).await;
index index
.similar(json!({"id": 287947, "filter": "title & Glass", "embedder": "manual"}), |response, code| { .similar(json!({"id": 287947, "filter": "title & Glass"}), |response, code| {
snapshot!(response, @r###" snapshot!(response, @r###"
{ {
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `title & Glass`.\n1:14 title & Glass", "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `title & Glass`.\n1:14 title & Glass",
@@ -404,7 +400,7 @@ async fn filter_invalid_syntax_array() {
index.wait_task(value.uid()).await; index.wait_task(value.uid()).await;
index index
.similar(json!({"id": 287947, "filter": ["title & Glass"], "embedder": "manual"}), |response, code| { .similar(json!({"id": 287947, "filter": ["title & Glass"]}), |response, code| {
snapshot!(response, @r###" snapshot!(response, @r###"
{ {
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `title & Glass`.\n1:14 title & Glass", "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `title & Glass`.\n1:14 title & Glass",
@@ -450,7 +446,7 @@ async fn filter_invalid_syntax_string() {
}); });
index index
.similar( .similar(
json!({"id": 287947, "filter": "title = Glass XOR title = Glass", "embedder": "manual"}), json!({"id": 287947, "filter": "title = Glass XOR title = Glass"}),
|response, code| { |response, code| {
assert_eq!(response, expected_response); assert_eq!(response, expected_response);
assert_eq!(code, 400); assert_eq!(code, 400);
@@ -490,13 +486,10 @@ async fn filter_invalid_attribute_array() {
"link": "https://docs.meilisearch.com/errors#invalid_similar_filter" "link": "https://docs.meilisearch.com/errors#invalid_similar_filter"
}); });
index index
.similar( .similar(json!({"id": 287947, "filter": ["many = Glass"]}), |response, code| {
json!({"id": 287947, "filter": ["many = Glass"], "embedder": "manual"}), assert_eq!(response, expected_response);
|response, code| { assert_eq!(code, 400);
assert_eq!(response, expected_response); })
assert_eq!(code, 400);
},
)
.await; .await;
} }
@@ -531,13 +524,10 @@ async fn filter_invalid_attribute_string() {
"link": "https://docs.meilisearch.com/errors#invalid_similar_filter" "link": "https://docs.meilisearch.com/errors#invalid_similar_filter"
}); });
index index
.similar( .similar(json!({"id": 287947, "filter": "many = Glass"}), |response, code| {
json!({"id": 287947, "filter": "many = Glass", "embedder": "manual"}), assert_eq!(response, expected_response);
|response, code| { assert_eq!(code, 400);
assert_eq!(response, expected_response); })
assert_eq!(code, 400);
},
)
.await; .await;
} }
@@ -572,13 +562,10 @@ async fn filter_reserved_geo_attribute_array() {
"link": "https://docs.meilisearch.com/errors#invalid_similar_filter" "link": "https://docs.meilisearch.com/errors#invalid_similar_filter"
}); });
index index
.similar( .similar(json!({"id": 287947, "filter": ["_geo = Glass"]}), |response, code| {
json!({"id": 287947, "filter": ["_geo = Glass"], "embedder": "manual"}), assert_eq!(response, expected_response);
|response, code| { assert_eq!(code, 400);
assert_eq!(response, expected_response); })
assert_eq!(code, 400);
},
)
.await; .await;
} }
@@ -613,13 +600,10 @@ async fn filter_reserved_geo_attribute_string() {
"link": "https://docs.meilisearch.com/errors#invalid_similar_filter" "link": "https://docs.meilisearch.com/errors#invalid_similar_filter"
}); });
index index
.similar( .similar(json!({"id": 287947, "filter": "_geo = Glass"}), |response, code| {
json!({"id": 287947, "filter": "_geo = Glass", "embedder": "manual"}), assert_eq!(response, expected_response);
|response, code| { assert_eq!(code, 400);
assert_eq!(response, expected_response); })
assert_eq!(code, 400);
},
)
.await; .await;
} }
@@ -654,13 +638,10 @@ async fn filter_reserved_attribute_array() {
"link": "https://docs.meilisearch.com/errors#invalid_similar_filter" "link": "https://docs.meilisearch.com/errors#invalid_similar_filter"
}); });
index index
.similar( .similar(json!({"id": 287947, "filter": ["_geoDistance = Glass"]}), |response, code| {
json!({"id": 287947, "filter": ["_geoDistance = Glass"], "embedder": "manual"}), assert_eq!(response, expected_response);
|response, code| { assert_eq!(code, 400);
assert_eq!(response, expected_response); })
assert_eq!(code, 400);
},
)
.await; .await;
} }
@@ -695,13 +676,10 @@ async fn filter_reserved_attribute_string() {
"link": "https://docs.meilisearch.com/errors#invalid_similar_filter" "link": "https://docs.meilisearch.com/errors#invalid_similar_filter"
}); });
index index
.similar( .similar(json!({"id": 287947, "filter": "_geoDistance = Glass"}), |response, code| {
json!({"id": 287947, "filter": "_geoDistance = Glass", "embedder": "manual"}), assert_eq!(response, expected_response);
|response, code| { assert_eq!(code, 400);
assert_eq!(response, expected_response); })
assert_eq!(code, 400);
},
)
.await; .await;
} }
@@ -736,13 +714,10 @@ async fn filter_reserved_geo_point_array() {
"link": "https://docs.meilisearch.com/errors#invalid_similar_filter" "link": "https://docs.meilisearch.com/errors#invalid_similar_filter"
}); });
index index
.similar( .similar(json!({"id": 287947, "filter": ["_geoPoint = Glass"]}), |response, code| {
json!({"id": 287947, "filter": ["_geoPoint = Glass"], "embedder": "manual"}), assert_eq!(response, expected_response);
|response, code| { assert_eq!(code, 400);
assert_eq!(response, expected_response); })
assert_eq!(code, 400);
},
)
.await; .await;
} }
@@ -777,13 +752,10 @@ async fn filter_reserved_geo_point_string() {
"link": "https://docs.meilisearch.com/errors#invalid_similar_filter" "link": "https://docs.meilisearch.com/errors#invalid_similar_filter"
}); });
index index
.similar( .similar(json!({"id": 287947, "filter": "_geoPoint = Glass"}), |response, code| {
json!({"id": 287947, "filter": "_geoPoint = Glass", "embedder": "manual"}), assert_eq!(response, expected_response);
|response, code| { assert_eq!(code, 400);
assert_eq!(response, expected_response); })
assert_eq!(code, 400);
},
)
.await; .await;
} }
@@ -793,8 +765,7 @@ async fn similar_bad_retrieve_vectors() {
server.set_features(json!({"vectorStore": true})).await; server.set_features(json!({"vectorStore": true})).await;
let index = server.index("test"); let index = server.index("test");
let (response, code) = let (response, code) = index.similar_post(json!({"retrieveVectors": "doggo"})).await;
index.similar_post(json!({"retrieveVectors": "doggo", "embedder": "manual"})).await;
snapshot!(code, @"400 Bad Request"); snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###" snapshot!(json_string!(response), @r###"
{ {
@@ -805,8 +776,7 @@ async fn similar_bad_retrieve_vectors() {
} }
"###); "###);
let (response, code) = let (response, code) = index.similar_post(json!({"retrieveVectors": [true]})).await;
index.similar_post(json!({"retrieveVectors": [true], "embedder": "manual"})).await;
snapshot!(code, @"400 Bad Request"); snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###" snapshot!(json_string!(response), @r###"
{ {
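
The hunks in this file all track one field: one side of the diff sends `"embedder": "manual"` (or `&embedder=manual` on the GET route) with every `/similar` call, the other omits it. A standalone sketch of the two body shapes, assuming an embedder named `manual`:

use serde_json::json;

fn main() {
    // Shape with an explicit embedder.
    let with_embedder = json!({ "id": 143, "embedder": "manual", "retrieveVectors": true });
    // Shape that leaves the embedder implicit.
    let without_embedder = json!({ "id": 143, "retrieveVectors": true });
    println!("{with_embedder}\n{without_embedder}");
}
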


@@ -80,11 +80,9 @@ async fn basic() {
index.wait_task(value.uid()).await; index.wait_task(value.uid()).await;
index index
.similar( .similar(json!({"id": 143, "retrieveVectors": true}), |response, code| {
json!({"id": 143, "retrieveVectors": true, "embedder": "manual"}), snapshot!(code, @"200 OK");
|response, code| { snapshot!(json_string!(response["hits"]), @r###"
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]), @r###"
[ [
{ {
"title": "Escape Room", "title": "Escape Room",
@@ -156,16 +154,13 @@ async fn basic() {
} }
] ]
"###); "###);
}, })
)
.await; .await;
index index
.similar( .similar(json!({"id": "299537", "retrieveVectors": true}), |response, code| {
json!({"id": "299537", "retrieveVectors": true, "embedder": "manual"}), snapshot!(code, @"200 OK");
|response, code| { snapshot!(json_string!(response["hits"]), @r###"
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]), @r###"
[ [
{ {
"title": "How to Train Your Dragon: The Hidden World", "title": "How to Train Your Dragon: The Hidden World",
@@ -237,8 +232,7 @@ async fn basic() {
} }
] ]
"###); "###);
}, })
)
.await; .await;
} }
@@ -278,7 +272,7 @@ async fn ranking_score_threshold() {
index index
.similar( .similar(
json!({"id": 143, "showRankingScore": true, "rankingScoreThreshold": 0, "retrieveVectors": true, "embedder": "manual"}), json!({"id": 143, "showRankingScore": true, "rankingScoreThreshold": 0, "retrieveVectors": true}),
|response, code| { |response, code| {
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response["estimatedTotalHits"]), @"4"); meili_snap::snapshot!(meili_snap::json_string!(response["estimatedTotalHits"]), @"4");
@@ -364,7 +358,7 @@ async fn ranking_score_threshold() {
index index
.similar( .similar(
json!({"id": 143, "showRankingScore": true, "rankingScoreThreshold": 0.2, "retrieveVectors": true, "embedder": "manual"}), json!({"id": 143, "showRankingScore": true, "rankingScoreThreshold": 0.2, "retrieveVectors": true}),
|response, code| { |response, code| {
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response["estimatedTotalHits"]), @"3"); meili_snap::snapshot!(meili_snap::json_string!(response["estimatedTotalHits"]), @"3");
@@ -432,7 +426,7 @@ async fn ranking_score_threshold() {
index index
.similar( .similar(
json!({"id": 143, "showRankingScore": true, "rankingScoreThreshold": 0.3, "retrieveVectors": true, "embedder": "manual"}), json!({"id": 143, "showRankingScore": true, "rankingScoreThreshold": 0.3, "retrieveVectors": true}),
|response, code| { |response, code| {
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response["estimatedTotalHits"]), @"2"); meili_snap::snapshot!(meili_snap::json_string!(response["estimatedTotalHits"]), @"2");
@@ -482,7 +476,7 @@ async fn ranking_score_threshold() {
index index
.similar( .similar(
json!({"id": 143, "showRankingScore": true, "rankingScoreThreshold": 0.6, "retrieveVectors": true, "embedder": "manual"}), json!({"id": 143, "showRankingScore": true, "rankingScoreThreshold": 0.6, "retrieveVectors": true}),
|response, code| { |response, code| {
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response["estimatedTotalHits"]), @"1"); meili_snap::snapshot!(meili_snap::json_string!(response["estimatedTotalHits"]), @"1");
@@ -514,7 +508,7 @@ async fn ranking_score_threshold() {
index index
.similar( .similar(
json!({"id": 143, "showRankingScore": true, "rankingScoreThreshold": 0.9, "retrieveVectors": true, "embedder": "manual"}), json!({"id": 143, "showRankingScore": true, "rankingScoreThreshold": 0.9, "retrieveVectors": true}),
|response, code| { |response, code| {
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]), @"[]"); snapshot!(json_string!(response["hits"]), @"[]");
@@ -559,7 +553,7 @@ async fn filter() {
index index
.similar( .similar(
json!({"id": 522681, "filter": "release_year = 2019", "retrieveVectors": true, "embedder": "manual"}), json!({"id": 522681, "filter": "release_year = 2019", "retrieveVectors": true}),
|response, code| { |response, code| {
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]), @r###" snapshot!(json_string!(response["hits"]), @r###"
@@ -623,7 +617,7 @@ async fn filter() {
index index
.similar( .similar(
json!({"id": 522681, "filter": "release_year < 2000", "retrieveVectors": true, "embedder": "manual"}), json!({"id": 522681, "filter": "release_year < 2000", "retrieveVectors": true}),
|response, code| { |response, code| {
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]), @r###" snapshot!(json_string!(response["hits"]), @r###"
@@ -687,11 +681,9 @@ async fn limit_and_offset() {
index.wait_task(value.uid()).await; index.wait_task(value.uid()).await;
index index
.similar( .similar(json!({"id": 143, "limit": 1, "retrieveVectors": true}), |response, code| {
json!({"id": 143, "limit": 1, "retrieveVectors": true, "embedder": "manual"}), snapshot!(code, @"200 OK");
|response, code| { snapshot!(json_string!(response["hits"]), @r###"
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]), @r###"
[ [
{ {
"title": "Escape Room", "title": "Escape Room",
@@ -712,13 +704,12 @@ async fn limit_and_offset() {
} }
] ]
"###); "###);
}, })
)
.await; .await;
index index
.similar( .similar(
json!({"id": 143, "limit": 1, "offset": 1, "retrieveVectors": true, "embedder": "manual"}), json!({"id": 143, "limit": 1, "offset": 1, "retrieveVectors": true}),
|response, code| { |response, code| {
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]), @r###" snapshot!(json_string!(response["hits"]), @r###"
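
Across the `ranking_score_threshold` hunks above, the snapshots walk the threshold through 0, 0.2, 0.3, 0.6 and 0.9 while `estimatedTotalHits` falls from 4 to 3, 2, 1 and finally an empty hit list: raising the threshold can only shrink the result set. The invariant as a sketch, where `total_hits` is a hypothetical helper wrapping the `/similar` call above:

// `total_hits(t)` is hypothetical: it would send the /similar request with
// rankingScoreThreshold = t and return the reported estimatedTotalHits.
let totals: Vec<u64> = [0.0, 0.2, 0.3, 0.6, 0.9].iter().map(|&t| total_hits(t)).collect();
assert!(totals.windows(2).all(|w| w[0] >= w[1])); // non-increasing in the threshold
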


@@ -1,380 +0,0 @@
use meili_snap::{json_string, snapshot};
use crate::common::{GetAllDocumentsOptions, Server};
use crate::json;
use crate::vector::generate_default_user_provided_documents;
#[actix_rt::test]
async fn retrieve_binary_quantize_status_in_the_settings() {
let server = Server::new().await;
let index = server.index("doggo");
let (value, code) = server.set_features(json!({"vectorStore": true})).await;
snapshot!(code, @"200 OK");
snapshot!(value, @r###"
{
"vectorStore": true,
"metrics": false,
"logsRoute": false,
"editDocumentsByFunction": false,
"containsFilter": false
}
"###);
let (response, code) = index
.update_settings(json!({
"embedders": {
"manual": {
"source": "userProvided",
"dimensions": 3,
}
},
}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
let (settings, code) = index.settings().await;
snapshot!(code, @"200 OK");
snapshot!(settings["embedders"]["manual"], @r###"{"source":"userProvided","dimensions":3}"###);
let (response, code) = index
.update_settings(json!({
"embedders": {
"manual": {
"source": "userProvided",
"dimensions": 3,
"binaryQuantized": false,
}
},
}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
let (settings, code) = index.settings().await;
snapshot!(code, @"200 OK");
snapshot!(settings["embedders"]["manual"], @r###"{"source":"userProvided","dimensions":3,"binaryQuantized":false}"###);
let (response, code) = index
.update_settings(json!({
"embedders": {
"manual": {
"source": "userProvided",
"dimensions": 3,
"binaryQuantized": true,
}
},
}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
let (settings, code) = index.settings().await;
snapshot!(code, @"200 OK");
snapshot!(settings["embedders"]["manual"], @r###"{"source":"userProvided","dimensions":3,"binaryQuantized":true}"###);
}
#[actix_rt::test]
async fn binary_quantize_before_sending_documents() {
let server = Server::new().await;
let index = server.index("doggo");
let (value, code) = server.set_features(json!({"vectorStore": true})).await;
snapshot!(code, @"200 OK");
snapshot!(value, @r###"
{
"vectorStore": true,
"metrics": false,
"logsRoute": false,
"editDocumentsByFunction": false,
"containsFilter": false
}
"###);
let (response, code) = index
.update_settings(json!({
"embedders": {
"manual": {
"source": "userProvided",
"dimensions": 3,
"binaryQuantized": true,
}
},
}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
let documents = json!([
{"id": 0, "name": "kefir", "_vectors": { "manual": [-1.2, -2.3, 3.2] }},
{"id": 1, "name": "echo", "_vectors": { "manual": [2.5, 1.5, -130] }},
]);
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await.succeeded();
// Make sure the documents are binary quantized
let (documents, _code) = index
.get_all_documents(GetAllDocumentsOptions { retrieve_vectors: true, ..Default::default() })
.await;
snapshot!(json_string!(documents), @r###"
{
"results": [
{
"id": 0,
"name": "kefir",
"_vectors": {
"manual": {
"embeddings": [
[
-1.0,
-1.0,
1.0
]
],
"regenerate": false
}
}
},
{
"id": 1,
"name": "echo",
"_vectors": {
"manual": {
"embeddings": [
[
1.0,
1.0,
-1.0
]
],
"regenerate": false
}
}
}
],
"offset": 0,
"limit": 20,
"total": 2
}
"###);
}
#[actix_rt::test]
async fn binary_quantize_after_sending_documents() {
let server = Server::new().await;
let index = server.index("doggo");
let (value, code) = server.set_features(json!({"vectorStore": true})).await;
snapshot!(code, @"200 OK");
snapshot!(value, @r###"
{
"vectorStore": true,
"metrics": false,
"logsRoute": false,
"editDocumentsByFunction": false,
"containsFilter": false
}
"###);
let (response, code) = index
.update_settings(json!({
"embedders": {
"manual": {
"source": "userProvided",
"dimensions": 3,
}
},
}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
let documents = json!([
{"id": 0, "name": "kefir", "_vectors": { "manual": [-1.2, -2.3, 3.2] }},
{"id": 1, "name": "echo", "_vectors": { "manual": [2.5, 1.5, -130] }},
]);
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
"embedders": {
"manual": {
"source": "userProvided",
"dimensions": 3,
"binaryQuantized": true,
}
},
}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
// Make sure the documents are binary quantized
let (documents, _code) = index
.get_all_documents(GetAllDocumentsOptions { retrieve_vectors: true, ..Default::default() })
.await;
snapshot!(json_string!(documents), @r###"
{
"results": [
{
"id": 0,
"name": "kefir",
"_vectors": {
"manual": {
"embeddings": [
[
-1.0,
-1.0,
1.0
]
],
"regenerate": false
}
}
},
{
"id": 1,
"name": "echo",
"_vectors": {
"manual": {
"embeddings": [
[
1.0,
1.0,
-1.0
]
],
"regenerate": false
}
}
}
],
"offset": 0,
"limit": 20,
"total": 2
}
"###);
}
#[actix_rt::test]
async fn try_to_disable_binary_quantization() {
let server = Server::new().await;
let index = server.index("doggo");
let (value, code) = server.set_features(json!({"vectorStore": true})).await;
snapshot!(code, @"200 OK");
snapshot!(value, @r###"
{
"vectorStore": true,
"metrics": false,
"logsRoute": false,
"editDocumentsByFunction": false,
"containsFilter": false
}
"###);
let (response, code) = index
.update_settings(json!({
"embedders": {
"manual": {
"source": "userProvided",
"dimensions": 3,
"binaryQuantized": true,
}
},
}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
"embedders": {
"manual": {
"source": "userProvided",
"dimensions": 3,
"binaryQuantized": false,
}
},
}))
.await;
snapshot!(code, @"202 Accepted");
let ret = server.wait_task(response.uid()).await;
snapshot!(ret, @r###"
{
"uid": "[uid]",
"indexUid": "doggo",
"status": "failed",
"type": "settingsUpdate",
"canceledBy": null,
"details": {
"embedders": {
"manual": {
"source": "userProvided",
"dimensions": 3,
"binaryQuantized": false
}
}
},
"error": {
"message": "`.embedders.manual.binaryQuantized`: Cannot disable the binary quantization.\n - Note: Binary quantization is a lossy operation that cannot be reverted.\n - Hint: Add a new embedder that is non-quantized and regenerate the vectors.",
"code": "invalid_settings_embedders",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_settings_embedders"
},
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
}
"###);
}
#[actix_rt::test]
async fn binary_quantize_clear_documents() {
let server = Server::new().await;
let index = generate_default_user_provided_documents(&server).await;
let (response, code) = index
.update_settings(json!({
"embedders": {
"manual": {
"binaryQuantized": true,
}
},
}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
let (value, _code) = index.clear_all_documents().await;
index.wait_task(value.uid()).await.succeeded();
// Make sure the documents DB has been cleared
let (documents, _code) = index
.get_all_documents(GetAllDocumentsOptions { retrieve_vectors: true, ..Default::default() })
.await;
snapshot!(json_string!(documents), @r###"
{
"results": [],
"offset": 0,
"limit": 20,
"total": 0
}
"###);
// Make sure the arroy DB has been cleared
let (documents, _code) =
index.search_post(json!({ "hybrid": { "embedder": "manual" }, "vector": [1, 1, 1] })).await;
snapshot!(documents, @r###"
{
"hits": [],
"query": "",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0,
"semanticHitCount": 0
}
"###);
}


@@ -1,4 +1,3 @@
mod binary_quantized;
mod openai; mod openai;
mod rest; mod rest;
mod settings; mod settings;
@@ -625,8 +624,7 @@ async fn clear_documents() {
"###); "###);
// Make sure the arroy DB has been cleared // Make sure the arroy DB has been cleared
let (documents, _code) = let (documents, _code) = index.search_post(json!({ "vector": [1, 1, 1] })).await;
index.search_post(json!({ "vector": [1, 1, 1], "hybrid": {"embedder": "manual"} })).await;
snapshot!(documents, @r###" snapshot!(documents, @r###"
{ {
"hits": [], "hits": [],
@@ -687,11 +685,7 @@ async fn add_remove_one_vector_4588() {
let task = index.wait_task(value.uid()).await; let task = index.wait_task(value.uid()).await;
snapshot!(task, name: "document-deleted"); snapshot!(task, name: "document-deleted");
let (documents, _code) = index let (documents, _code) = index.search_post(json!({"vector": [1, 1, 1] })).await;
.search_post(
json!({"vector": [1, 1, 1], "hybrid": {"semanticRatio": 1.0, "embedder": "manual"} }),
)
.await;
snapshot!(documents, @r###" snapshot!(documents, @r###"
{ {
"hits": [ "hits": [


@@ -449,7 +449,7 @@ async fn it_works() {
let (response, code) = index let (response, code) = index
.search_post(json!({ .search_post(json!({
"q": "chien de chasse", "q": "chien de chasse",
"hybrid": {"semanticRatio": 1.0, "embedder": "default"}, "hybrid": {"semanticRatio": 1.0}
})) }))
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -489,7 +489,7 @@ async fn it_works() {
let (response, code) = index let (response, code) = index
.search_post(json!({ .search_post(json!({
"q": "petit chien", "q": "petit chien",
"hybrid": {"semanticRatio": 1.0, "embedder": "default"} "hybrid": {"semanticRatio": 1.0}
})) }))
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -529,7 +529,7 @@ async fn it_works() {
let (response, code) = index let (response, code) = index
.search_post(json!({ .search_post(json!({
"q": "grand chien de berger des montagnes", "q": "grand chien de berger des montagnes",
"hybrid": {"semanticRatio": 1.0, "embedder": "default"} "hybrid": {"semanticRatio": 1.0}
})) }))
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -616,7 +616,7 @@ async fn tokenize_long_text() {
"q": "grand chien de berger des montagnes", "q": "grand chien de berger des montagnes",
"showRankingScore": true, "showRankingScore": true,
"attributesToRetrieve": ["id"], "attributesToRetrieve": ["id"],
"hybrid": {"semanticRatio": 1.0, "embedder": "default"} "hybrid": {"semanticRatio": 1.0}
})) }))
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -1064,7 +1064,7 @@ async fn smaller_dimensions() {
let (response, code) = index let (response, code) = index
.search_post(json!({ .search_post(json!({
"q": "chien de chasse", "q": "chien de chasse",
"hybrid": {"semanticRatio": 1.0, "embedder": "default"} "hybrid": {"semanticRatio": 1.0}
})) }))
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -1104,7 +1104,7 @@ async fn smaller_dimensions() {
let (response, code) = index let (response, code) = index
.search_post(json!({ .search_post(json!({
"q": "petit chien", "q": "petit chien",
"hybrid": {"semanticRatio": 1.0, "embedder": "default"} "hybrid": {"semanticRatio": 1.0}
})) }))
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -1144,7 +1144,7 @@ async fn smaller_dimensions() {
let (response, code) = index let (response, code) = index
.search_post(json!({ .search_post(json!({
"q": "grand chien de berger des montagnes", "q": "grand chien de berger des montagnes",
"hybrid": {"semanticRatio": 1.0, "embedder": "default"} "hybrid": {"semanticRatio": 1.0}
})) }))
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -1295,7 +1295,7 @@ async fn small_embedding_model() {
let (response, code) = index let (response, code) = index
.search_post(json!({ .search_post(json!({
"q": "chien de chasse", "q": "chien de chasse",
"hybrid": {"semanticRatio": 1.0, "embedder": "default"} "hybrid": {"semanticRatio": 1.0}
})) }))
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -1335,7 +1335,7 @@ async fn small_embedding_model() {
let (response, code) = index let (response, code) = index
.search_post(json!({ .search_post(json!({
"q": "petit chien", "q": "petit chien",
"hybrid": {"semanticRatio": 1.0, "embedder": "default"} "hybrid": {"semanticRatio": 1.0}
})) }))
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -1375,7 +1375,7 @@ async fn small_embedding_model() {
let (response, code) = index let (response, code) = index
.search_post(json!({ .search_post(json!({
"q": "grand chien de berger des montagnes", "q": "grand chien de berger des montagnes",
"hybrid": {"semanticRatio": 1.0, "embedder": "default"} "hybrid": {"semanticRatio": 1.0}
})) }))
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -1525,7 +1525,7 @@ async fn legacy_embedding_model() {
let (response, code) = index let (response, code) = index
.search_post(json!({ .search_post(json!({
"q": "chien de chasse", "q": "chien de chasse",
"hybrid": {"semanticRatio": 1.0, "embedder": "default"} "hybrid": {"semanticRatio": 1.0}
})) }))
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -1565,7 +1565,7 @@ async fn legacy_embedding_model() {
let (response, code) = index let (response, code) = index
.search_post(json!({ .search_post(json!({
"q": "petit chien", "q": "petit chien",
"hybrid": {"semanticRatio": 1.0, "embedder": "default"} "hybrid": {"semanticRatio": 1.0}
})) }))
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -1605,7 +1605,7 @@ async fn legacy_embedding_model() {
let (response, code) = index let (response, code) = index
.search_post(json!({ .search_post(json!({
"q": "grand chien de berger des montagnes", "q": "grand chien de berger des montagnes",
"hybrid": {"semanticRatio": 1.0, "embedder": "default"} "hybrid": {"semanticRatio": 1.0}
})) }))
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -1756,7 +1756,7 @@ async fn it_still_works() {
let (response, code) = index let (response, code) = index
.search_post(json!({ .search_post(json!({
"q": "chien de chasse", "q": "chien de chasse",
"hybrid": {"semanticRatio": 1.0, "embedder": "default"} "hybrid": {"semanticRatio": 1.0}
})) }))
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -1796,7 +1796,7 @@ async fn it_still_works() {
let (response, code) = index let (response, code) = index
.search_post(json!({ .search_post(json!({
"q": "petit chien", "q": "petit chien",
"hybrid": {"semanticRatio": 1.0, "embedder": "default"} "hybrid": {"semanticRatio": 1.0}
})) }))
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
@@ -1836,7 +1836,7 @@ async fn it_still_works() {
let (response, code) = index let (response, code) = index
.search_post(json!({ .search_post(json!({
"q": "grand chien de berger des montagnes", "q": "grand chien de berger des montagnes",
"hybrid": {"semanticRatio": 1.0, "embedder": "default"} "hybrid": {"semanticRatio": 1.0}
})) }))
.await; .await;
snapshot!(code, @"200 OK"); snapshot!(code, @"200 OK");
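
Every hunk in this file makes the same edit inside a `semanticRatio: 1.0` request. For orientation: per the comments in the `query_combination` test earlier in this diff, a ratio of 0.0 means pure keyword search, 1.0 pure semantic search, and intermediate values blend the two rankings. A standalone sketch of the two extremes, assuming an embedder named `default`:

use serde_json::json;

fn main() {
    let full_semantic = json!({ "q": "chien de chasse", "hybrid": { "embedder": "default", "semanticRatio": 1.0 } });
    let full_keyword = json!({ "q": "chien de chasse", "hybrid": { "embedder": "default", "semanticRatio": 0.0 } });
    println!("{full_semantic}\n{full_keyword}");
}
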


@@ -4,53 +4,6 @@ use crate::common::{GetAllDocumentsOptions, Server};
use crate::json; use crate::json;
use crate::vector::generate_default_user_provided_documents; use crate::vector::generate_default_user_provided_documents;
#[actix_rt::test]
async fn field_unavailable_for_source() {
let server = Server::new().await;
let index = server.index("doggo");
let (value, code) = server.set_features(json!({"vectorStore": true})).await;
snapshot!(code, @"200 OK");
snapshot!(value, @r###"
{
"vectorStore": true,
"metrics": false,
"logsRoute": false,
"editDocumentsByFunction": false,
"containsFilter": false
}
"###);
let (response, code) = index
.update_settings(json!({
"embedders": { "manual": {"source": "userProvided", "documentTemplate": "{{doc.documentTemplate}}"}},
}))
.await;
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "`.embedders.manual`: Field `documentTemplate` unavailable for source `userProvided` (only available for sources: `huggingFace`, `openAi`, `ollama`, `rest`). Available fields: `source`, `dimensions`, `distribution`, `binaryQuantized`",
"code": "invalid_settings_embedders",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_settings_embedders"
}
"###);
let (response, code) = index
.update_settings(json!({
"embedders": { "default": {"source": "openAi", "revision": "42"}},
}))
.await;
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "`.embedders.default`: Field `revision` unavailable for source `openAi` (only available for sources: `huggingFace`). Available fields: `source`, `model`, `apiKey`, `documentTemplate`, `dimensions`, `distribution`, `url`, `binaryQuantized`",
"code": "invalid_settings_embedders",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_settings_embedders"
}
"###);
}
#[actix_rt::test] #[actix_rt::test]
async fn update_embedder() { async fn update_embedder() {
let server = Server::new().await; let server = Server::new().await;
@@ -265,8 +218,7 @@ async fn reset_embedder_documents() {
"###); "###);
// Make sure the arroy DB has been cleared // Make sure the arroy DB has been cleared
let (documents, _code) = let (documents, _code) = index.search_post(json!({ "vector": [1, 1, 1] })).await;
index.search_post(json!({ "vector": [1, 1, 1], "hybrid": {"embedder": "default"} })).await;
snapshot!(json_string!(documents), @r###" snapshot!(json_string!(documents), @r###"
{ {
"message": "Cannot find embedder with name `default`.", "message": "Cannot find embedder with name `default`.",


@@ -17,7 +17,7 @@ bincode = "1.3.3"
bstr = "1.9.1" bstr = "1.9.1"
bytemuck = { version = "1.16.1", features = ["extern_crate_alloc"] } bytemuck = { version = "1.16.1", features = ["extern_crate_alloc"] }
byteorder = "1.5.0" byteorder = "1.5.0"
charabia = { version = "0.9.1", default-features = false } charabia = { version = "0.9.0", default-features = false }
concat-arrays = "0.1.2" concat-arrays = "0.1.2"
crossbeam-channel = "0.5.13" crossbeam-channel = "0.5.13"
deserr = "0.6.2" deserr = "0.6.2"
@@ -79,8 +79,8 @@ hf-hub = { git = "https://github.com/dureuill/hf-hub.git", branch = "rust_tls",
] } ] }
tiktoken-rs = "0.5.9" tiktoken-rs = "0.5.9"
liquid = "0.26.6" liquid = "0.26.6"
rhai = { git = "https://github.com/rhaiscript/rhai", rev = "ef3df63121d27aacd838f366f2b83fd65f20a1e4", features = ["serde", "no_module", "no_custom_syntax", "no_time", "sync"] } rhai = { version = "1.19.0", features = ["serde", "no_module", "no_custom_syntax", "no_time", "sync"] }
arroy = "0.5.0" arroy = "0.4.0"
rand = "0.8.5" rand = "0.8.5"
tracing = "0.1.40" tracing = "0.1.40"
ureq = { version = "2.10.0", features = ["json"] } ureq = { version = "2.10.0", features = ["json"] }
@@ -98,7 +98,14 @@ rand = { version = "0.8.5", features = ["small_rng"] }
[features] [features]
all-tokenizations = [ all-tokenizations = [
"charabia/default", "charabia/chinese",
"charabia/hebrew",
"charabia/japanese",
"charabia/thai",
"charabia/korean",
"charabia/greek",
"charabia/khmer",
"charabia/vietnamese",
] ]
# Use POSIX semaphores instead of SysV semaphores in LMDB # Use POSIX semaphores instead of SysV semaphores in LMDB
@@ -131,14 +138,8 @@ khmer = ["charabia/khmer"]
# allow vietnamese specialized tokenization # allow vietnamese specialized tokenization
vietnamese = ["charabia/vietnamese"] vietnamese = ["charabia/vietnamese"]
# allow german specialized tokenization
german = ["charabia/german-segmentation"]
# force swedish character recomposition # force swedish character recomposition
swedish-recomposition = ["charabia/swedish-recomposition"] swedish-recomposition = ["charabia/swedish-recomposition"]
# allow turkish specialized tokenization
turkish = ["charabia/turkish"]
# allow CUDA support, see <https://github.com/meilisearch/meilisearch/issues/4306> # allow CUDA support, see <https://github.com/meilisearch/meilisearch/issues/4306>
cuda = ["candle-core/cuda"] cuda = ["candle-core/cuda"]


@@ -258,10 +258,6 @@ only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and undersco
}, },
#[error("`.embedders.{embedder_name}.dimensions`: `dimensions` cannot be zero")] #[error("`.embedders.{embedder_name}.dimensions`: `dimensions` cannot be zero")]
InvalidSettingsDimensions { embedder_name: String }, InvalidSettingsDimensions { embedder_name: String },
#[error(
"`.embedders.{embedder_name}.binaryQuantized`: Cannot disable the binary quantization.\n - Note: Binary quantization is a lossy operation that cannot be reverted.\n - Hint: Add a new embedder that is non-quantized and regenerate the vectors."
)]
InvalidDisableBinaryQuantization { embedder_name: String },
#[error("`.embedders.{embedder_name}.documentTemplateMaxBytes`: `documentTemplateMaxBytes` cannot be zero")] #[error("`.embedders.{embedder_name}.documentTemplateMaxBytes`: `documentTemplateMaxBytes` cannot be zero")]
InvalidSettingsDocumentTemplateMaxBytes { embedder_name: String }, InvalidSettingsDocumentTemplateMaxBytes { embedder_name: String },
#[error("`.embedders.{embedder_name}.url`: could not parse `{url}`: {inner_error}")] #[error("`.embedders.{embedder_name}.url`: could not parse `{url}`: {inner_error}")]
@@ -297,7 +293,6 @@ impl From<arroy::Error> for Error {
arroy::Error::InvalidVecDimension { expected, received } => { arroy::Error::InvalidVecDimension { expected, received } => {
Error::UserError(UserError::InvalidVectorDimensions { expected, found: received }) Error::UserError(UserError::InvalidVectorDimensions { expected, found: received })
} }
arroy::Error::BuildCancelled => Error::InternalError(InternalError::AbortedIndexation),
arroy::Error::DatabaseFull arroy::Error::DatabaseFull
| arroy::Error::InvalidItemAppend | arroy::Error::InvalidItemAppend
| arroy::Error::UnmatchingDistance { .. } | arroy::Error::UnmatchingDistance { .. }
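
The arm removed in this hunk belongs to a `From<arroy::Error>` conversion that funnels dimension mismatches into a user-facing error and everything else into an internal error. A standalone toy of that funnelling pattern, with stand-in types rather than the real `milli` definitions:

```rust
#[derive(Debug)]
enum VecStoreError {
    InvalidVecDimension { expected: usize, received: usize },
    DatabaseFull,
}

#[derive(Debug)]
enum Error {
    UserError(String),
    InternalError(String),
}

impl From<VecStoreError> for Error {
    fn from(e: VecStoreError) -> Self {
        match e {
            // The caller sent a vector of the wrong size: their fault.
            VecStoreError::InvalidVecDimension { expected, received } => {
                Error::UserError(format!("expected {expected} dimensions, found {received}"))
            }
            // Anything else is an engine-side problem.
            other => Error::InternalError(format!("{other:?}")),
        }
    }
}

fn main() {
    let user: Error = VecStoreError::InvalidVecDimension { expected: 3, received: 5 }.into();
    let internal: Error = VecStoreError::DatabaseFull.into();
    println!("{user:?}\n{internal:?}");
}
```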


@@ -21,7 +21,7 @@ use crate::heed_codec::{BEU16StrCodec, FstSetCodec, StrBEU16Codec, StrRefCodec};
use crate::order_by_map::OrderByMap; use crate::order_by_map::OrderByMap;
use crate::proximity::ProximityPrecision; use crate::proximity::ProximityPrecision;
use crate::vector::parsed_vectors::RESERVED_VECTORS_FIELD_NAME; use crate::vector::parsed_vectors::RESERVED_VECTORS_FIELD_NAME;
use crate::vector::{ArroyWrapper, Embedding, EmbeddingConfig}; use crate::vector::{Embedding, EmbeddingConfig};
use crate::{ use crate::{
default_criteria, CboRoaringBitmapCodec, Criterion, DocumentId, ExternalDocumentsIds, default_criteria, CboRoaringBitmapCodec, Criterion, DocumentId, ExternalDocumentsIds,
FacetDistribution, FieldDistribution, FieldId, FieldIdMapMissingEntry, FieldIdWordCountCodec, FacetDistribution, FieldDistribution, FieldId, FieldIdMapMissingEntry, FieldIdWordCountCodec,
@@ -162,7 +162,7 @@ pub struct Index {
/// Maps an embedder name to its id in the arroy store. /// Maps an embedder name to its id in the arroy store.
pub embedder_category_id: Database<Str, U8>, pub embedder_category_id: Database<Str, U8>,
/// Vector store based on arroy™. /// Vector store based on arroy™.
pub vector_arroy: arroy::Database<Unspecified>, pub vector_arroy: arroy::Database<arroy::distances::Angular>,
/// Maps the document id to the document as an obkv store. /// Maps the document id to the document as an obkv store.
pub(crate) documents: Database<BEU32, ObkvCodec>, pub(crate) documents: Database<BEU32, ObkvCodec>,
@@ -1610,6 +1610,22 @@ impl Index {
.unwrap_or_default()) .unwrap_or_default())
} }
pub fn arroy_readers<'a>(
&'a self,
rtxn: &'a RoTxn<'a>,
embedder_id: u8,
) -> impl Iterator<Item = Result<arroy::Reader<'a, arroy::distances::Angular>>> + 'a {
crate::vector::arroy_db_range_for_embedder(embedder_id).map_while(move |k| {
arroy::Reader::open(rtxn, k, self.vector_arroy)
.map(Some)
.or_else(|e| match e {
arroy::Error::MissingMetadata(_) => Ok(None),
e => Err(e.into()),
})
.transpose()
})
}
pub(crate) fn put_search_cutoff(&self, wtxn: &mut RwTxn<'_>, cutoff: u64) -> heed::Result<()> { pub(crate) fn put_search_cutoff(&self, wtxn: &mut RwTxn<'_>, cutoff: u64) -> heed::Result<()> {
self.main.remap_types::<Str, BEU64>().put(wtxn, main_key::SEARCH_CUTOFF, &cutoff) self.main.remap_types::<Str, BEU64>().put(wtxn, main_key::SEARCH_CUTOFF, &cutoff)
} }
@@ -1628,13 +1644,32 @@ impl Index {
docid: DocumentId, docid: DocumentId,
) -> Result<BTreeMap<String, Vec<Embedding>>> { ) -> Result<BTreeMap<String, Vec<Embedding>>> {
let mut res = BTreeMap::new(); let mut res = BTreeMap::new();
let embedding_configs = self.embedding_configs(rtxn)?; for row in self.embedder_category_id.iter(rtxn)? {
for config in embedding_configs { let (embedder_name, embedder_id) = row?;
let embedder_id = self.embedder_category_id.get(rtxn, &config.name)?.unwrap(); let embedder_id = (embedder_id as u16) << 8;
let reader = let mut embeddings = Vec::new();
ArroyWrapper::new(self.vector_arroy, embedder_id, config.config.quantized()); 'vectors: for i in 0..=u8::MAX {
let embeddings = reader.item_vectors(rtxn, docid)?; let reader = arroy::Reader::open(rtxn, embedder_id | (i as u16), self.vector_arroy)
res.insert(config.name.to_owned(), embeddings); .map(Some)
.or_else(|e| match e {
arroy::Error::MissingMetadata(_) => Ok(None),
e => Err(e),
})
.transpose();
let Some(reader) = reader else {
break 'vectors;
};
let embedding = reader?.item_vector(rtxn, docid)?;
if let Some(embedding) = embedding {
embeddings.push(embedding)
} else {
break 'vectors;
}
}
res.insert(embedder_name.to_owned(), embeddings);
} }
Ok(res) Ok(res)
} }
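
Both versions of this lookup walk the same keyspace: the older inline code computes `(embedder_id as u16) << 8 | i` for `i` in `0..=u8::MAX`, which implies that each embedder owns a contiguous block of 256 arroy sub-database indexes. A sketch of what `arroy_db_range_for_embedder` presumably yields — its real body is not shown in this diff, so treat it as an assumption:

```rust
// Assumed layout: embedder `n` owns arroy indexes `n << 8 ..= (n << 8) | 0xFF`.
fn arroy_db_range_for_embedder(embedder_id: u8) -> impl Iterator<Item = u16> {
    let start = (embedder_id as u16) << 8;
    (0..=u8::MAX).map(move |i| start | (i as u16))
}

fn main() {
    let keys: Vec<u16> = arroy_db_range_for_embedder(2).collect();
    assert_eq!(keys.first(), Some(&512)); // 2 << 8
    assert_eq!(keys.len(), 256);          // one sub-database per possible vector slot
    println!("embedder 2 spans arroy indexes {}..={}", keys[0], keys[255]);
}
```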


@@ -190,7 +190,7 @@ impl<'a> Search<'a> {
return Ok(return_keyword_results(self.limit, self.offset, keyword_results)); return Ok(return_keyword_results(self.limit, self.offset, keyword_results));
}; };
// no embedder, no semantic search // no embedder, no semantic search
let Some(SemanticSearch { vector, embedder_name, embedder, quantized }) = semantic else { let Some(SemanticSearch { vector, embedder_name, embedder }) = semantic else {
return Ok(return_keyword_results(self.limit, self.offset, keyword_results)); return Ok(return_keyword_results(self.limit, self.offset, keyword_results));
}; };
@@ -212,7 +212,7 @@ impl<'a> Search<'a> {
}; };
search.semantic = search.semantic =
Some(SemanticSearch { vector: Some(vector_query), embedder_name, embedder, quantized }); Some(SemanticSearch { vector: Some(vector_query), embedder_name, embedder });
// TODO: would be better to have two distinct functions at this point // TODO: would be better to have two distinct functions at this point
let vector_results = search.execute()?; let vector_results = search.execute()?;


@@ -32,7 +32,6 @@ pub struct SemanticSearch {
vector: Option<Vec<f32>>, vector: Option<Vec<f32>>,
embedder_name: String, embedder_name: String,
embedder: Arc<Embedder>, embedder: Arc<Embedder>,
quantized: bool,
} }
pub struct Search<'a> { pub struct Search<'a> {
@@ -90,10 +89,9 @@ impl<'a> Search<'a> {
&mut self, &mut self,
embedder_name: String, embedder_name: String,
embedder: Arc<Embedder>, embedder: Arc<Embedder>,
quantized: bool,
vector: Option<Vec<f32>>, vector: Option<Vec<f32>>,
) -> &mut Search<'a> { ) -> &mut Search<'a> {
self.semantic = Some(SemanticSearch { embedder_name, embedder, quantized, vector }); self.semantic = Some(SemanticSearch { embedder_name, embedder, vector });
self self
} }
@@ -208,7 +206,7 @@ impl<'a> Search<'a> {
degraded, degraded,
used_negative_operator, used_negative_operator,
} = match self.semantic.as_ref() { } = match self.semantic.as_ref() {
Some(SemanticSearch { vector: Some(vector), embedder_name, embedder, quantized }) => { Some(SemanticSearch { vector: Some(vector), embedder_name, embedder }) => {
execute_vector_search( execute_vector_search(
&mut ctx, &mut ctx,
vector, vector,
@@ -221,7 +219,6 @@ impl<'a> Search<'a> {
self.limit, self.limit,
embedder_name, embedder_name,
embedder, embedder,
*quantized,
self.time_budget.clone(), self.time_budget.clone(),
self.ranking_score_threshold, self.ranking_score_threshold,
)? )?


@@ -312,7 +312,6 @@ fn get_ranking_rules_for_placeholder_search<'ctx>(
Ok(ranking_rules) Ok(ranking_rules)
} }
#[allow(clippy::too_many_arguments)]
fn get_ranking_rules_for_vector<'ctx>( fn get_ranking_rules_for_vector<'ctx>(
ctx: &SearchContext<'ctx>, ctx: &SearchContext<'ctx>,
sort_criteria: &Option<Vec<AscDesc>>, sort_criteria: &Option<Vec<AscDesc>>,
@@ -321,7 +320,6 @@ fn get_ranking_rules_for_vector<'ctx>(
target: &[f32], target: &[f32],
embedder_name: &str, embedder_name: &str,
embedder: &Embedder, embedder: &Embedder,
quantized: bool,
) -> Result<Vec<BoxRankingRule<'ctx, PlaceholderQuery>>> { ) -> Result<Vec<BoxRankingRule<'ctx, PlaceholderQuery>>> {
// query graph search // query graph search
@@ -349,7 +347,6 @@ fn get_ranking_rules_for_vector<'ctx>(
limit_plus_offset, limit_plus_offset,
embedder_name, embedder_name,
embedder, embedder,
quantized,
)?; )?;
ranking_rules.push(Box::new(vector_sort)); ranking_rules.push(Box::new(vector_sort));
vector = true; vector = true;
@@ -579,7 +576,6 @@ pub fn execute_vector_search(
length: usize, length: usize,
embedder_name: &str, embedder_name: &str,
embedder: &Embedder, embedder: &Embedder,
quantized: bool,
time_budget: TimeBudget, time_budget: TimeBudget,
ranking_score_threshold: Option<f64>, ranking_score_threshold: Option<f64>,
) -> Result<PartialSearchResult> { ) -> Result<PartialSearchResult> {
@@ -595,7 +591,6 @@ pub fn execute_vector_search(
vector, vector,
embedder_name, embedder_name,
embedder, embedder,
quantized,
)?; )?;
let mut placeholder_search_logger = logger::DefaultSearchLogger; let mut placeholder_search_logger = logger::DefaultSearchLogger;


@@ -1,10 +1,11 @@
use std::iter::FromIterator; use std::iter::FromIterator;
use ordered_float::OrderedFloat;
use roaring::RoaringBitmap; use roaring::RoaringBitmap;
use super::ranking_rules::{RankingRule, RankingRuleOutput, RankingRuleQueryTrait}; use super::ranking_rules::{RankingRule, RankingRuleOutput, RankingRuleQueryTrait};
use crate::score_details::{self, ScoreDetails}; use crate::score_details::{self, ScoreDetails};
use crate::vector::{ArroyWrapper, DistributionShift, Embedder}; use crate::vector::{DistributionShift, Embedder};
use crate::{DocumentId, Result, SearchContext, SearchLogger}; use crate::{DocumentId, Result, SearchContext, SearchLogger};
pub struct VectorSort<Q: RankingRuleQueryTrait> { pub struct VectorSort<Q: RankingRuleQueryTrait> {
@@ -15,7 +16,6 @@ pub struct VectorSort<Q: RankingRuleQueryTrait> {
limit: usize, limit: usize,
distribution_shift: Option<DistributionShift>, distribution_shift: Option<DistributionShift>,
embedder_index: u8, embedder_index: u8,
quantized: bool,
} }
impl<Q: RankingRuleQueryTrait> VectorSort<Q> { impl<Q: RankingRuleQueryTrait> VectorSort<Q> {
@@ -26,7 +26,6 @@ impl<Q: RankingRuleQueryTrait> VectorSort<Q> {
limit: usize, limit: usize,
embedder_name: &str, embedder_name: &str,
embedder: &Embedder, embedder: &Embedder,
quantized: bool,
) -> Result<Self> { ) -> Result<Self> {
let embedder_index = ctx let embedder_index = ctx
.index .index
@@ -42,7 +41,6 @@ impl<Q: RankingRuleQueryTrait> VectorSort<Q> {
limit, limit,
distribution_shift: embedder.distribution(), distribution_shift: embedder.distribution(),
embedder_index, embedder_index,
quantized,
}) })
} }
@@ -51,10 +49,19 @@ impl<Q: RankingRuleQueryTrait> VectorSort<Q> {
ctx: &mut SearchContext<'_>, ctx: &mut SearchContext<'_>,
vector_candidates: &RoaringBitmap, vector_candidates: &RoaringBitmap,
) -> Result<()> { ) -> Result<()> {
let target = &self.target; let readers: std::result::Result<Vec<_>, _> =
ctx.index.arroy_readers(ctx.txn, self.embedder_index).collect();
let readers = readers?;
let reader = ArroyWrapper::new(ctx.index.vector_arroy, self.embedder_index, self.quantized); let target = &self.target;
let results = reader.nns_by_vector(ctx.txn, target, self.limit, Some(vector_candidates))?; let mut results = Vec::new();
for reader in readers.iter() {
let nns_by_vector =
reader.nns_by_vector(ctx.txn, target, self.limit, None, Some(vector_candidates))?;
results.extend(nns_by_vector.into_iter());
}
results.sort_unstable_by_key(|(_, distance)| OrderedFloat(*distance));
self.cached_sorted_docids = results.into_iter(); self.cached_sorted_docids = results.into_iter();
Ok(()) Ok(())
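
The right column's merge step recurs in several hunks of this PR: per-sub-index k-NN results are concatenated, then globally ordered by distance, with `OrderedFloat` supplying the total order that `f32` lacks because of NaN. A minimal, self-contained sketch of that step (`merge_nns` and the final `truncate` are illustrative, not the real function):

```rust
use ordered_float::OrderedFloat; // same crate the surrounding code imports

fn merge_nns(per_reader: Vec<Vec<(u32, f32)>>, limit: usize) -> Vec<(u32, f32)> {
    // Concatenate the candidates returned by each sub-database reader...
    let mut results: Vec<(u32, f32)> = per_reader.into_iter().flatten().collect();
    // ...then sort them globally by distance, smallest first.
    results.sort_unstable_by_key(|(_, distance)| OrderedFloat(*distance));
    results.truncate(limit);
    results
}

fn main() {
    let merged = merge_nns(vec![vec![(1, 0.9), (2, 0.1)], vec![(3, 0.5)]], 2);
    assert_eq!(merged, vec![(2, 0.1), (3, 0.5)]);
}
```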


@@ -1,9 +1,10 @@
use std::sync::Arc; use std::sync::Arc;
use ordered_float::OrderedFloat;
use roaring::RoaringBitmap; use roaring::RoaringBitmap;
use crate::score_details::{self, ScoreDetails}; use crate::score_details::{self, ScoreDetails};
use crate::vector::{ArroyWrapper, Embedder}; use crate::vector::Embedder;
use crate::{filtered_universe, DocumentId, Filter, Index, Result, SearchResult}; use crate::{filtered_universe, DocumentId, Filter, Index, Result, SearchResult};
pub struct Similar<'a> { pub struct Similar<'a> {
@@ -17,11 +18,9 @@ pub struct Similar<'a> {
embedder_name: String, embedder_name: String,
embedder: Arc<Embedder>, embedder: Arc<Embedder>,
ranking_score_threshold: Option<f64>, ranking_score_threshold: Option<f64>,
quantized: bool,
} }
impl<'a> Similar<'a> { impl<'a> Similar<'a> {
#[allow(clippy::too_many_arguments)]
pub fn new( pub fn new(
id: DocumentId, id: DocumentId,
offset: usize, offset: usize,
@@ -30,7 +29,6 @@ impl<'a> Similar<'a> {
rtxn: &'a heed::RoTxn<'a>, rtxn: &'a heed::RoTxn<'a>,
embedder_name: String, embedder_name: String,
embedder: Arc<Embedder>, embedder: Arc<Embedder>,
quantized: bool,
) -> Self { ) -> Self {
Self { Self {
id, id,
@@ -42,7 +40,6 @@ impl<'a> Similar<'a> {
embedder_name, embedder_name,
embedder, embedder,
ranking_score_threshold: None, ranking_score_threshold: None,
quantized,
} }
} }
@@ -70,13 +67,29 @@ impl<'a> Similar<'a> {
.get(self.rtxn, &self.embedder_name)? .get(self.rtxn, &self.embedder_name)?
.ok_or_else(|| crate::UserError::InvalidEmbedder(self.embedder_name.to_owned()))?; .ok_or_else(|| crate::UserError::InvalidEmbedder(self.embedder_name.to_owned()))?;
let reader = ArroyWrapper::new(self.index.vector_arroy, embedder_index, self.quantized); let readers: std::result::Result<Vec<_>, _> =
let results = reader.nns_by_item( self.index.arroy_readers(self.rtxn, embedder_index).collect();
self.rtxn,
self.id, let readers = readers?;
self.limit + self.offset + 1,
Some(&universe), let mut results = Vec::new();
)?;
for reader in readers.iter() {
let nns_by_item = reader.nns_by_item(
self.rtxn,
self.id,
self.limit + self.offset + 1,
None,
Some(&universe),
)?;
if let Some(mut nns_by_item) = nns_by_item {
results.append(&mut nns_by_item);
} else {
break;
}
}
results.sort_unstable_by_key(|(_, distance)| OrderedFloat(*distance));
let mut documents_ids = Vec::with_capacity(self.limit); let mut documents_ids = Vec::with_capacity(self.limit);
let mut document_scores = Vec::with_capacity(self.limit); let mut document_scores = Vec::with_capacity(self.limit);


@@ -20,7 +20,7 @@ use crate::update::del_add::{DelAdd, KvReaderDelAdd, KvWriterDelAdd};
use crate::update::settings::InnerIndexSettingsDiff; use crate::update::settings::InnerIndexSettingsDiff;
use crate::vector::error::{EmbedErrorKind, PossibleEmbeddingMistakes, UnusedVectorsDistribution}; use crate::vector::error::{EmbedErrorKind, PossibleEmbeddingMistakes, UnusedVectorsDistribution};
use crate::vector::parsed_vectors::{ParsedVectorsDiff, VectorState, RESERVED_VECTORS_FIELD_NAME}; use crate::vector::parsed_vectors::{ParsedVectorsDiff, VectorState, RESERVED_VECTORS_FIELD_NAME};
use crate::vector::settings::ReindexAction; use crate::vector::settings::{EmbedderAction, ReindexAction};
use crate::vector::{Embedder, Embeddings}; use crate::vector::{Embedder, Embeddings};
use crate::{try_split_array_at, DocumentId, FieldId, Result, ThreadPoolNoAbort}; use crate::{try_split_array_at, DocumentId, FieldId, Result, ThreadPoolNoAbort};
@@ -208,65 +208,65 @@ pub fn extract_vector_points<R: io::Read + io::Seek>(
if reindex_vectors { if reindex_vectors {
for (name, action) in settings_diff.embedding_config_updates.iter() { for (name, action) in settings_diff.embedding_config_updates.iter() {
if let Some(action) = action.reindex() { match action {
let Some((embedder_name, (embedder, prompt, _quantized))) = EmbedderAction::WriteBackToDocuments(_) => continue, // already deleted
configs.remove_entry(name) EmbedderAction::Reindex(action) => {
else { let Some((embedder_name, (embedder, prompt))) = configs.remove_entry(name)
tracing::error!(embedder = name, "Requested embedder config not found"); else {
continue; tracing::error!(embedder = name, "Requested embedder config not found");
}; continue;
};
// (docid, _index) -> KvWriterDelAdd -> Vector // (docid, _index) -> KvWriterDelAdd -> Vector
let manual_vectors_writer = create_writer( let manual_vectors_writer = create_writer(
indexer.chunk_compression_type, indexer.chunk_compression_type,
indexer.chunk_compression_level, indexer.chunk_compression_level,
tempfile::tempfile()?, tempfile::tempfile()?,
); );
// (docid) -> (prompt) // (docid) -> (prompt)
let prompts_writer = create_writer( let prompts_writer = create_writer(
indexer.chunk_compression_type, indexer.chunk_compression_type,
indexer.chunk_compression_level, indexer.chunk_compression_level,
tempfile::tempfile()?, tempfile::tempfile()?,
); );
// (docid) -> () // (docid) -> ()
let remove_vectors_writer = create_writer( let remove_vectors_writer = create_writer(
indexer.chunk_compression_type, indexer.chunk_compression_type,
indexer.chunk_compression_level, indexer.chunk_compression_level,
tempfile::tempfile()?, tempfile::tempfile()?,
); );
let action = match action { let action = match action {
ReindexAction::FullReindex => ExtractionAction::SettingsFullReindex, ReindexAction::FullReindex => ExtractionAction::SettingsFullReindex,
ReindexAction::RegeneratePrompts => { ReindexAction::RegeneratePrompts => {
let Some((_, old_prompt, _quantized)) = old_configs.get(name) else { let Some((_, old_prompt)) = old_configs.get(name) else {
tracing::error!(embedder = name, "Old embedder config not found"); tracing::error!(embedder = name, "Old embedder config not found");
continue; continue;
}; };
ExtractionAction::SettingsRegeneratePrompts { old_prompt } ExtractionAction::SettingsRegeneratePrompts { old_prompt }
} }
}; };
extractors.push(EmbedderVectorExtractor { extractors.push(EmbedderVectorExtractor {
embedder_name, embedder_name,
embedder, embedder,
prompt, prompt,
prompts_writer, prompts_writer,
remove_vectors_writer, remove_vectors_writer,
manual_vectors_writer, manual_vectors_writer,
add_to_user_provided: RoaringBitmap::new(), add_to_user_provided: RoaringBitmap::new(),
action, action,
}); });
} else { }
continue;
} }
} }
} else { } else {
// document operation // document operation
for (embedder_name, (embedder, prompt, _quantized)) in configs.into_iter() { for (embedder_name, (embedder, prompt)) in configs.into_iter() {
// (docid, _index) -> KvWriterDelAdd -> Vector // (docid, _index) -> KvWriterDelAdd -> Vector
let manual_vectors_writer = create_writer( let manual_vectors_writer = create_writer(
indexer.chunk_compression_type, indexer.chunk_compression_type,


@@ -34,7 +34,7 @@ use crate::index::IndexEmbeddingConfig;
use crate::update::settings::InnerIndexSettingsDiff; use crate::update::settings::InnerIndexSettingsDiff;
use crate::vector::error::PossibleEmbeddingMistakes; use crate::vector::error::PossibleEmbeddingMistakes;
use crate::{FieldId, Result, ThreadPoolNoAbort, ThreadPoolNoAbortBuilder}; use crate::{FieldId, Result, ThreadPoolNoAbort, ThreadPoolNoAbortBuilder};
/// Hello!
/// Extract data for each database from obkv documents in parallel. /// Extract data for each database from obkv documents in parallel.
/// Send data in grenad file over provided Sender. /// Send data in grenad file over provided Sender.
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]


@@ -43,7 +43,7 @@ use crate::update::index_documents::parallel::ImmutableObkvs;
use crate::update::{ use crate::update::{
IndexerConfig, UpdateIndexingStep, WordPrefixDocids, WordPrefixIntegerDocids, WordsPrefixesFst, IndexerConfig, UpdateIndexingStep, WordPrefixDocids, WordPrefixIntegerDocids, WordsPrefixesFst,
}; };
use crate::vector::{ArroyWrapper, EmbeddingConfigs}; use crate::vector::EmbeddingConfigs;
use crate::{CboRoaringBitmapCodec, Index, Object, Result}; use crate::{CboRoaringBitmapCodec, Index, Object, Result};
static MERGED_DATABASE_COUNT: usize = 7; static MERGED_DATABASE_COUNT: usize = 7;
@@ -679,42 +679,23 @@ where
let number_of_documents = self.index.number_of_documents(self.wtxn)?; let number_of_documents = self.index.number_of_documents(self.wtxn)?;
let mut rng = rand::rngs::StdRng::seed_from_u64(42); let mut rng = rand::rngs::StdRng::seed_from_u64(42);
// If an embedder wasn't used in the typedchunk but must be binary quantized
// we should insert it in `dimension`
for (name, action) in settings_diff.embedding_config_updates.iter() {
if action.is_being_quantized && !dimension.contains_key(name.as_str()) {
let index = self.index.embedder_category_id.get(self.wtxn, name)?.ok_or(
InternalError::DatabaseMissingEntry {
db_name: "embedder_category_id",
key: None,
},
)?;
let reader =
ArroyWrapper::new(self.index.vector_arroy, index, action.was_quantized);
let dim = reader.dimensions(self.wtxn)?;
dimension.insert(name.to_string(), dim);
}
}
for (embedder_name, dimension) in dimension { for (embedder_name, dimension) in dimension {
let wtxn = &mut *self.wtxn; let wtxn = &mut *self.wtxn;
let vector_arroy = self.index.vector_arroy; let vector_arroy = self.index.vector_arroy;
let cancel = &self.should_abort;
let embedder_index = self.index.embedder_category_id.get(wtxn, &embedder_name)?.ok_or( let embedder_index = self.index.embedder_category_id.get(wtxn, &embedder_name)?.ok_or(
InternalError::DatabaseMissingEntry { db_name: "embedder_category_id", key: None }, InternalError::DatabaseMissingEntry { db_name: "embedder_category_id", key: None },
)?; )?;
let embedder_config = settings_diff.embedding_config_updates.get(&embedder_name);
let was_quantized = settings_diff
.old
.embedding_configs
.get(&embedder_name)
.map_or(false, |conf| conf.2);
let is_quantizing = embedder_config.map_or(false, |action| action.is_being_quantized);
pool.install(|| { pool.install(|| {
let mut writer = ArroyWrapper::new(vector_arroy, embedder_index, was_quantized); for k in crate::vector::arroy_db_range_for_embedder(embedder_index) {
writer.build_and_quantize(wtxn, &mut rng, dimension, is_quantizing, cancel)?; let writer = arroy::Writer::new(vector_arroy, k, dimension);
if writer.need_build(wtxn)? {
writer.build(wtxn, &mut rng, None)?;
} else if writer.is_empty(wtxn)? {
break;
}
}
Result::Ok(()) Result::Ok(())
}) })
.map_err(InternalError::from)??; .map_err(InternalError::from)??;
@@ -2765,7 +2746,6 @@ mod tests {
response: Setting::NotSet, response: Setting::NotSet,
distribution: Setting::NotSet, distribution: Setting::NotSet,
headers: Setting::NotSet, headers: Setting::NotSet,
binary_quantized: Setting::NotSet,
}), }),
); );
settings.set_embedder_settings(embedders); settings.set_embedder_settings(embedders);
@@ -2794,7 +2774,7 @@ mod tests {
std::sync::Arc::new(crate::vector::Embedder::new(embedder.embedder_options).unwrap()); std::sync::Arc::new(crate::vector::Embedder::new(embedder.embedder_options).unwrap());
let res = index let res = index
.search(&rtxn) .search(&rtxn)
.semantic(embedder_name, embedder, false, Some([0.0, 1.0, 2.0].to_vec())) .semantic(embedder_name, embedder, Some([0.0, 1.0, 2.0].to_vec()))
.execute() .execute()
.unwrap(); .unwrap();
assert_eq!(res.documents_ids.len(), 3); assert_eq!(res.documents_ids.len(), 3);


@@ -28,8 +28,7 @@ use crate::update::index_documents::GrenadParameters;
use crate::update::settings::{InnerIndexSettings, InnerIndexSettingsDiff}; use crate::update::settings::{InnerIndexSettings, InnerIndexSettingsDiff};
use crate::update::{AvailableDocumentsIds, UpdateIndexingStep}; use crate::update::{AvailableDocumentsIds, UpdateIndexingStep};
use crate::vector::parsed_vectors::{ExplicitVectors, VectorOrArrayOfVectors}; use crate::vector::parsed_vectors::{ExplicitVectors, VectorOrArrayOfVectors};
use crate::vector::settings::WriteBackToDocuments; use crate::vector::settings::{EmbedderAction, WriteBackToDocuments};
use crate::vector::ArroyWrapper;
use crate::{ use crate::{
is_faceted_by, FieldDistribution, FieldId, FieldIdMapMissingEntry, FieldsIdsMap, Index, Result, is_faceted_by, FieldDistribution, FieldId, FieldIdMapMissingEntry, FieldsIdsMap, Index, Result,
}; };
@@ -990,24 +989,29 @@ impl<'a, 'i> Transform<'a, 'i> {
None None
}; };
let readers: BTreeMap<&str, (ArroyWrapper, &RoaringBitmap)> = settings_diff let readers: Result<
BTreeMap<&str, (Vec<arroy::Reader<'_, arroy::distances::Angular>>, &RoaringBitmap)>,
> = settings_diff
.embedding_config_updates .embedding_config_updates
.iter() .iter()
.filter_map(|(name, action)| { .filter_map(|(name, action)| {
if let Some(WriteBackToDocuments { embedder_id, user_provided }) = if let EmbedderAction::WriteBackToDocuments(WriteBackToDocuments {
action.write_back() embedder_id,
user_provided,
}) = action
{ {
let reader = ArroyWrapper::new( let readers: Result<Vec<_>> =
self.index.vector_arroy, self.index.arroy_readers(wtxn, *embedder_id).collect();
*embedder_id, match readers {
action.was_quantized, Ok(readers) => Some(Ok((name.as_str(), (readers, user_provided)))),
); Err(error) => Some(Err(error)),
Some((name.as_str(), (reader, user_provided))) }
} else { } else {
None None
} }
}) })
.collect(); .collect();
let readers = readers?;
let old_vectors_fid = settings_diff let old_vectors_fid = settings_diff
.old .old
@@ -1045,24 +1049,34 @@ impl<'a, 'i> Transform<'a, 'i> {
arroy::Error, arroy::Error,
> = readers > = readers
.iter() .iter()
.filter_map(|(name, (reader, user_provided))| { .filter_map(|(name, (readers, user_provided))| {
if !user_provided.contains(docid) { if !user_provided.contains(docid) {
return None; return None;
} }
match reader.item_vectors(wtxn, docid) { let mut vectors = Vec::new();
Ok(vectors) if vectors.is_empty() => None, for reader in readers {
Ok(vectors) => Some(Ok(( let Some(vector) = reader.item_vector(wtxn, docid).transpose() else {
name.to_string(), break;
serde_json::to_value(ExplicitVectors { };
embeddings: Some(
VectorOrArrayOfVectors::from_array_of_vectors(vectors), match vector {
), Ok(vector) => vectors.push(vector),
regenerate: false, Err(error) => return Some(Err(error)),
}) }
.unwrap(),
))),
Err(e) => Some(Err(e)),
} }
if vectors.is_empty() {
return None;
}
Some(Ok((
name.to_string(),
serde_json::to_value(ExplicitVectors {
embeddings: Some(VectorOrArrayOfVectors::from_array_of_vectors(
vectors,
)),
regenerate: false,
})
.unwrap(),
)))
}) })
.collect(); .collect();
@@ -1090,10 +1104,21 @@ impl<'a, 'i> Transform<'a, 'i> {
} }
} }
let mut writers = Vec::new();
// delete all vectors from the embedders that need removal // delete all vectors from the embedders that need removal
for (_, (reader, _)) in readers { for (_, (readers, _)) in readers {
let dimensions = reader.dimensions(wtxn)?; for reader in readers {
reader.clear(wtxn, dimensions)?; let dimensions = reader.dimensions();
let arroy_index = reader.index();
drop(reader);
let writer = arroy::Writer::new(self.index.vector_arroy, arroy_index, dimensions);
writers.push(writer);
}
}
for writer in writers {
writer.clear(wtxn)?;
} }
let grenad_params = GrenadParameters { let grenad_params = GrenadParameters {


@@ -27,7 +27,6 @@ use crate::update::index_documents::helpers::{
as_cloneable_grenad, keep_latest_obkv, try_split_array_at, as_cloneable_grenad, keep_latest_obkv, try_split_array_at,
}; };
use crate::update::settings::InnerIndexSettingsDiff; use crate::update::settings::InnerIndexSettingsDiff;
use crate::vector::ArroyWrapper;
use crate::{ use crate::{
lat_lng_to_xyz, CboRoaringBitmapCodec, DocumentId, FieldId, GeoPoint, Index, InternalError, lat_lng_to_xyz, CboRoaringBitmapCodec, DocumentId, FieldId, GeoPoint, Index, InternalError,
Result, SerializationError, U8StrStrCodec, Result, SerializationError, U8StrStrCodec,
@@ -667,20 +666,23 @@ pub(crate) fn write_typed_chunk_into_index(
let embedder_index = index.embedder_category_id.get(wtxn, &embedder_name)?.ok_or( let embedder_index = index.embedder_category_id.get(wtxn, &embedder_name)?.ok_or(
InternalError::DatabaseMissingEntry { db_name: "embedder_category_id", key: None }, InternalError::DatabaseMissingEntry { db_name: "embedder_category_id", key: None },
)?; )?;
let binary_quantized = settings_diff
.old
.embedding_configs
.get(&embedder_name)
.map_or(false, |conf| conf.2);
// FIXME: allow customizing distance // FIXME: allow customizing distance
let writer = ArroyWrapper::new(index.vector_arroy, embedder_index, binary_quantized); let writers: Vec<_> = crate::vector::arroy_db_range_for_embedder(embedder_index)
.map(|k| arroy::Writer::new(index.vector_arroy, k, expected_dimension))
.collect();
// remove vectors for docids we want them removed // remove vectors for docids we want them removed
let merger = remove_vectors_builder.build(); let merger = remove_vectors_builder.build();
let mut iter = merger.into_stream_merger_iter()?; let mut iter = merger.into_stream_merger_iter()?;
while let Some((key, _)) = iter.next()? { while let Some((key, _)) = iter.next()? {
let docid = key.try_into().map(DocumentId::from_be_bytes).unwrap(); let docid = key.try_into().map(DocumentId::from_be_bytes).unwrap();
writer.del_items(wtxn, expected_dimension, docid)?;
for writer in &writers {
// Uses invariant: vectors are packed in the first writers.
if !writer.del_item(wtxn, docid)? {
break;
}
}
} }
// add generated embeddings // add generated embeddings
@@ -708,7 +710,9 @@ pub(crate) fn write_typed_chunk_into_index(
embeddings.embedding_count(), embeddings.embedding_count(),
))); )));
} }
writer.add_items(wtxn, docid, &embeddings)?; for (embedding, writer) in embeddings.iter().zip(&writers) {
writer.add_item(wtxn, docid, embedding)?;
}
} }
// perform the manual diff // perform the manual diff
@@ -723,14 +727,46 @@ pub(crate) fn write_typed_chunk_into_index(
if let Some(value) = vector_deladd_obkv.get(DelAdd::Deletion) { if let Some(value) = vector_deladd_obkv.get(DelAdd::Deletion) {
let vector: Vec<f32> = pod_collect_to_vec(value); let vector: Vec<f32> = pod_collect_to_vec(value);
writer.del_item(wtxn, docid, &vector)?; let mut deleted_index = None;
for (index, writer) in writers.iter().enumerate() {
let Some(candidate) = writer.item_vector(wtxn, docid)? else {
// uses invariant: vectors are packed in the first writers.
break;
};
if candidate == vector {
writer.del_item(wtxn, docid)?;
deleted_index = Some(index);
}
}
// 🥲 enforce invariant: vectors are packed in the first writers.
if let Some(deleted_index) = deleted_index {
let mut last_index_with_a_vector = None;
for (index, writer) in writers.iter().enumerate().skip(deleted_index) {
let Some(candidate) = writer.item_vector(wtxn, docid)? else {
break;
};
last_index_with_a_vector = Some((index, candidate));
}
if let Some((last_index, vector)) = last_index_with_a_vector {
// unwrap: computed the index from the list of writers
let writer = writers.get(last_index).unwrap();
writer.del_item(wtxn, docid)?;
writers.get(deleted_index).unwrap().add_item(wtxn, docid, &vector)?;
}
}
} }
if let Some(value) = vector_deladd_obkv.get(DelAdd::Addition) { if let Some(value) = vector_deladd_obkv.get(DelAdd::Addition) {
let vector = pod_collect_to_vec(value); let vector = pod_collect_to_vec(value);
// overflow was detected during vector extraction. // overflow was detected during vector extraction.
writer.add_item(wtxn, docid, &vector)?; for writer in &writers {
if !writer.contains_item(wtxn, docid)? {
writer.add_item(wtxn, docid, &vector)?;
break;
}
}
} }
} }


@@ -954,7 +954,7 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
let old_configs = self.index.embedding_configs(self.wtxn)?; let old_configs = self.index.embedding_configs(self.wtxn)?;
let remove_all: Result<BTreeMap<String, EmbedderAction>> = old_configs let remove_all: Result<BTreeMap<String, EmbedderAction>> = old_configs
.into_iter() .into_iter()
.map(|IndexEmbeddingConfig { name, config, user_provided }| -> Result<_> { .map(|IndexEmbeddingConfig { name, config: _, user_provided }| -> Result<_> {
let embedder_id = let embedder_id =
self.index.embedder_category_id.get(self.wtxn, &name)?.ok_or( self.index.embedder_category_id.get(self.wtxn, &name)?.ok_or(
crate::InternalError::DatabaseMissingEntry { crate::InternalError::DatabaseMissingEntry {
@@ -964,10 +964,10 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
)?; )?;
Ok(( Ok((
name, name,
EmbedderAction::with_write_back( EmbedderAction::WriteBackToDocuments(WriteBackToDocuments {
WriteBackToDocuments { embedder_id, user_provided }, embedder_id,
config.quantized(), user_provided,
), }),
)) ))
}) })
.collect(); .collect();
@@ -1004,8 +1004,7 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
match joined { match joined {
// updated config // updated config
EitherOrBoth::Both((name, (old, user_provided)), (_, new)) => { EitherOrBoth::Both((name, (old, user_provided)), (_, new)) => {
let was_quantized = old.binary_quantized.set().unwrap_or_default(); let settings_diff = SettingsDiff::from_settings(old, new);
let settings_diff = SettingsDiff::from_settings(&name, old, new)?;
match settings_diff { match settings_diff {
SettingsDiff::Remove => { SettingsDiff::Remove => {
tracing::debug!( tracing::debug!(
@@ -1024,29 +1023,25 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
self.index.embedder_category_id.delete(self.wtxn, &name)?; self.index.embedder_category_id.delete(self.wtxn, &name)?;
embedder_actions.insert( embedder_actions.insert(
name, name,
EmbedderAction::with_write_back( EmbedderAction::WriteBackToDocuments(WriteBackToDocuments {
WriteBackToDocuments { embedder_id, user_provided }, embedder_id,
was_quantized, user_provided,
), }),
); );
} }
SettingsDiff::Reindex { action, updated_settings, quantize } => { SettingsDiff::Reindex { action, updated_settings } => {
tracing::debug!( tracing::debug!(
embedder = name, embedder = name,
user_provided = user_provided.len(), user_provided = user_provided.len(),
?action, ?action,
"reindex embedder" "reindex embedder"
); );
embedder_actions.insert( embedder_actions.insert(name.clone(), EmbedderAction::Reindex(action));
name.clone(),
EmbedderAction::with_reindex(action, was_quantized)
.with_is_being_quantized(quantize),
);
let new = let new =
validate_embedding_settings(Setting::Set(updated_settings), &name)?; validate_embedding_settings(Setting::Set(updated_settings), &name)?;
updated_configs.insert(name, (new, user_provided)); updated_configs.insert(name, (new, user_provided));
} }
SettingsDiff::UpdateWithoutReindex { updated_settings, quantize } => { SettingsDiff::UpdateWithoutReindex { updated_settings } => {
tracing::debug!( tracing::debug!(
embedder = name, embedder = name,
user_provided = user_provided.len(), user_provided = user_provided.len(),
@@ -1054,12 +1049,6 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
); );
let new = let new =
validate_embedding_settings(Setting::Set(updated_settings), &name)?; validate_embedding_settings(Setting::Set(updated_settings), &name)?;
if quantize {
embedder_actions.insert(
name.clone(),
EmbedderAction::default().with_is_being_quantized(true),
);
}
updated_configs.insert(name, (new, user_provided)); updated_configs.insert(name, (new, user_provided));
} }
} }
@@ -1078,10 +1067,8 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
&mut setting, &mut setting,
); );
let setting = validate_embedding_settings(setting, &name)?; let setting = validate_embedding_settings(setting, &name)?;
embedder_actions.insert( embedder_actions
name.clone(), .insert(name.clone(), EmbedderAction::Reindex(ReindexAction::FullReindex));
EmbedderAction::with_reindex(ReindexAction::FullReindex, false),
);
updated_configs.insert(name, (setting, RoaringBitmap::new())); updated_configs.insert(name, (setting, RoaringBitmap::new()));
} }
} }
@@ -1095,14 +1082,19 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
let mut find_free_index = let mut find_free_index =
move || free_indices.find(|(_, free)| **free).map(|(index, _)| index as u8); move || free_indices.find(|(_, free)| **free).map(|(index, _)| index as u8);
for (name, action) in embedder_actions.iter() { for (name, action) in embedder_actions.iter() {
// ignore actions that are not possible for a new embedder match action {
if matches!(action.reindex(), Some(ReindexAction::FullReindex)) EmbedderAction::Reindex(ReindexAction::RegeneratePrompts) => {
&& self.index.embedder_category_id.get(self.wtxn, name)?.is_none() /* cannot be a new embedder, so has to have an id already */
{ }
let id = EmbedderAction::Reindex(ReindexAction::FullReindex) => {
find_free_index().ok_or(UserError::TooManyEmbedders(updated_configs.len()))?; if self.index.embedder_category_id.get(self.wtxn, name)?.is_none() {
tracing::debug!(embedder = name, id, "assigning free id to new embedder"); let id = find_free_index()
self.index.embedder_category_id.put(self.wtxn, name, &id)?; .ok_or(UserError::TooManyEmbedders(updated_configs.len()))?;
tracing::debug!(embedder = name, id, "assigning free id to new embedder");
self.index.embedder_category_id.put(self.wtxn, name, &id)?;
}
}
EmbedderAction::WriteBackToDocuments(_) => { /* already removed */ }
} }
} }
let updated_configs: Vec<IndexEmbeddingConfig> = updated_configs let updated_configs: Vec<IndexEmbeddingConfig> = updated_configs
@@ -1285,11 +1277,7 @@ impl InnerIndexSettingsDiff {
// if the user-defined searchables changed, then we need to reindex prompts. // if the user-defined searchables changed, then we need to reindex prompts.
if cache_user_defined_searchables { if cache_user_defined_searchables {
for (embedder_name, (config, _, _quantized)) in for (embedder_name, (config, _)) in new_settings.embedding_configs.inner_as_ref() {
new_settings.embedding_configs.inner_as_ref()
{
let was_quantized =
old_settings.embedding_configs.get(embedder_name).map_or(false, |conf| conf.2);
// skip embedders that don't use document templates // skip embedders that don't use document templates
if !config.uses_document_template() { if !config.uses_document_template() {
continue; continue;
@@ -1299,19 +1287,16 @@ impl InnerIndexSettingsDiff {
// this always makes the code clearer by explicitly handling the cases // this always makes the code clearer by explicitly handling the cases
match embedding_config_updates.entry(embedder_name.clone()) { match embedding_config_updates.entry(embedder_name.clone()) {
std::collections::btree_map::Entry::Vacant(entry) => { std::collections::btree_map::Entry::Vacant(entry) => {
entry.insert(EmbedderAction::with_reindex( entry.insert(EmbedderAction::Reindex(ReindexAction::RegeneratePrompts));
ReindexAction::RegeneratePrompts,
was_quantized,
));
}
std::collections::btree_map::Entry::Occupied(entry) => {
let EmbedderAction {
was_quantized: _,
is_being_quantized: _,
write_back: _, // We are deleting this embedder, so no point in regeneration
reindex: _, // We are already fully reindexing
} = entry.get();
} }
std::collections::btree_map::Entry::Occupied(entry) => match entry.get() {
EmbedderAction::WriteBackToDocuments(_) => { /* we are deleting this embedder, so no point in regeneration */
}
EmbedderAction::Reindex(ReindexAction::FullReindex) => { /* we are already fully reindexing */
}
EmbedderAction::Reindex(ReindexAction::RegeneratePrompts) => { /* we are already regenerating prompts */
}
},
}; };
} }
} }
@@ -1561,7 +1546,7 @@ fn embedders(embedding_configs: Vec<IndexEmbeddingConfig>) -> Result<EmbeddingCo
.map( .map(
|IndexEmbeddingConfig { |IndexEmbeddingConfig {
name, name,
config: EmbeddingConfig { embedder_options, prompt, quantized }, config: EmbeddingConfig { embedder_options, prompt },
.. ..
}| { }| {
let prompt = Arc::new(prompt.try_into().map_err(crate::Error::from)?); let prompt = Arc::new(prompt.try_into().map_err(crate::Error::from)?);
@@ -1571,7 +1556,7 @@ fn embedders(embedding_configs: Vec<IndexEmbeddingConfig>) -> Result<EmbeddingCo
.map_err(crate::vector::Error::from) .map_err(crate::vector::Error::from)
.map_err(crate::Error::from)?, .map_err(crate::Error::from)?,
); );
Ok((name, (embedder, prompt, quantized.unwrap_or_default()))) Ok((name, (embedder, prompt)))
}, },
) )
.collect(); .collect();
@@ -1596,7 +1581,6 @@ fn validate_prompt(
response, response,
distribution, distribution,
headers, headers,
binary_quantized: binary_quantize,
}) => { }) => {
let max_bytes = match document_template_max_bytes.set() { let max_bytes = match document_template_max_bytes.set() {
Some(max_bytes) => NonZeroUsize::new(max_bytes).ok_or_else(|| { Some(max_bytes) => NonZeroUsize::new(max_bytes).ok_or_else(|| {
@@ -1629,7 +1613,6 @@ fn validate_prompt(
response, response,
distribution, distribution,
headers, headers,
binary_quantized: binary_quantize,
})) }))
} }
new => Ok(new), new => Ok(new),
@@ -1655,7 +1638,6 @@ pub fn validate_embedding_settings(
response, response,
distribution, distribution,
headers, headers,
binary_quantized: binary_quantize,
} = settings; } = settings;
if let Some(0) = dimensions.set() { if let Some(0) = dimensions.set() {
@@ -1696,7 +1678,6 @@ pub fn validate_embedding_settings(
response, response,
distribution, distribution,
headers, headers,
binary_quantized: binary_quantize,
})); }));
}; };
match inferred_source { match inferred_source {
@@ -1798,7 +1779,6 @@ pub fn validate_embedding_settings(
response, response,
distribution, distribution,
headers, headers,
binary_quantized: binary_quantize,
})) }))
} }
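
Several hunks above read embedder settings through calls like `old.binary_quantized.set().unwrap_or_default()`. A toy model of that tri-state `Setting` type, inferred from its usage here rather than copied from the real definition:

```rust
// Assumed shape: a field is either explicitly set, explicitly reset, or untouched.
#[derive(Debug, Clone, Copy, Default)]
enum Setting<T> {
    Set(T),
    Reset,
    #[default]
    NotSet,
}

impl<T> Setting<T> {
    /// Mirrors the `set()` calls above: `Some` only for an explicit value.
    fn set(self) -> Option<T> {
        match self {
            Setting::Set(v) => Some(v),
            _ => None,
        }
    }
}

fn main() {
    let binary_quantized: Setting<bool> = Setting::Set(true);
    // As in the diff: default to `false` when the flag was never set.
    assert!(binary_quantized.set().unwrap_or_default());
    assert_eq!(Setting::<bool>::NotSet.set(), None);
    assert_eq!(Setting::<bool>::Reset.set(), None);
}
```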


@@ -1,12 +1,8 @@
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
use arroy::distances::{BinaryQuantizedCosine, Cosine};
use arroy::ItemId;
use deserr::{DeserializeError, Deserr}; use deserr::{DeserializeError, Deserr};
use heed::{RoTxn, RwTxn, Unspecified};
use ordered_float::OrderedFloat; use ordered_float::OrderedFloat;
use roaring::RoaringBitmap;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use self::error::{EmbedError, NewEmbedderError}; use self::error::{EmbedError, NewEmbedderError};
@@ -30,386 +26,6 @@ pub type Embedding = Vec<f32>;
pub const REQUEST_PARALLELISM: usize = 40; pub const REQUEST_PARALLELISM: usize = 40;
pub struct ArroyWrapper {
quantized: bool,
embedder_index: u8,
database: arroy::Database<Unspecified>,
}
impl ArroyWrapper {
pub fn new(
database: arroy::Database<Unspecified>,
embedder_index: u8,
quantized: bool,
) -> Self {
Self { database, embedder_index, quantized }
}
pub fn embedder_index(&self) -> u8 {
self.embedder_index
}
fn readers<'a, D: arroy::Distance>(
&'a self,
rtxn: &'a RoTxn<'a>,
db: arroy::Database<D>,
) -> impl Iterator<Item = Result<arroy::Reader<D>, arroy::Error>> + 'a {
arroy_db_range_for_embedder(self.embedder_index).map_while(move |index| {
match arroy::Reader::open(rtxn, index, db) {
Ok(reader) => match reader.is_empty(rtxn) {
Ok(false) => Some(Ok(reader)),
Ok(true) => None,
Err(e) => Some(Err(e)),
},
Err(arroy::Error::MissingMetadata(_)) => None,
Err(e) => Some(Err(e)),
}
})
}
pub fn dimensions(&self, rtxn: &RoTxn) -> Result<usize, arroy::Error> {
let first_id = arroy_db_range_for_embedder(self.embedder_index).next().unwrap();
if self.quantized {
Ok(arroy::Reader::open(rtxn, first_id, self.quantized_db())?.dimensions())
} else {
Ok(arroy::Reader::open(rtxn, first_id, self.angular_db())?.dimensions())
}
}
pub fn build_and_quantize<R: rand::Rng + rand::SeedableRng>(
&mut self,
wtxn: &mut RwTxn,
rng: &mut R,
dimension: usize,
quantizing: bool,
cancel: &(impl Fn() -> bool + Sync + Send),
) -> Result<(), arroy::Error> {
for index in arroy_db_range_for_embedder(self.embedder_index) {
if self.quantized {
let writer = arroy::Writer::new(self.quantized_db(), index, dimension);
if writer.need_build(wtxn)? {
writer.builder(rng).build(wtxn)?
} else if writer.is_empty(wtxn)? {
break;
}
} else {
let writer = arroy::Writer::new(self.angular_db(), index, dimension);
                // If we are quantizing the databases, meilisearch cannot know
                // whether the db was empty but still contained the wrong metadata,
                // thus we need to quantize everything and can't stop early. Since
                // this operation can only happen once in the life of an embedder,
                // it's not very performance sensitive.
if quantizing && !self.quantized {
let writer = writer.prepare_changing_distance::<BinaryQuantizedCosine>(wtxn)?;
writer.builder(rng).cancel(cancel).build(wtxn)?;
} else if writer.need_build(wtxn)? {
writer.builder(rng).cancel(cancel).build(wtxn)?;
} else if writer.is_empty(wtxn)? {
break;
}
}
}
Ok(())
}
/// Overwrite all the embeddings associated with the index and item ID.
/// /!\ It won't remove embeddings after the last passed embedding, which can leave stale embeddings.
/// You should call `del_items` on the `item_id` before calling this method.
/// /!\ Cannot insert more than u8::MAX embeddings; after inserting u8::MAX embeddings, all the remaining ones will be silently ignored.
pub fn add_items(
&self,
wtxn: &mut RwTxn,
item_id: arroy::ItemId,
embeddings: &Embeddings<f32>,
) -> Result<(), arroy::Error> {
let dimension = embeddings.dimension();
for (index, vector) in
arroy_db_range_for_embedder(self.embedder_index).zip(embeddings.iter())
{
if self.quantized {
arroy::Writer::new(self.quantized_db(), index, dimension)
.add_item(wtxn, item_id, vector)?
} else {
arroy::Writer::new(self.angular_db(), index, dimension)
.add_item(wtxn, item_id, vector)?
}
}
Ok(())
}
    /// Add one document into this index where we can find an empty spot.
pub fn add_item(
&self,
wtxn: &mut RwTxn,
item_id: arroy::ItemId,
vector: &[f32],
) -> Result<(), arroy::Error> {
if self.quantized {
self._add_item(wtxn, self.quantized_db(), item_id, vector)
} else {
self._add_item(wtxn, self.angular_db(), item_id, vector)
}
}
fn _add_item<D: arroy::Distance>(
&self,
wtxn: &mut RwTxn,
db: arroy::Database<D>,
item_id: arroy::ItemId,
vector: &[f32],
) -> Result<(), arroy::Error> {
let dimension = vector.len();
for index in arroy_db_range_for_embedder(self.embedder_index) {
let writer = arroy::Writer::new(db, index, dimension);
if !writer.contains_item(wtxn, item_id)? {
writer.add_item(wtxn, item_id, vector)?;
break;
}
}
Ok(())
}
/// Delete all embeddings from a specific `item_id`
pub fn del_items(
&self,
wtxn: &mut RwTxn,
dimension: usize,
item_id: arroy::ItemId,
) -> Result<(), arroy::Error> {
for index in arroy_db_range_for_embedder(self.embedder_index) {
if self.quantized {
let writer = arroy::Writer::new(self.quantized_db(), index, dimension);
if !writer.del_item(wtxn, item_id)? {
break;
}
} else {
let writer = arroy::Writer::new(self.angular_db(), index, dimension);
if !writer.del_item(wtxn, item_id)? {
break;
}
}
}
Ok(())
}
/// Delete one item.
pub fn del_item(
&self,
wtxn: &mut RwTxn,
item_id: arroy::ItemId,
vector: &[f32],
) -> Result<bool, arroy::Error> {
if self.quantized {
self._del_item(wtxn, self.quantized_db(), item_id, vector)
} else {
self._del_item(wtxn, self.angular_db(), item_id, vector)
}
}
fn _del_item<D: arroy::Distance>(
&self,
wtxn: &mut RwTxn,
db: arroy::Database<D>,
item_id: arroy::ItemId,
vector: &[f32],
) -> Result<bool, arroy::Error> {
let dimension = vector.len();
let mut deleted_index = None;
for index in arroy_db_range_for_embedder(self.embedder_index) {
let writer = arroy::Writer::new(db, index, dimension);
let Some(candidate) = writer.item_vector(wtxn, item_id)? else {
// uses invariant: vectors are packed in the first writers.
break;
};
if candidate == vector {
writer.del_item(wtxn, item_id)?;
deleted_index = Some(index);
}
}
// 🥲 enforce invariant: vectors are packed in the first writers.
if let Some(deleted_index) = deleted_index {
let mut last_index_with_a_vector = None;
for index in
arroy_db_range_for_embedder(self.embedder_index).skip(deleted_index as usize)
{
let writer = arroy::Writer::new(db, index, dimension);
let Some(candidate) = writer.item_vector(wtxn, item_id)? else {
break;
};
last_index_with_a_vector = Some((index, candidate));
}
if let Some((last_index, vector)) = last_index_with_a_vector {
let writer = arroy::Writer::new(db, last_index, dimension);
writer.del_item(wtxn, item_id)?;
let writer = arroy::Writer::new(db, deleted_index, dimension);
writer.add_item(wtxn, item_id, &vector)?;
}
}
Ok(deleted_index.is_some())
}
pub fn clear(&self, wtxn: &mut RwTxn, dimension: usize) -> Result<(), arroy::Error> {
for index in arroy_db_range_for_embedder(self.embedder_index) {
if self.quantized {
let writer = arroy::Writer::new(self.quantized_db(), index, dimension);
if writer.is_empty(wtxn)? {
break;
}
writer.clear(wtxn)?;
} else {
let writer = arroy::Writer::new(self.angular_db(), index, dimension);
if writer.is_empty(wtxn)? {
break;
}
writer.clear(wtxn)?;
}
}
Ok(())
}
pub fn contains_item(
&self,
rtxn: &RoTxn,
dimension: usize,
item: arroy::ItemId,
) -> Result<bool, arroy::Error> {
for index in arroy_db_range_for_embedder(self.embedder_index) {
let contains = if self.quantized {
let writer = arroy::Writer::new(self.quantized_db(), index, dimension);
if writer.is_empty(rtxn)? {
break;
}
writer.contains_item(rtxn, item)?
} else {
let writer = arroy::Writer::new(self.angular_db(), index, dimension);
if writer.is_empty(rtxn)? {
break;
}
writer.contains_item(rtxn, item)?
};
if contains {
return Ok(contains);
}
}
Ok(false)
}
pub fn nns_by_item(
&self,
rtxn: &RoTxn,
item: ItemId,
limit: usize,
filter: Option<&RoaringBitmap>,
) -> Result<Vec<(ItemId, f32)>, arroy::Error> {
if self.quantized {
self._nns_by_item(rtxn, self.quantized_db(), item, limit, filter)
} else {
self._nns_by_item(rtxn, self.angular_db(), item, limit, filter)
}
}
fn _nns_by_item<D: arroy::Distance>(
&self,
rtxn: &RoTxn,
db: arroy::Database<D>,
item: ItemId,
limit: usize,
filter: Option<&RoaringBitmap>,
) -> Result<Vec<(ItemId, f32)>, arroy::Error> {
let mut results = Vec::new();
for reader in self.readers(rtxn, db) {
let reader = reader?;
let mut searcher = reader.nns(limit);
if let Some(filter) = filter {
searcher.candidates(filter);
}
if let Some(mut ret) = searcher.by_item(rtxn, item)? {
results.append(&mut ret);
} else {
break;
}
}
results.sort_unstable_by_key(|(_, distance)| OrderedFloat(*distance));
Ok(results)
}
pub fn nns_by_vector(
&self,
rtxn: &RoTxn,
vector: &[f32],
limit: usize,
filter: Option<&RoaringBitmap>,
) -> Result<Vec<(ItemId, f32)>, arroy::Error> {
if self.quantized {
self._nns_by_vector(rtxn, self.quantized_db(), vector, limit, filter)
} else {
self._nns_by_vector(rtxn, self.angular_db(), vector, limit, filter)
}
}
fn _nns_by_vector<D: arroy::Distance>(
&self,
rtxn: &RoTxn,
db: arroy::Database<D>,
vector: &[f32],
limit: usize,
filter: Option<&RoaringBitmap>,
) -> Result<Vec<(ItemId, f32)>, arroy::Error> {
let mut results = Vec::new();
for reader in self.readers(rtxn, db) {
let reader = reader?;
let mut searcher = reader.nns(limit);
if let Some(filter) = filter {
searcher.candidates(filter);
}
results.append(&mut searcher.by_vector(rtxn, vector)?);
}
results.sort_unstable_by_key(|(_, distance)| OrderedFloat(*distance));
Ok(results)
}
pub fn item_vectors(&self, rtxn: &RoTxn, item_id: u32) -> Result<Vec<Vec<f32>>, arroy::Error> {
let mut vectors = Vec::new();
if self.quantized {
for reader in self.readers(rtxn, self.quantized_db()) {
if let Some(vec) = reader?.item_vector(rtxn, item_id)? {
vectors.push(vec);
} else {
break;
}
}
} else {
for reader in self.readers(rtxn, self.angular_db()) {
if let Some(vec) = reader?.item_vector(rtxn, item_id)? {
vectors.push(vec);
} else {
break;
}
}
}
Ok(vectors)
}
fn angular_db(&self) -> arroy::Database<Cosine> {
self.database.remap_data_type()
}
fn quantized_db(&self) -> arroy::Database<BinaryQuantizedCosine> {
self.database.remap_data_type()
}
}
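
The `_del_item` method above is where the packing invariant mentioned throughout this PR is enforced: a document's vectors always occupy the lowest-numbered sub-databases, so every reader loop may stop at the first gap. A toy illustration of the hole-filling move on a plain `Vec<Option<_>>` (not Meilisearch code):

```rust
/// Deletes `victim` and, if that opened a hole, moves the last vector into it
/// so that all remaining vectors stay packed at the front.
fn del_vector(slots: &mut [Option<Vec<f32>>], victim: &[f32]) -> bool {
    let Some(hole) = slots.iter().position(|s| s.as_deref() == Some(victim)) else {
        return false; // nothing to delete
    };
    slots[hole] = None;
    // Find the last occupied slot after the hole and move it down.
    if let Some(last) = slots.iter().rposition(|s| s.is_some()) {
        if last > hole {
            let moved = slots[last].take();
            slots[hole] = moved;
        }
    }
    true
}

fn main() {
    let mut slots = vec![Some(vec![0.0]), Some(vec![1.0]), Some(vec![2.0]), None];
    assert!(del_vector(&mut slots, &[1.0]));
    // `[2.0]` filled the hole, so iteration can still stop at the first `None`.
    assert_eq!(slots, vec![Some(vec![0.0]), Some(vec![2.0]), None, None]);
}
```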
/// One or multiple embeddings stored consecutively in a flat vector. /// One or multiple embeddings stored consecutively in a flat vector.
pub struct Embeddings<F> { pub struct Embeddings<F> {
data: Vec<F>, data: Vec<F>,
@@ -508,48 +124,62 @@ pub struct EmbeddingConfig {
pub embedder_options: EmbedderOptions, pub embedder_options: EmbedderOptions,
/// Document template /// Document template
pub prompt: PromptData, pub prompt: PromptData,
/// If this embedder is binary quantized
pub quantized: Option<bool>,
// TODO: add metrics and anything needed // TODO: add metrics and anything needed
} }
impl EmbeddingConfig {
pub fn quantized(&self) -> bool {
self.quantized.unwrap_or_default()
}
}
/// Map of embedder configurations. /// Map of embedder configurations.
/// ///
/// Each configuration is mapped to a name. /// Each configuration is mapped to a name.
#[derive(Clone, Default)] #[derive(Clone, Default)]
pub struct EmbeddingConfigs(HashMap<String, (Arc<Embedder>, Arc<Prompt>, bool)>); pub struct EmbeddingConfigs(HashMap<String, (Arc<Embedder>, Arc<Prompt>)>);
impl EmbeddingConfigs { impl EmbeddingConfigs {
    /// Create the map from its internal components. /// Create the map from its internal components.
pub fn new(data: HashMap<String, (Arc<Embedder>, Arc<Prompt>, bool)>) -> Self { pub fn new(data: HashMap<String, (Arc<Embedder>, Arc<Prompt>)>) -> Self {
Self(data) Self(data)
} }
/// Get an embedder configuration and template from its name. /// Get an embedder configuration and template from its name.
pub fn get(&self, name: &str) -> Option<(Arc<Embedder>, Arc<Prompt>, bool)> { pub fn get(&self, name: &str) -> Option<(Arc<Embedder>, Arc<Prompt>)> {
self.0.get(name).cloned() self.0.get(name).cloned()
} }
pub fn inner_as_ref(&self) -> &HashMap<String, (Arc<Embedder>, Arc<Prompt>, bool)> { /// Get the default embedder configuration, if any.
pub fn get_default(&self) -> Option<(Arc<Embedder>, Arc<Prompt>)> {
self.get(self.get_default_embedder_name())
}
pub fn inner_as_ref(&self) -> &HashMap<String, (Arc<Embedder>, Arc<Prompt>)> {
&self.0 &self.0
} }
pub fn into_inner(self) -> HashMap<String, (Arc<Embedder>, Arc<Prompt>, bool)> { pub fn into_inner(self) -> HashMap<String, (Arc<Embedder>, Arc<Prompt>)> {
self.0 self.0
} }
/// Get the name of the default embedder configuration.
///
/// The default embedder is determined as follows:
///
/// - If there is only one embedder, it is always the default.
/// - If there are multiple embedders and one of them is called `default`, then that one is the default embedder.
/// - In all other cases, there is no default embedder.
pub fn get_default_embedder_name(&self) -> &str {
let mut it = self.0.keys();
let first_name = it.next();
let second_name = it.next();
match (first_name, second_name) {
(None, _) => "default",
(Some(first), None) => first,
(Some(_), Some(_)) => "default",
}
}
} }
impl IntoIterator for EmbeddingConfigs { impl IntoIterator for EmbeddingConfigs {
type Item = (String, (Arc<Embedder>, Arc<Prompt>, bool)); type Item = (String, (Arc<Embedder>, Arc<Prompt>));
type IntoIter = type IntoIter = std::collections::hash_map::IntoIter<String, (Arc<Embedder>, Arc<Prompt>)>;
std::collections::hash_map::IntoIter<String, (Arc<Embedder>, Arc<Prompt>, bool)>;
fn into_iter(self) -> Self::IntoIter { fn into_iter(self) -> Self::IntoIter {
self.0.into_iter() self.0.into_iter()

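The restored `get_default_embedder_name` rules are easiest to see on concrete map contents. A self-contained sketch of just the key-resolution logic (the value type is irrelevant to name resolution, so `()` stands in for the `(Arc<Embedder>, Arc<Prompt>)` pairs):

use std::collections::HashMap;

// Key-only stand-in for EmbeddingConfigs: resolution ignores the values.
fn default_embedder_name(configs: &HashMap<String, ()>) -> &str {
    let mut it = configs.keys();
    match (it.next(), it.next()) {
        (None, _) => "default",          // empty map: fall back to the literal name
        (Some(first), None) => first,    // a single embedder is always the default
        (Some(_), Some(_)) => "default", // several embedders: only one literally named "default" can match
    }
}

fn main() {
    let one = HashMap::from([("small".to_string(), ())]);
    assert_eq!(default_embedder_name(&one), "small");

    let two = HashMap::from([("small".to_string(), ()), ("large".to_string(), ())]);
    // With several embedders the resolved name is "default"; a subsequent
    // `get("default")` simply returns None when no such embedder exists.
    assert_eq!(default_embedder_name(&two), "default");
}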
View File

@@ -32,9 +32,6 @@ pub struct EmbeddingSettings {
     pub dimensions: Setting<usize>,
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default)]
-    pub binary_quantized: Setting<bool>,
-    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[deserr(default)]
     pub document_template: Setting<String>,
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default)]
@@ -88,63 +85,23 @@ pub enum ReindexAction {
 pub enum SettingsDiff {
     Remove,
-    Reindex { action: ReindexAction, updated_settings: EmbeddingSettings, quantize: bool },
-    UpdateWithoutReindex { updated_settings: EmbeddingSettings, quantize: bool },
+    Reindex { action: ReindexAction, updated_settings: EmbeddingSettings },
+    UpdateWithoutReindex { updated_settings: EmbeddingSettings },
 }

-#[derive(Default, Debug)]
-pub struct EmbedderAction {
-    pub was_quantized: bool,
-    pub is_being_quantized: bool,
-    pub write_back: Option<WriteBackToDocuments>,
-    pub reindex: Option<ReindexAction>,
-}
-
-impl EmbedderAction {
-    pub fn is_being_quantized(&self) -> bool {
-        self.is_being_quantized
-    }
-
-    pub fn write_back(&self) -> Option<&WriteBackToDocuments> {
-        self.write_back.as_ref()
-    }
-
-    pub fn reindex(&self) -> Option<&ReindexAction> {
-        self.reindex.as_ref()
-    }
-
-    pub fn with_is_being_quantized(mut self, quantize: bool) -> Self {
-        self.is_being_quantized = quantize;
-        self
-    }
-
-    pub fn with_write_back(write_back: WriteBackToDocuments, was_quantized: bool) -> Self {
-        Self {
-            was_quantized,
-            is_being_quantized: false,
-            write_back: Some(write_back),
-            reindex: None,
-        }
-    }
-
-    pub fn with_reindex(reindex: ReindexAction, was_quantized: bool) -> Self {
-        Self { was_quantized, is_being_quantized: false, write_back: None, reindex: Some(reindex) }
-    }
-}
-
-#[derive(Debug)]
+pub enum EmbedderAction {
+    WriteBackToDocuments(WriteBackToDocuments),
+    Reindex(ReindexAction),
+}
+
 pub struct WriteBackToDocuments {
     pub embedder_id: u8,
     pub user_provided: RoaringBitmap,
 }
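The `user_provided` bitmap pins down exactly which documents are affected when an embedder goes away: their vectors were supplied by the user, so they must be preserved (written back into the documents) rather than silently dropped — that reading is inferred from the field names, not from code shown in this hunk. A self-contained sketch with a stand-in struct:

use roaring::RoaringBitmap;

// Stand-in mirroring the struct above so the sketch compiles on its own.
struct WriteBackToDocuments {
    embedder_id: u8,
    user_provided: RoaringBitmap,
}

fn main() {
    // Documents 1, 5 and 42 carried user-provided vectors for embedder #0.
    let mut user_provided = RoaringBitmap::new();
    user_provided.extend([1u32, 5, 42]);
    let action = WriteBackToDocuments { embedder_id: 0, user_provided };
    assert_eq!(action.embedder_id, 0);
    assert_eq!(action.user_provided.len(), 3);
}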
 impl SettingsDiff {
-    pub fn from_settings(
-        embedder_name: &str,
-        old: EmbeddingSettings,
-        new: Setting<EmbeddingSettings>,
-    ) -> Result<Self, UserError> {
-        let ret = match new {
+    pub fn from_settings(old: EmbeddingSettings, new: Setting<EmbeddingSettings>) -> Self {
+        match new {
             Setting::Set(new) => {
                 let EmbeddingSettings {
                     mut source,
@@ -159,7 +116,6 @@ impl SettingsDiff {
                     mut distribution,
                     mut headers,
                     mut document_template_max_bytes,
-                    binary_quantized: mut binary_quantize,
                 } = old;

                 let EmbeddingSettings {
@@ -175,17 +131,8 @@ impl SettingsDiff {
                     distribution: new_distribution,
                     headers: new_headers,
                     document_template_max_bytes: new_document_template_max_bytes,
-                    binary_quantized: new_binary_quantize,
                 } = new;
-
-                if matches!(binary_quantize, Setting::Set(true))
-                    && matches!(new_binary_quantize, Setting::Set(false))
-                {
-                    return Err(UserError::InvalidDisableBinaryQuantization {
-                        embedder_name: embedder_name.to_string(),
-                    });
-                }

                 let mut reindex_action = None;
                 // **Warning**: do not use short-circuiting || here, we want all these operations applied
@@ -225,7 +172,6 @@ impl SettingsDiff {
                         _ => {}
                     }
                 }
-                let binary_quantize_changed = binary_quantize.apply(new_binary_quantize);
                 if url.apply(new_url) {
                     match source {
                         // do not regenerate on an url change in OpenAI
@@ -285,27 +231,16 @@ impl SettingsDiff {
                     distribution,
                     headers,
                     document_template_max_bytes,
-                    binary_quantized: binary_quantize,
                 };

                 match reindex_action {
-                    Some(action) => Self::Reindex {
-                        action,
-                        updated_settings,
-                        quantize: binary_quantize_changed,
-                    },
-                    None => Self::UpdateWithoutReindex {
-                        updated_settings,
-                        quantize: binary_quantize_changed,
-                    },
+                    Some(action) => Self::Reindex { action, updated_settings },
+                    None => Self::UpdateWithoutReindex { updated_settings },
                 }
             }
             Setting::Reset => Self::Remove,
-            Setting::NotSet => {
-                Self::UpdateWithoutReindex { updated_settings: old, quantize: false }
-            }
-        };
-        Ok(ret)
+            Setting::NotSet => Self::UpdateWithoutReindex { updated_settings: old },
         }
     }
 }
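The three-way diff is what callers branch on: remove everything, re-embed, or just persist the new settings. A minimal, self-contained sketch of that dispatch under the restored signature — the `ReindexAction` variant names used here are assumptions for illustration, not copied from the hunk above:

// Stand-in types so the sketch compiles on its own.
#[allow(dead_code)]
enum ReindexAction { RegeneratePrompts, FullReindex }
struct EmbeddingSettings; // field-free placeholder

#[allow(dead_code)]
enum SettingsDiff {
    Remove,
    Reindex { action: ReindexAction, updated_settings: EmbeddingSettings },
    UpdateWithoutReindex { updated_settings: EmbeddingSettings },
}

fn describe(diff: &SettingsDiff) -> &'static str {
    match diff {
        // The embedder was reset: its stored vectors must be dealt with.
        SettingsDiff::Remove => "remove embedder data",
        // An incompatible change: documents must be re-embedded.
        SettingsDiff::Reindex { action: ReindexAction::RegeneratePrompts, .. } => {
            "re-render document templates, then re-embed"
        }
        SettingsDiff::Reindex { action: ReindexAction::FullReindex, .. } => {
            "rebuild the embedder and re-embed everything"
        }
        // A compatible change (e.g. an API key): keep the stored vectors.
        SettingsDiff::UpdateWithoutReindex { .. } => "store new settings, keep vectors",
    }
}

fn main() {
    let diff = SettingsDiff::UpdateWithoutReindex { updated_settings: EmbeddingSettings };
    assert_eq!(describe(&diff), "store new settings, keep vectors");
}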
@@ -417,8 +352,6 @@ impl EmbeddingSettings {
     pub const DISTRIBUTION: &'static str = "distribution";

-    pub const BINARY_QUANTIZED: &'static str = "binaryQuantized";
-
     pub fn allowed_sources_for_field(field: &'static str) -> &'static [EmbedderSource] {
         match field {
             Self::SOURCE => &[
@@ -458,13 +391,6 @@ impl EmbeddingSettings {
                 EmbedderSource::Rest,
                 EmbedderSource::UserProvided,
             ],
-            Self::BINARY_QUANTIZED => &[
-                EmbedderSource::HuggingFace,
-                EmbedderSource::Ollama,
-                EmbedderSource::OpenAi,
-                EmbedderSource::Rest,
-                EmbedderSource::UserProvided,
-            ],
             _other => unreachable!("unknown field"),
         }
     }
@@ -479,7 +405,6 @@ impl EmbeddingSettings {
                 Self::DIMENSIONS,
                 Self::DISTRIBUTION,
                 Self::URL,
-                Self::BINARY_QUANTIZED,
             ],
             EmbedderSource::HuggingFace => &[
                 Self::SOURCE,
@@ -487,7 +412,6 @@ impl EmbeddingSettings {
                 Self::REVISION,
                 Self::DOCUMENT_TEMPLATE,
                 Self::DISTRIBUTION,
-                Self::BINARY_QUANTIZED,
             ],
             EmbedderSource::Ollama => &[
                 Self::SOURCE,
@@ -497,11 +421,8 @@ impl EmbeddingSettings {
                 Self::API_KEY,
                 Self::DIMENSIONS,
                 Self::DISTRIBUTION,
-                Self::BINARY_QUANTIZED,
             ],
-            EmbedderSource::UserProvided => {
-                &[Self::SOURCE, Self::DIMENSIONS, Self::DISTRIBUTION, Self::BINARY_QUANTIZED]
-            }
+            EmbedderSource::UserProvided => &[Self::SOURCE, Self::DIMENSIONS, Self::DISTRIBUTION],
             EmbedderSource::Rest => &[
                 Self::SOURCE,
                 Self::API_KEY,
@@ -512,7 +433,6 @@ impl EmbeddingSettings {
                 Self::RESPONSE,
                 Self::HEADERS,
                 Self::DISTRIBUTION,
-                Self::BINARY_QUANTIZED,
             ],
         }
     }
@@ -566,7 +486,7 @@ impl std::fmt::Display for EmbedderSource {
 impl From<EmbeddingConfig> for EmbeddingSettings {
     fn from(value: EmbeddingConfig) -> Self {
-        let EmbeddingConfig { embedder_options, prompt, quantized } = value;
+        let EmbeddingConfig { embedder_options, prompt } = value;
         let document_template_max_bytes =
             Setting::Set(prompt.max_bytes.unwrap_or(default_max_bytes()).get());
         match embedder_options {
@@ -587,7 +507,6 @@ impl From<EmbeddingConfig> for EmbeddingSettings {
                 response: Setting::NotSet,
                 headers: Setting::NotSet,
                 distribution: Setting::some_or_not_set(distribution),
-                binary_quantized: Setting::some_or_not_set(quantized),
             },
             super::EmbedderOptions::OpenAi(super::openai::EmbedderOptions {
                 url,
@@ -608,7 +527,6 @@ impl From<EmbeddingConfig> for EmbeddingSettings {
                 response: Setting::NotSet,
                 headers: Setting::NotSet,
                 distribution: Setting::some_or_not_set(distribution),
-                binary_quantized: Setting::some_or_not_set(quantized),
             },
             super::EmbedderOptions::Ollama(super::ollama::EmbedderOptions {
                 embedding_model,
@@ -629,7 +547,6 @@ impl From<EmbeddingConfig> for EmbeddingSettings {
                 response: Setting::NotSet,
                 headers: Setting::NotSet,
                 distribution: Setting::some_or_not_set(distribution),
-                binary_quantized: Setting::some_or_not_set(quantized),
             },
             super::EmbedderOptions::UserProvided(super::manual::EmbedderOptions {
                 dimensions,
@@ -647,7 +564,6 @@ impl From<EmbeddingConfig> for EmbeddingSettings {
                 response: Setting::NotSet,
                 headers: Setting::NotSet,
                 distribution: Setting::some_or_not_set(distribution),
-                binary_quantized: Setting::some_or_not_set(quantized),
             },
             super::EmbedderOptions::Rest(super::rest::EmbedderOptions {
                 api_key,
@@ -670,7 +586,6 @@ impl From<EmbeddingConfig> for EmbeddingSettings {
                 response: Setting::Set(response),
                 distribution: Setting::some_or_not_set(distribution),
                 headers: Setting::Set(headers),
-                binary_quantized: Setting::some_or_not_set(quantized),
             },
         }
     }
@@ -692,11 +607,8 @@ impl From<EmbeddingSettings> for EmbeddingConfig {
             response,
             distribution,
             headers,
-            binary_quantized,
         } = value;
-        this.quantized = binary_quantized.set();
         if let Some(source) = source.set() {
             match source {
                 EmbedderSource::OpenAi => {

View File

@@ -77,8 +77,7 @@
         "q": "puppy cute comforting movie",
         "limit": 100,
         "hybrid": {
-          "semanticRatio": 0.1,
-          "embedder": "default"
+          "semanticRatio": 0.1
         }
       }
     },
@@ -92,8 +91,7 @@
         "q": "puppy cute comforting movie",
         "limit": 100,
         "hybrid": {
-          "semanticRatio": 0.5,
-          "embedder": "default"
+          "semanticRatio": 0.5
         }
       }
     },
@@ -107,8 +105,7 @@
         "q": "puppy cute comforting movie",
         "limit": 100,
         "hybrid": {
-          "semanticRatio": 0.9,
-          "embedder": "default"
+          "semanticRatio": 0.9
         }
       }
     },
@@ -122,8 +119,7 @@
         "q": "puppy cute comforting movie",
         "limit": 100,
         "hybrid": {
-          "semanticRatio": 1.0,
-          "embedder": "default"
+          "semanticRatio": 1.0
         }
       }
     },
@@ -137,8 +133,7 @@
         "q": "shrek",
         "limit": 100,
         "hybrid": {
-          "semanticRatio": 1.0,
-          "embedder": "default"
+          "semanticRatio": 1.0
        }
       }
     },
@@ -152,8 +147,7 @@
         "q": "shrek",
         "limit": 100,
         "hybrid": {
-          "semanticRatio": 0.5,
-          "embedder": "default"
+          "semanticRatio": 0.5
         }
       }
     },
@@ -167,8 +161,7 @@
         "q": "shrek",
         "limit": 100,
         "hybrid": {
-          "semanticRatio": 0.1,
-          "embedder": "default"
+          "semanticRatio": 0.1
         }
       }
     },
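These benchmark payloads sweep `semanticRatio` over the same queries; with the explicit `"embedder"` key gone, the engine falls back to the implicit default-embedder resolution restored earlier in this diff. For orientation, a hedged sketch that generates equivalent request bodies (assuming `serde_json`; the ratio semantics — 0 leaning full-text, 1 leaning semantic — are the documented meaning of the field):

use serde_json::json;

fn main() {
    // Mirror the benchmark's sweep from mostly-keyword to purely semantic.
    for ratio in [0.1, 0.5, 0.9, 1.0] {
        let body = json!({
            "q": "puppy cute comforting movie",
            "limit": 100,
            // No "embedder" key: the default embedder is resolved implicitly.
            "hybrid": { "semanticRatio": ratio }
        });
        println!("{body}");
    }
}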