mirror of https://github.com/meilisearch/meilisearch.git (synced 2025-11-22 12:46:53 +00:00)

## Compare commits: `prototype-…` to `prototype-…`

170 commits
| SHA1 |
|---|
| 119bb805e5 |
| 9f10011525 |
| 54a2029b1f |
| 4fe09f406c |
| 9d95f8187d |
| fb0904b893 |
| eb829e93c5 |
| be36afe9e6 |
| 27c087a4fb |
| c618877bce |
| 982e554639 |
| a8cfec19b5 |
| 5d2b1c2637 |
| 65fefc7122 |
| 180cfd71cc |
| 614cd8cc1c |
| c6f6808981 |
| 9cb77fd310 |
| fa8bd6430c |
| 4c016432e1 |
| 0aa1f63061 |
| d4c88f28f3 |
| d90c76d3cc |
| f6bc6854f8 |
| 42001a25ff |
| 080d5f94dd |
| ba0f50e5ef |
| ce6230aa85 |
| 6dc241f9de |
| 01d1ef65c4 |
| 3246667590 |
| 109395c199 |
| a0b71a8785 |
| 00a5c86f13 |
| 366c37a686 |
| afc164a271 |
| bdc2d1e64d |
| 0312fb22b8 |
| b85657de1e |
| 626be0ef28 |
| 1b476b8a35 |
| a1b42c10e2 |
| d67db6e3c2 |
| 760ccffdbd |
| 338806283b |
| fe15e11c9d |
| f1d92bfead |
| a005a062da |
| fd8b2451d7 |
| 058f9ffda5 |
| 5d363205a5 |
| a683faa882 |
| 8887cbdcd5 |
| 634865ff53 |
| 36fccf8525 |
| d6bd60d569 |
| 48ad959fc1 |
| 1bc30cb4c8 |
| 77138a42d6 |
| 0791506124 |
| 2a015ac3b8 |
| 6f248b78a9 |
| d694e312ff |
| d76dcc8998 |
| e654f66223 |
| 34f2ab7093 |
| 1a9dbd364e |
| 662c5d9871 |
| 5cd61b50f9 |
| 9a9be76757 |
| cfa6ba6c3b |
| f4f333dbf6 |
| 1ade76ba10 |
| ae26658913 |
| aa09edb3fb |
| 3f42f1a036 |
| 9bdfdd395b |
| 78d0625a91 |
| 3f655ea20e |
| 50bc1d55f3 |
| 0a4f2ef891 |
| faa1f7c5b7 |
| 3cc5d86598 |
| 1ae47bec77 |
| 2f1be0ff86 |
| 9cee432255 |
| ff8d48d2f1 |
| a56c036994 |
| 511c48f520 |
| 4623691d1f |
| 3261aadcf2 |
| 073e9f2967 |
| 5f8f48ec95 |
| ed2fe365a0 |
| f7c8a77f89 |
| 132065afda |
| 51c298662b |
| 70a860a0f0 |
| a3254d7d7d |
| 73c9c1ebdc |
| fa3990daf9 |
| c5993196b3 |
| 16234e1313 |
| be9f4f96df |
| b274106ad3 |
| 48527761e7 |
| 6792d048b8 |
| 8dfded2993 |
| 3714f16696 |
| d0cd3cacec |
| caccb51814 |
| cf9b311f71 |
| 7423243be0 |
| 5690700601 |
| 2faad504c6 |
| 2bcd69750f |
| de24e75be8 |
| a3af9fe057 |
| 90683d0e4e |
| 5c79273748 |
| b45eea0d3e |
| 0b89ef1fd7 |
| 65ba7b47af |
| 8af76a65bf |
| f60814b319 |
| 5a675bcb82 |
| 600178c5ab |
| dedae94102 |
| 7ae9a4afee |
| e92b6beb20 |
| 27cc357362 |
| 73dfeefc7c |
| d85480de89 |
| 9f55708d84 |
| 280c3907be |
| 8419fd9b3b |
| 283944ea89 |
| 8aacd6374a |
| 8326f34ad1 |
| f4a908669c |
| eb2c2815b6 |
| 29e9c74a49 |
| f6803dd7d1 |
| f86f4f619f |
| e35d58b531 |
| 63827bbee0 |
| 340d9e6edc |
| 28adbc0d18 |
| e3fba62e13 |
| fb9170b8e3 |
| c15763f910 |
| 4534dc2cab |
| b05cb80803 |
| 6e0526090a |
| 2090e9ea31 |
| 1c8f1c18f4 |
| c4a96b40eb |
| 2d6dc83940 |
| ab768f379f |
| 705e9a9e5e |
| 67f2a30d7c |
| 99732f4084 |
| 5081d837ea |
| 9e1cb792f4 |
| b6b7ede266 |
| f50e586a4f |
| 11fedea788 |
| 032b34c377 |
| b421c8e7de |
| 00eb258a53 |
### .gitignore (vendored, 10 changes)

```diff
@@ -5,18 +5,24 @@
 **/*.json_lines
 **/*.rs.bk
 /*.mdb
 /data.ms
 /*.ms
 /snapshots
 /dumps
 /bench
 /_xtask_benchmark.ms
 /benchmarks
 .DS_Store
 
 # Snapshots
 ## ... large
 *.full.snap
 ## ... unreviewed
 ## ... unreviewed
 *.snap.new
 ## ... pending
 *.pending-snap
 
 # Tmp files
 .tmp*
 
 # Database snapshot
 crates/meilisearch/db.snapshot
```
### Cargo.lock (generated, 13 changes)

```diff
@@ -3745,6 +3745,7 @@ dependencies = [
  "actix-web-lab",
  "anyhow",
  "async-openai",
+ "backoff",
  "brotli",
  "bstr",
  "build-info",
@@ -3775,6 +3776,7 @@ dependencies = [
  "meili-snap",
  "meilisearch-auth",
  "meilisearch-types",
+ "memmap2",
  "mimalloc",
  "mime",
  "mopa-maintained",
@@ -3908,9 +3910,9 @@ checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0"
 
 [[package]]
 name = "memmap2"
-version = "0.9.5"
+version = "0.9.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fd3f7eed9d3848f8b98834af67102b720745c4ec028fcd0aa0239277e7de374f"
+checksum = "483758ad303d734cec05e5c12b41d7e93e6a6390c5e9dae6bdeb7c1259012d28"
 dependencies = [
  "libc",
  "stable_deref_trait",
@@ -3987,6 +3989,7 @@ dependencies = [
  "time",
  "tokenizers",
  "tracing",
+ "twox-hash",
  "ureq",
  "url",
  "utoipa",
@@ -6429,6 +6432,12 @@ version = "0.2.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
 
+[[package]]
+name = "twox-hash"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b907da542cbced5261bd3256de1b3a1bf340a3d37f93425a07362a1d687de56"
+
 [[package]]
 name = "typeid"
 version = "1.0.3"
```
### LICENSE (8 changes)

```diff
@@ -19,3 +19,11 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 SOFTWARE.
+
+---
+
+🔒 Meilisearch Enterprise Edition (EE)
+
+Certain parts of this codebase are not licensed under the MIT license and governed by the Business Source License 1.1.
+
+See the LICENSE-EE file for details.
```
### LICENSE-EE (new file, 67 lines)

```diff
@@ -0,0 +1,67 @@
+Business Source License 1.1 – Adapted for Meili SAS
+This license is based on the Business Source License version 1.1, as published by MariaDB Corporation Ab.
+
+Parameters
+
+Licensor: Meili SAS
+
+Licensed Work: Any file explicitly marked as “Enterprise Edition (EE)” or “governed by the Business Source License”.
+
+Additional Use Grant:
+You may use, modify, and distribute the Licensed Work for non-production purposes only, such as testing, development, or evaluation.
+
+Production use of the Licensed Work requires a commercial license agreement with Meilisearch. Contact bonjour@meilisearch.com for licensing.
+
+Change License: MIT
+
+Change Date: Four years from the date the Licensed Work is published.
+
+This License does not apply to any code outside of the Licensed Work, which remains under the MIT license.
+
+For information about alternative licensing arrangements for the Licensed Work,
+please contact bonjour@meilisearch.com or sales@meilisearch.com.
+
+Notice
+
+Business Source License 1.1
+
+Terms
+
+The Licensor hereby grants you the right to copy, modify, create derivative
+works, redistribute, and make non-production use of the Licensed Work. The
+Licensor may make an Additional Use Grant, above, permitting limited production use.
+
+Effective on the Change Date, or the fourth anniversary of the first publicly
+available distribution of a specific version of the Licensed Work under this
+License, whichever comes first, the Licensor hereby grants you rights under
+the terms of the Change License, and the rights granted in the paragraph
+above terminate.
+
+If your use of the Licensed Work does not comply with the requirements
+currently in effect as described in this License, you must purchase a
+commercial license from the Licensor, its affiliated entities, or authorized
+resellers, or you must refrain from using the Licensed Work.
+
+All copies of the original and modified Licensed Work, and derivative works
+of the Licensed Work, are subject to this License. This License applies
+separately for each version of the Licensed Work and the Change Date may vary
+for each version of the Licensed Work released by Licensor.
+
+You must conspicuously display this License on each original or modified copy
+of the Licensed Work. If you receive the Licensed Work in original or
+modified form from a third party, the terms and conditions set forth in this
+License apply to your use of that work.
+
+Any use of the Licensed Work in violation of this License will automatically
+terminate your rights under this License for the current and all other
+versions of the Licensed Work.
+
+This License does not grant you any right in any trademark or logo of
+Licensor or its affiliates (provided that you may use a trademark or logo of
+Licensor as expressly required by this License).
+
+TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE LICENSED WORK IS PROVIDED ON
+AN "AS IS" BASIS. LICENSOR HEREBY DISCLAIMS ALL WARRANTIES AND CONDITIONS,
+EXPRESS OR IMPLIED, INCLUDING (WITHOUT LIMITATION) WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, AND
+TITLE.
```
### README.md (20 changes)

```diff
@@ -89,6 +89,26 @@ We also offer a wide range of dedicated guides to all Meilisearch features, such
 
 Finally, for more in-depth information, refer to our articles explaining fundamental Meilisearch concepts such as [documents](https://www.meilisearch.com/docs/learn/core_concepts/documents?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced) and [indexes](https://www.meilisearch.com/docs/learn/core_concepts/indexes?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced).
 
+## 🧾 Editions & Licensing
+
+Meilisearch is available in two editions:
+
+### 🧪 Community Edition (CE)
+
+- Fully open source under the [MIT license](./LICENSE)
+- Core search engine with fast and relevant full-text, semantic or hybrid search
+- Free to use for anyone, including commercial usage
+
+### 🏢 Enterprise Edition (EE)
+
+- Includes advanced features such as:
+  - Sharding
+- Governed by a [commercial license](./LICENSE-EE) or the [Business Source License 1.1](https://mariadb.com/bsl11)
+- Not allowed in production without a commercial agreement with Meilisearch.
+- You may use, modify, and distribute the Licensed Work for non-production purposes only, such as testing, development, or evaluation.
+
+Want access to Enterprise features? → Contact us at [sales@meilisearch.com](mailto:sales@meilisearch.com).
+
 ## 📊 Telemetry
 
 Meilisearch collects **anonymized** user data to help us improve our product. You can [deactivate this](https://www.meilisearch.com/docs/learn/what_is_meilisearch/telemetry?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=telemetry#how-to-disable-data-collection) whenever you want.
```
```diff
@@ -14,7 +14,7 @@ license.workspace = true
 anyhow = "1.0.98"
 bumpalo = "3.18.1"
 csv = "1.3.1"
-memmap2 = "0.9.5"
+memmap2 = "0.9.7"
 milli = { path = "../milli" }
 mimalloc = { version = "0.1.47", default-features = false }
 serde_json = { version = "1.0.140", features = ["preserve_order"] }
@@ -51,3 +51,7 @@ harness = false
 [[bench]]
 name = "indexing"
 harness = false
+
+[[bench]]
+name = "sort"
+harness = false
```
```diff
@@ -154,6 +154,7 @@ fn indexing_songs_default(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -221,6 +222,7 @@ fn reindexing_songs_default(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -266,6 +268,7 @@ fn reindexing_songs_default(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -335,6 +338,7 @@ fn deleting_songs_in_batches_default(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -412,6 +416,7 @@ fn indexing_songs_in_three_batches_default(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -457,6 +462,7 @@ fn indexing_songs_in_three_batches_default(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -498,6 +504,7 @@ fn indexing_songs_in_three_batches_default(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -566,6 +573,7 @@ fn indexing_songs_without_faceted_numbers(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -633,6 +641,7 @@ fn indexing_songs_without_faceted_fields(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -700,6 +709,7 @@ fn indexing_wiki(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -766,6 +776,7 @@ fn reindexing_wiki(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -811,6 +822,7 @@ fn reindexing_wiki(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -879,6 +891,7 @@ fn deleting_wiki_in_batches_default(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -956,6 +969,7 @@ fn indexing_wiki_in_three_batches(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -1002,6 +1016,7 @@ fn indexing_wiki_in_three_batches(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -1044,6 +1059,7 @@ fn indexing_wiki_in_three_batches(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -1111,6 +1127,7 @@ fn indexing_movies_default(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -1177,6 +1194,7 @@ fn reindexing_movies_default(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -1222,6 +1240,7 @@ fn reindexing_movies_default(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -1290,6 +1309,7 @@ fn deleting_movies_in_batches_default(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -1404,6 +1424,7 @@ fn indexing_movies_in_three_batches(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -1449,6 +1470,7 @@ fn indexing_movies_in_three_batches(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -1490,6 +1512,7 @@ fn indexing_movies_in_three_batches(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -1580,6 +1603,7 @@ fn indexing_nested_movies_default(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -1671,6 +1695,7 @@ fn deleting_nested_movies_in_batches_default(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -1754,6 +1779,7 @@ fn indexing_nested_movies_without_faceted_fields(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -1821,6 +1847,7 @@ fn indexing_geo(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -1887,6 +1914,7 @@ fn reindexing_geo(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -1932,6 +1960,7 @@ fn reindexing_geo(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
 
@@ -2000,6 +2029,7 @@ fn deleting_geo_in_batches_default(c: &mut Criterion) {
                 &mut new_fields_ids_map,
                 &|| false,
                 Progress::default(),
+                None,
             )
             .unwrap();
```
### crates/benchmarks/benches/sort.rs (new file, 114 lines)

```rust
//! This benchmark module is used to compare the performance of sorting documents in /search VS /documents
//!
//! The tests/benchmarks were designed in the context of a query returning only 20 documents.

mod datasets_paths;
mod utils;

use criterion::{criterion_group, criterion_main};
use milli::update::Settings;
use utils::Conf;

#[cfg(not(windows))]
#[global_allocator]
static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;

fn base_conf(builder: &mut Settings) {
    let displayed_fields =
        ["geonameid", "name", "asciiname", "alternatenames", "_geo", "population"]
            .iter()
            .map(|s| s.to_string())
            .collect();
    builder.set_displayed_fields(displayed_fields);

    let sortable_fields =
        ["_geo", "name", "population", "elevation", "timezone", "modification-date"]
            .iter()
            .map(|s| s.to_string())
            .collect();
    builder.set_sortable_fields(sortable_fields);
}

#[rustfmt::skip]
const BASE_CONF: Conf = Conf {
    dataset: datasets_paths::SMOL_ALL_COUNTRIES,
    dataset_format: "jsonl",
    configure: base_conf,
    primary_key: Some("geonameid"),
    queries: &[""],
    offsets: &[
        Some((0, 20)),      // The most common query in the real world
        Some((0, 500)),     // A query that ranges over many documents
        Some((980, 20)),    // The worst query that could happen in the real world
        Some((800_000, 20)) // The worst query
    ],
    get_documents: true,
    ..Conf::BASE
};

fn bench_sort(c: &mut criterion::Criterion) {
    #[rustfmt::skip]
    let confs = &[
        utils::Conf {
            group_name: "without sort",
            sort: None,
            ..BASE_CONF
        },

        utils::Conf {
            group_name: "sort on many different values",
            sort: Some(vec!["name:asc"]),
            ..BASE_CONF
        },

        utils::Conf {
            group_name: "sort on many similar values",
            sort: Some(vec!["timezone:desc"]),
            ..BASE_CONF
        },

        utils::Conf {
            group_name: "sort on many similar then different values",
            sort: Some(vec!["timezone:desc", "name:asc"]),
            ..BASE_CONF
        },

        utils::Conf {
            group_name: "sort on many different then similar values",
            sort: Some(vec!["timezone:desc", "name:asc"]),
            ..BASE_CONF
        },

        utils::Conf {
            group_name: "geo sort",
            sample_size: Some(10),
            sort: Some(vec!["_geoPoint(45.4777599, 9.1967508):asc"]),
            ..BASE_CONF
        },

        utils::Conf {
            group_name: "sort on many similar values then geo sort",
            sample_size: Some(50),
            sort: Some(vec!["timezone:desc", "_geoPoint(45.4777599, 9.1967508):asc"]),
            ..BASE_CONF
        },

        utils::Conf {
            group_name: "sort on many different values then geo sort",
            sample_size: Some(50),
            sort: Some(vec!["name:desc", "_geoPoint(45.4777599, 9.1967508):asc"]),
            ..BASE_CONF
        },

        utils::Conf {
            group_name: "sort on many fields",
            sort: Some(vec!["population:asc", "name:asc", "elevation:asc", "timezone:asc"]),
            ..BASE_CONF
        },
    ];

    utils::run_benches(c, confs);
}

criterion_group!(benches, bench_sort);
criterion_main!(benches);
```
```diff
@@ -9,6 +9,7 @@ use anyhow::Context;
 use bumpalo::Bump;
 use criterion::BenchmarkId;
 use memmap2::Mmap;
+use milli::documents::sort::recursive_sort;
 use milli::heed::EnvOpenOptions;
 use milli::progress::Progress;
 use milli::update::new::indexer;
@@ -35,6 +36,12 @@ pub struct Conf<'a> {
     pub configure: fn(&mut Settings),
     pub filter: Option<&'a str>,
     pub sort: Option<Vec<&'a str>>,
+    /// set to skip documents (offset, limit)
+    pub offsets: &'a [Option<(usize, usize)>],
+    /// enable if you want to bench getting documents without querying
+    pub get_documents: bool,
+    /// configure the benchmark sample size
+    pub sample_size: Option<usize>,
     /// enable or disable the optional words on the query
     pub optional_words: bool,
     /// primary key, if there is None we'll auto-generate docids for every document
@@ -52,6 +59,9 @@ impl Conf<'_> {
         configure: |_| (),
         filter: None,
         sort: None,
+        offsets: &[None],
+        get_documents: false,
+        sample_size: None,
         optional_words: true,
         primary_key: None,
     };
@@ -113,6 +123,7 @@ pub fn base_setup(conf: &Conf) -> Index {
             &mut new_fields_ids_map,
             &|| false,
             Progress::default(),
+            None,
         )
         .unwrap();
 
@@ -145,25 +156,79 @@ pub fn run_benches(c: &mut criterion::Criterion, confs: &[Conf]) {
         let file_name = Path::new(conf.dataset).file_name().and_then(|f| f.to_str()).unwrap();
         let name = format!("{}: {}", file_name, conf.group_name);
         let mut group = c.benchmark_group(&name);
+        if let Some(sample_size) = conf.sample_size {
+            group.sample_size(sample_size);
+        }
 
         for &query in conf.queries {
-            group.bench_with_input(BenchmarkId::from_parameter(query), &query, |b, &query| {
-                b.iter(|| {
-                    let rtxn = index.read_txn().unwrap();
-                    let mut search = index.search(&rtxn);
-                    search.query(query).terms_matching_strategy(TermsMatchingStrategy::default());
-                    if let Some(filter) = conf.filter {
-                        let filter = Filter::from_str(filter).unwrap().unwrap();
-                        search.filter(filter);
-                    }
-                    if let Some(sort) = &conf.sort {
-                        let sort = sort.iter().map(|sort| sort.parse().unwrap()).collect();
-                        search.sort_criteria(sort);
-                    }
-                    let _ids = search.execute().unwrap();
-                });
-            });
+            for offset in conf.offsets {
+                let parameter = match offset {
+                    None => query.to_string(),
+                    Some((offset, limit)) => format!("{query}[{offset}:{limit}]"),
+                };
+                group.bench_with_input(
+                    BenchmarkId::from_parameter(parameter),
+                    &query,
+                    |b, &query| {
+                        b.iter(|| {
+                            let rtxn = index.read_txn().unwrap();
+                            let mut search = index.search(&rtxn);
+                            search
+                                .query(query)
+                                .terms_matching_strategy(TermsMatchingStrategy::default());
+                            if let Some(filter) = conf.filter {
+                                let filter = Filter::from_str(filter).unwrap().unwrap();
+                                search.filter(filter);
+                            }
+                            if let Some(sort) = &conf.sort {
+                                let sort = sort.iter().map(|sort| sort.parse().unwrap()).collect();
+                                search.sort_criteria(sort);
+                            }
+                            if let Some((offset, limit)) = offset {
+                                search.offset(*offset).limit(*limit);
+                            }
+
+                            let _ids = search.execute().unwrap();
+                        });
+                    },
+                );
+            }
         }
 
+        if conf.get_documents {
+            for offset in conf.offsets {
+                let parameter = match offset {
+                    None => String::from("get_documents"),
+                    Some((offset, limit)) => format!("get_documents[{offset}:{limit}]"),
+                };
+                group.bench_with_input(BenchmarkId::from_parameter(parameter), &(), |b, &()| {
+                    b.iter(|| {
+                        let rtxn = index.read_txn().unwrap();
+                        if let Some(sort) = &conf.sort {
+                            let sort = sort.iter().map(|sort| sort.parse().unwrap()).collect();
+                            let all_docs = index.documents_ids(&rtxn).unwrap();
+                            let facet_sort =
+                                recursive_sort(&index, &rtxn, sort, &all_docs).unwrap();
+                            let iter = facet_sort.iter().unwrap();
+                            if let Some((offset, limit)) = offset {
+                                let _results = iter.skip(*offset).take(*limit).collect::<Vec<_>>();
+                            } else {
+                                let _results = iter.collect::<Vec<_>>();
+                            }
+                        } else {
+                            let all_docs = index.documents_ids(&rtxn).unwrap();
+                            if let Some((offset, limit)) = offset {
+                                let _results =
+                                    all_docs.iter().skip(*offset).take(*limit).collect::<Vec<_>>();
+                            } else {
+                                let _results = all_docs.iter().collect::<Vec<_>>();
+                            }
+                        }
+                    });
+                });
+            }
+        }
 
         group.finish();
 
         index.prepare_for_closing().wait();
```
```diff
@@ -10,7 +10,7 @@ use meilisearch_types::keys::Key;
 use meilisearch_types::milli::update::IndexDocumentsMethod;
 use meilisearch_types::settings::Unchecked;
 use meilisearch_types::tasks::{
-    Details, ExportIndexSettings, IndexSwap, KindWithContent, Status, Task, TaskId,
+    Details, ExportIndexSettings, IndexSwap, KindWithContent, Status, Task, TaskId, TaskNetwork,
 };
 use meilisearch_types::InstanceUid;
 use roaring::RoaringBitmap;
@@ -94,6 +94,8 @@ pub struct TaskDump {
         default
     )]
     pub finished_at: Option<OffsetDateTime>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub network: Option<TaskNetwork>,
 }
 
 // A `Kind` specific version made for the dump. If modified you may break the dump.
@@ -171,6 +173,7 @@ impl From<Task> for TaskDump {
             enqueued_at: task.enqueued_at,
             started_at: task.started_at,
             finished_at: task.finished_at,
+            network: task.network,
         }
     }
 }
@@ -250,11 +253,12 @@ pub(crate) mod test {
     use maplit::{btreemap, btreeset};
     use meilisearch_types::batches::{Batch, BatchEnqueuedAt, BatchStats};
     use meilisearch_types::facet_values_sort::FacetValuesSort;
-    use meilisearch_types::features::{Network, Remote, RuntimeTogglableFeatures};
+    use meilisearch_types::features::RuntimeTogglableFeatures;
     use meilisearch_types::index_uid_pattern::IndexUidPattern;
     use meilisearch_types::keys::{Action, Key};
     use meilisearch_types::milli::update::Setting;
     use meilisearch_types::milli::{self, FilterableAttributesRule};
+    use meilisearch_types::network::{Network, Remote};
     use meilisearch_types::settings::{Checked, FacetingSettings, Settings};
     use meilisearch_types::task_view::DetailsView;
     use meilisearch_types::tasks::{BatchStopReason, Details, Kind, Status};
@@ -383,6 +387,7 @@ pub(crate) mod test {
                 enqueued_at: datetime!(2022-11-11 0:00 UTC),
                 started_at: Some(datetime!(2022-11-20 0:00 UTC)),
                 finished_at: Some(datetime!(2022-11-21 0:00 UTC)),
+                network: None,
             },
             None,
         ),
@@ -407,6 +412,7 @@ pub(crate) mod test {
                 enqueued_at: datetime!(2022-11-11 0:00 UTC),
                 started_at: None,
                 finished_at: None,
+                network: None,
             },
             Some(vec![
                 json!({ "id": 4, "race": "leonberg" }).as_object().unwrap().clone(),
@@ -426,6 +432,7 @@ pub(crate) mod test {
                 enqueued_at: datetime!(2022-11-15 0:00 UTC),
                 started_at: None,
                 finished_at: None,
+                network: None,
             },
             None,
         ),
@@ -538,7 +545,8 @@ pub(crate) mod test {
     fn create_test_network() -> Network {
         Network {
             local: Some("myself".to_string()),
-            remotes: maplit::btreemap! {"other".to_string() => Remote { url: "http://test".to_string(), search_api_key: Some("apiKey".to_string()) }},
+            remotes: maplit::btreemap! {"other".to_string() => Remote { url: "http://test".to_string(), search_api_key: Some("apiKey".to_string()), write_api_key: Some("docApiKey".to_string()) }},
+            sharding: false,
         }
     }
```
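The serde attributes on the new `network` field are what keep old dumps readable and new dumps clean. A minimal standalone sketch of that behavior (with `String` standing in for the real `TaskNetwork` type; assumes `serde` and `serde_json` as dependencies):

```rust
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
struct TaskDumpExcerpt {
    uid: u32,
    // `default` lets dumps written before the field existed still deserialize;
    // `skip_serializing_if` keeps the field out of the output when it is unset.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    network: Option<String>,
}

fn main() {
    // An old dump entry without the field parses fine...
    let task: TaskDumpExcerpt = serde_json::from_str(r#"{"uid":1}"#).unwrap();
    assert!(task.network.is_none());
    // ...and serializing it back does not emit `"network": null`.
    assert_eq!(serde_json::to_string(&task).unwrap(), r#"{"uid":1}"#);
}
```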
```diff
@@ -1,3 +1,4 @@
+use std::fs::File;
 use std::str::FromStr;
 
 use super::v2_to_v3::CompatV2ToV3;
@@ -94,6 +95,10 @@ impl CompatIndexV1ToV2 {
         self.from.documents().map(|it| Box::new(it) as Box<dyn Iterator<Item = _>>)
     }
 
+    pub fn documents_file(&self) -> &File {
+        self.from.documents_file()
+    }
+
     pub fn settings(&mut self) -> Result<v2::settings::Settings<v2::settings::Checked>> {
         Ok(v2::settings::Settings::<v2::settings::Unchecked>::from(self.from.settings()?).check())
     }
```

```diff
@@ -1,3 +1,4 @@
+use std::fs::File;
 use std::str::FromStr;
 
 use time::OffsetDateTime;
@@ -122,6 +123,13 @@ impl CompatIndexV2ToV3 {
         }
     }
 
+    pub fn documents_file(&self) -> &File {
+        match self {
+            CompatIndexV2ToV3::V2(v2) => v2.documents_file(),
+            CompatIndexV2ToV3::Compat(compat) => compat.documents_file(),
+        }
+    }
+
     pub fn settings(&mut self) -> Result<v3::Settings<v3::Checked>> {
         let settings = match self {
             CompatIndexV2ToV3::V2(from) => from.settings()?,
```

```diff
@@ -1,3 +1,5 @@
+use std::fs::File;
+
 use super::v2_to_v3::{CompatIndexV2ToV3, CompatV2ToV3};
 use super::v4_to_v5::CompatV4ToV5;
 use crate::reader::{v3, v4, UpdateFile};
@@ -252,6 +254,13 @@ impl CompatIndexV3ToV4 {
         }
     }
 
+    pub fn documents_file(&self) -> &File {
+        match self {
+            CompatIndexV3ToV4::V3(v3) => v3.documents_file(),
+            CompatIndexV3ToV4::Compat(compat) => compat.documents_file(),
+        }
+    }
+
     pub fn settings(&mut self) -> Result<v4::Settings<v4::Checked>> {
         Ok(match self {
             CompatIndexV3ToV4::V3(v3) => {
```

```diff
@@ -1,3 +1,5 @@
+use std::fs::File;
+
 use super::v3_to_v4::{CompatIndexV3ToV4, CompatV3ToV4};
 use super::v5_to_v6::CompatV5ToV6;
 use crate::reader::{v4, v5, Document};
@@ -241,6 +243,13 @@ impl CompatIndexV4ToV5 {
         }
     }
 
+    pub fn documents_file(&self) -> &File {
+        match self {
+            CompatIndexV4ToV5::V4(v4) => v4.documents_file(),
+            CompatIndexV4ToV5::Compat(compat) => compat.documents_file(),
+        }
+    }
+
     pub fn settings(&mut self) -> Result<v5::Settings<v5::Checked>> {
         match self {
             CompatIndexV4ToV5::V4(v4) => Ok(v5::Settings::from(v4.settings()?).check()),
```

```diff
@@ -1,3 +1,4 @@
+use std::fs::File;
 use std::num::NonZeroUsize;
 use std::str::FromStr;
 
@@ -160,6 +161,7 @@ impl CompatV5ToV6 {
             enqueued_at: task_view.enqueued_at,
             started_at: task_view.started_at,
             finished_at: task_view.finished_at,
+            network: None,
         };
 
         (task, content_file)
@@ -243,6 +245,13 @@ impl CompatIndexV5ToV6 {
         }
     }
 
+    pub fn documents_file(&self) -> &File {
+        match self {
+            CompatIndexV5ToV6::V5(v5) => v5.documents_file(),
+            CompatIndexV5ToV6::Compat(compat) => compat.documents_file(),
+        }
+    }
+
     pub fn settings(&mut self) -> Result<v6::Settings<v6::Checked>> {
         match self {
             CompatIndexV5ToV6::V5(v5) => Ok(v6::Settings::from(v5.settings()?).check()),
```
```diff
@@ -192,6 +192,14 @@ impl DumpIndexReader {
         }
     }
 
+    /// A reference to a file in the NDJSON format containing all the documents of the index
+    pub fn documents_file(&self) -> &File {
+        match self {
+            DumpIndexReader::Current(v6) => v6.documents_file(),
+            DumpIndexReader::Compat(compat) => compat.documents_file(),
+        }
+    }
+
     pub fn settings(&mut self) -> Result<v6::Settings<v6::Checked>> {
         match self {
             DumpIndexReader::Current(v6) => v6.settings(),
```
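Every dump reader version now exposes `documents_file()`, a raw handle to the NDJSON file holding the index's documents (one JSON object per line). As a hedged illustration of how a caller might consume such a handle, here is a hypothetical helper that is not part of the dump API:

```rust
use std::fs::File;
use std::io::{BufRead, BufReader, Seek, SeekFrom};

/// Hypothetical helper (not part of the dump API): count the documents in an
/// NDJSON file such as the one returned by `documents_file()`.
fn count_ndjson_documents(file: &File) -> std::io::Result<usize> {
    // Clone the handle and rewind it; note that the clone shares its cursor
    // with the original handle, so callers should not read concurrently.
    let mut file = file.try_clone()?;
    file.seek(SeekFrom::Start(0))?;
    let reader = BufReader::new(file);
    let mut count = 0;
    for line in reader.lines() {
        if !line?.trim().is_empty() {
            count += 1; // one JSON document per non-empty line
        }
    }
    Ok(count)
}
```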
```diff
@@ -72,6 +72,10 @@ impl V1IndexReader {
             .map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }))
     }
 
+    pub fn documents_file(&self) -> &File {
+        self.documents.get_ref()
+    }
+
     pub fn settings(&mut self) -> Result<self::settings::Settings> {
         Ok(serde_json::from_reader(&mut self.settings)?)
     }
@@ -203,6 +203,10 @@ impl V2IndexReader {
             .map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }))
     }
 
+    pub fn documents_file(&self) -> &File {
+        self.documents.get_ref()
+    }
+
     pub fn settings(&mut self) -> Result<Settings<Checked>> {
         Ok(self.settings.clone())
     }
@@ -215,6 +215,10 @@ impl V3IndexReader {
             .map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }))
     }
 
+    pub fn documents_file(&self) -> &File {
+        self.documents.get_ref()
+    }
+
     pub fn settings(&mut self) -> Result<Settings<Checked>> {
         Ok(self.settings.clone())
     }
@@ -210,6 +210,10 @@ impl V4IndexReader {
             .map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }))
     }
 
+    pub fn documents_file(&self) -> &File {
+        self.documents.get_ref()
+    }
+
     pub fn settings(&mut self) -> Result<Settings<Checked>> {
         Ok(self.settings.clone())
     }
@@ -247,6 +247,10 @@ impl V5IndexReader {
             .map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }))
     }
 
+    pub fn documents_file(&self) -> &File {
+        self.documents.get_ref()
+    }
+
     pub fn settings(&mut self) -> Result<Settings<Checked>> {
         Ok(self.settings.clone())
     }
```
```diff
@@ -24,7 +24,7 @@ pub type Batch = meilisearch_types::batches::Batch;
 pub type Key = meilisearch_types::keys::Key;
 pub type ChatCompletionSettings = meilisearch_types::features::ChatCompletionSettings;
 pub type RuntimeTogglableFeatures = meilisearch_types::features::RuntimeTogglableFeatures;
-pub type Network = meilisearch_types::features::Network;
+pub type Network = meilisearch_types::network::Network;
 
 // ===== Other types to clarify the code of the compat module
 // everything related to the tasks
@@ -284,6 +284,10 @@ impl V6IndexReader {
             .map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }))
     }
 
+    pub fn documents_file(&self) -> &File {
+        self.documents.get_ref()
+    }
+
     pub fn settings(&mut self) -> Result<Settings<Checked>> {
         let mut settings: Settings<Unchecked> = serde_json::from_reader(&mut self.settings)?;
         patch_embedders(&mut settings);
```
```diff
@@ -5,8 +5,9 @@ use std::path::PathBuf;
 use flate2::write::GzEncoder;
 use flate2::Compression;
 use meilisearch_types::batches::Batch;
-use meilisearch_types::features::{ChatCompletionSettings, Network, RuntimeTogglableFeatures};
+use meilisearch_types::features::{ChatCompletionSettings, RuntimeTogglableFeatures};
 use meilisearch_types::keys::Key;
+use meilisearch_types::network::Network;
 use meilisearch_types::settings::{Checked, Settings};
 use serde_json::{Map, Value};
 use tempfile::TempDir;
```
```diff
@@ -148,11 +148,10 @@ impl File {
         Ok(Self { path: PathBuf::new(), file: None })
     }
 
-    pub fn persist(self) -> Result<()> {
-        if let Some(file) = self.file {
-            file.persist(&self.path)?;
-        }
-        Ok(())
+    pub fn persist(self) -> Result<Option<StdFile>> {
+        let Some(file) = self.file else { return Ok(None) };
+
+        Ok(Some(file.persist(&self.path)?))
     }
 }
```
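With this change, `persist` hands back the persisted `std::fs::File` (if any) instead of discarding it, so a caller can keep working with the file without reopening its path. A hedged caller sketch, assuming the update-file wrapper from the hunk above and that its error type converts into `anyhow::Error`:

```rust
use std::fs::File as StdFile;

// Hypothetical caller: persist the update file and, when a file actually
// existed, reuse the returned handle to report its on-disk size.
fn persist_and_measure(file: File) -> anyhow::Result<Option<u64>> {
    let persisted: Option<StdFile> = file.persist()?;
    Ok(match persisted {
        Some(std_file) => Some(std_file.metadata()?.len()),
        None => None,
    })
}
```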
```diff
@@ -129,6 +129,7 @@ fn main() {
             &mut new_fields_ids_map,
             &|| false,
             Progress::default(),
+            None,
         )
         .unwrap();
```
```diff
@@ -26,7 +26,7 @@ flate2 = "1.1.2"
 indexmap = "2.9.0"
 meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-types = { path = "../meilisearch-types" }
-memmap2 = "0.9.5"
+memmap2 = "0.9.7"
 page_size = "0.6.0"
 rayon = "1.10.0"
 roaring = { version = "0.10.12", features = ["serde"] }
```
```diff
@@ -147,6 +147,7 @@ impl<'a> Dump<'a> {
             canceled_by: task.canceled_by,
             details: task.details,
             status: task.status,
+            network: task.network,
             kind: match task.kind {
                 KindDump::DocumentImport {
                     primary_key,
```
```diff
@@ -1,8 +1,9 @@
 use std::sync::{Arc, RwLock};
 
-use meilisearch_types::features::{InstanceTogglableFeatures, Network, RuntimeTogglableFeatures};
+use meilisearch_types::features::{InstanceTogglableFeatures, RuntimeTogglableFeatures};
 use meilisearch_types::heed::types::{SerdeJson, Str};
 use meilisearch_types::heed::{Database, Env, RwTxn, WithoutTls};
+use meilisearch_types::network::Network;
 
 use crate::error::FeatureNotEnabledError;
 use crate::Result;
```
```diff
@@ -20,6 +20,7 @@ pub fn snapshot_index_scheduler(scheduler: &IndexScheduler) -> String {
 
     let IndexScheduler {
         cleanup_enabled: _,
+        experimental_no_edition_2024_for_dumps: _,
        processing_tasks,
        env,
        version,
@@ -212,6 +213,7 @@ pub fn snapshot_task(task: &Task) -> String {
        details,
        status,
        kind,
+        network,
    } = task;
    snap.push('{');
    snap.push_str(&format!("uid: {uid}, "));
@@ -229,6 +231,9 @@ pub fn snapshot_task(task: &Task) -> String {
        snap.push_str(&format!("details: {}, ", &snapshot_details(details)));
    }
    snap.push_str(&format!("kind: {kind:?}"));
+    if let Some(network) = network {
+        snap.push_str(&format!("network: {network:?}, "))
+    }
 
    snap.push('}');
    snap
```
```diff
@@ -52,7 +52,7 @@ use flate2::bufread::GzEncoder;
 use flate2::Compression;
 use meilisearch_types::batches::Batch;
 use meilisearch_types::features::{
-    ChatCompletionSettings, InstanceTogglableFeatures, Network, RuntimeTogglableFeatures,
+    ChatCompletionSettings, InstanceTogglableFeatures, RuntimeTogglableFeatures,
 };
 use meilisearch_types::heed::byteorder::BE;
 use meilisearch_types::heed::types::{DecodeIgnore, SerdeJson, Str, I128};
@@ -63,8 +63,9 @@ use meilisearch_types::milli::vector::{
     Embedder, EmbedderOptions, RuntimeEmbedder, RuntimeEmbedders, RuntimeFragment,
 };
 use meilisearch_types::milli::{self, Index};
+use meilisearch_types::network::Network;
 use meilisearch_types::task_view::TaskView;
-use meilisearch_types::tasks::{KindWithContent, Task};
+use meilisearch_types::tasks::{KindWithContent, Task, TaskNetwork};
 use milli::vector::db::IndexEmbeddingConfig;
 use processing::ProcessingTasks;
 pub use queue::Query;
```
```diff
@@ -168,6 +169,9 @@ pub struct IndexScheduler {
     /// Whether we should automatically cleanup the task queue or not.
     pub(crate) cleanup_enabled: bool,
 
+    /// Whether we should use the old document indexer or the new one.
+    pub(crate) experimental_no_edition_2024_for_dumps: bool,
+
     /// The webhook url we should send tasks to after processing every batches.
     pub(crate) webhook_url: Option<String>,
     /// The Authorization header to send to the webhook URL.
@@ -210,6 +214,7 @@ impl IndexScheduler {
 
             index_mapper: self.index_mapper.clone(),
             cleanup_enabled: self.cleanup_enabled,
+            experimental_no_edition_2024_for_dumps: self.experimental_no_edition_2024_for_dumps,
             webhook_url: self.webhook_url.clone(),
             webhook_authorization_header: self.webhook_authorization_header.clone(),
             embedders: self.embedders.clone(),
@@ -296,6 +301,9 @@ impl IndexScheduler {
             index_mapper,
             env,
             cleanup_enabled: options.cleanup_enabled,
+            experimental_no_edition_2024_for_dumps: options
+                .indexer_config
+                .experimental_no_edition_2024_for_dumps,
             webhook_url: options.webhook_url,
             webhook_authorization_header: options.webhook_authorization_header,
             embedders: Default::default(),
@@ -594,6 +602,11 @@ impl IndexScheduler {
         Ok(nbr_index_processing_tasks > 0)
     }
 
+    /// Whether the index should use the old document indexer.
+    pub fn no_edition_2024_for_dumps(&self) -> bool {
+        self.experimental_no_edition_2024_for_dumps
+    }
+
     /// Return the tasks matching the query from the user's point of view along
     /// with the total number of tasks matching the query, ignoring from and limit.
     ///
@@ -632,6 +645,16 @@ impl IndexScheduler {
         self.queue.get_task_ids_from_authorized_indexes(&rtxn, query, filters, &processing)
     }
 
+    pub fn set_task_network(&self, task_id: TaskId, network: TaskNetwork) -> Result<()> {
+        let mut wtxn = self.env.write_txn()?;
+        let mut task =
+            self.queue.tasks.get_task(&wtxn, task_id)?.ok_or(Error::TaskNotFound(task_id))?;
+        task.network = Some(network);
+        self.queue.tasks.all_tasks.put(&mut wtxn, &task_id, &task)?;
+        wtxn.commit()?;
+        Ok(())
+    }
+
     /// Return the batches matching the query from the user's point of view along
     /// with the total number of batches matching the query, ignoring from and limit.
     ///
```
```diff
@@ -279,6 +279,7 @@ impl Queue {
             details: kind.default_details(),
             status: Status::Enqueued,
             kind: kind.clone(),
+            network: None,
         };
         // For deletion and cancelation tasks, we want to make extra sure that they
         // don't attempt to delete/cancel tasks that are newer than themselves.
```
```diff
@@ -97,7 +97,22 @@ impl TaskQueue {
         Ok(self.all_tasks.get(rtxn, &task_id)?)
     }
 
-    pub(crate) fn update_task(&self, wtxn: &mut RwTxn, task: &Task) -> Result<()> {
+    /// Update the inverted task indexes and write the new value of the task.
+    ///
+    /// The passed `task` object typically comes from a previous transaction, so two kinds of modification might have occurred:
+    /// 1. Modification to the `task` object after loading it from the DB (the purpose of this method is to persist these changes)
+    /// 2. Modification to the task committed by another transaction in the DB (an annoying consequence of having lost the original
+    ///    transaction from which the `task` instance was deserialized)
+    ///
+    /// When calling this function, the `task` is modified to take into account any existing `network`
+    /// that may have been added since the task was loaded into memory.
+    ///
+    /// Any other modification to the task that was committed to the DB since the parameter was pulled from the DB will be overwritten.
+    ///
+    /// # Errors
+    ///
+    /// - CorruptedTaskQueue: The task doesn't exist in the database
+    pub(crate) fn update_task(&self, wtxn: &mut RwTxn, task: &mut Task) -> Result<()> {
         let old_task = self.get_task(wtxn, task.uid)?.ok_or(Error::CorruptedTaskQueue)?;
         let reprocessing = old_task.status != Status::Enqueued;
 
@@ -157,6 +172,12 @@ impl TaskQueue {
            }
        }
 
+        task.network = match (old_task.network, task.network.take()) {
+            (None, None) => None,
+            (None, Some(network)) | (Some(network), None) => Some(network),
+            (Some(_), Some(network)) => Some(network),
+        };
+
        self.all_tasks.put(wtxn, &task.uid, task)?;
        Ok(())
    }
```
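The doc comment above pins down the precedence rule: a `network` set on the in-memory task wins, otherwise a `network` committed in the meantime is preserved. A standalone restatement of that match, with `String` standing in for the real `TaskNetwork` type:

```rust
// Standalone restatement of the merge rule from `update_task`, using `String`
// as a stand-in for `TaskNetwork`.
fn merge_network(old: Option<String>, new: Option<String>) -> Option<String> {
    match (old, new) {
        (None, None) => None,
        (None, Some(network)) | (Some(network), None) => Some(network),
        // When both are set, the value carried by the in-memory task wins.
        (Some(_), Some(network)) => Some(network),
    }
}

fn main() {
    assert_eq!(merge_network(None, None), None);
    assert_eq!(merge_network(Some("committed".into()), None), Some("committed".into()));
    assert_eq!(merge_network(Some("committed".into()), Some("in-memory".into())), Some("in-memory".into()));
}
```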
```diff
@@ -268,7 +268,7 @@ impl IndexScheduler {
 
             self.queue
                 .tasks
-                .update_task(&mut wtxn, &task)
+                .update_task(&mut wtxn, &mut task)
                 .map_err(|e| Error::UnrecoverableError(Box::new(e)))?;
         }
         if let Some(canceled_by) = canceled_by {
@@ -349,7 +349,7 @@ impl IndexScheduler {
 
             self.queue
                 .tasks
-                .update_task(&mut wtxn, &task)
+                .update_task(&mut wtxn, &mut task)
                 .map_err(|e| Error::UnrecoverableError(Box::new(e)))?;
         }
     }
```
```diff
@@ -5,6 +5,7 @@ use std::sync::atomic::Ordering;
 
 use dump::IndexMetadata;
 use meilisearch_types::milli::constants::RESERVED_VECTORS_FIELD_NAME;
+use meilisearch_types::milli::index::EmbeddingsWithMetadata;
 use meilisearch_types::milli::progress::{Progress, VariableNameStep};
 use meilisearch_types::milli::vector::parsed_vectors::{ExplicitVectors, VectorOrArrayOfVectors};
 use meilisearch_types::milli::{self};
@@ -227,12 +228,21 @@ impl IndexScheduler {
                     return Err(Error::from_milli(user_err, Some(uid.to_string())));
                 };
 
-                for (embedder_name, (embeddings, regenerate)) in embeddings {
+                for (
+                    embedder_name,
+                    EmbeddingsWithMetadata { embeddings, regenerate, has_fragments },
+                ) in embeddings
+                {
                     let embeddings = ExplicitVectors {
                         embeddings: Some(VectorOrArrayOfVectors::from_array_of_vectors(
                             embeddings,
                         )),
-                        regenerate,
+                        regenerate: regenerate &&
+                            // Meilisearch does not handle dumps with fragments well: because the fragments
+                            // are marked as user-provided,
+                            // all embeddings would be regenerated on any settings change or document update.
+                            // To prevent this, we mark embeddings as non-regenerate in this case.
+                            !has_fragments,
                     };
                     vectors.insert(embedder_name, serde_json::to_value(embeddings).unwrap());
                 }
```
```diff
@@ -9,6 +9,7 @@ use flate2::write::GzEncoder;
 use flate2::Compression;
 use meilisearch_types::index_uid_pattern::IndexUidPattern;
 use meilisearch_types::milli::constants::RESERVED_VECTORS_FIELD_NAME;
+use meilisearch_types::milli::index::EmbeddingsWithMetadata;
 use meilisearch_types::milli::progress::{Progress, VariableNameStep};
 use meilisearch_types::milli::update::{request_threads, Setting};
 use meilisearch_types::milli::vector::parsed_vectors::{ExplicitVectors, VectorOrArrayOfVectors};
@@ -62,13 +63,14 @@ impl IndexScheduler {
         let ExportIndexSettings { filter, override_settings } = export_settings;
         let index = self.index(uid)?;
         let index_rtxn = index.read_txn()?;
+        let bearer = api_key.map(|api_key| format!("Bearer {api_key}"));
 
         // First, check if the index already exists
         let url = format!("{base_url}/indexes/{uid}");
         let response = retry(&must_stop_processing, || {
             let mut request = agent.get(&url);
-            if let Some(api_key) = api_key {
-                request = request.set("Authorization", &format!("Bearer {api_key}"));
+            if let Some(bearer) = &bearer {
+                request = request.set("Authorization", bearer);
             }
 
             request.send_bytes(Default::default()).map_err(into_backoff_error)
@@ -90,8 +92,8 @@ impl IndexScheduler {
             let url = format!("{base_url}/indexes");
             retry(&must_stop_processing, || {
                 let mut request = agent.post(&url);
-                if let Some(api_key) = api_key {
-                    request = request.set("Authorization", &format!("Bearer {api_key}"));
+                if let Some(bearer) = &bearer {
+                    request = request.set("Authorization", bearer);
                 }
                 let index_param = json!({ "uid": uid, "primaryKey": primary_key });
                 request.send_json(&index_param).map_err(into_backoff_error)
@@ -103,8 +105,8 @@ impl IndexScheduler {
             let url = format!("{base_url}/indexes/{uid}");
             retry(&must_stop_processing, || {
                 let mut request = agent.patch(&url);
-                if let Some(api_key) = api_key {
-                    request = request.set("Authorization", &format!("Bearer {api_key}"));
+                if let Some(bearer) = &bearer {
+                    request = request.set("Authorization", bearer);
                 }
                 let index_param = json!({ "primaryKey": primary_key });
                 request.send_json(&index_param).map_err(into_backoff_error)
@@ -122,7 +124,6 @@ impl IndexScheduler {
         }
         // Retry logic for sending settings
         let url = format!("{base_url}/indexes/{uid}/settings");
-        let bearer = api_key.map(|api_key| format!("Bearer {api_key}"));
         retry(&must_stop_processing, || {
             let mut request = agent.patch(&url);
             if let Some(bearer) = bearer.as_ref() {
@@ -167,10 +168,10 @@ impl IndexScheduler {
             },
         );
 
-        let limit = payload_size.map(|ps| ps.as_u64() as usize).unwrap_or(50 * 1024 * 1024); // defaults to 50 MiB
+        let limit = payload_size.map(|ps| ps.as_u64() as usize).unwrap_or(20 * 1024 * 1024); // defaults to 20 MiB
         let documents_url = format!("{base_url}/indexes/{uid}/documents");
 
-        request_threads()
+        let results = request_threads()
             .broadcast(|ctx| {
                 let index_rtxn = index
                     .read_txn()
@@ -229,12 +230,21 @@ impl IndexScheduler {
                         ));
                     };
 
-                    for (embedder_name, (embeddings, regenerate)) in embeddings {
+                    for (
+                        embedder_name,
+                        EmbeddingsWithMetadata { embeddings, regenerate, has_fragments },
+                    ) in embeddings
+                    {
                         let embeddings = ExplicitVectors {
                             embeddings: Some(
                                 VectorOrArrayOfVectors::from_array_of_vectors(embeddings),
                             ),
-                            regenerate,
+                            regenerate: regenerate &&
+                                // Meilisearch does not handle dumps with fragments well: because the fragments
+                                // are marked as user-provided,
+                                // all embeddings would be regenerated on any settings change or document update.
+                                // To prevent this, we mark embeddings as non-regenerate in this case.
+                                !has_fragments,
                         };
                         vectors.insert(
                             embedder_name,
@@ -265,9 +275,8 @@ impl IndexScheduler {
                             let mut request = agent.post(&documents_url);
                             request = request.set("Content-Type", "application/x-ndjson");
                             request = request.set("Content-Encoding", "gzip");
-                            if let Some(api_key) = api_key {
-                                request = request
-                                    .set("Authorization", &(format!("Bearer {api_key}")));
+                            if let Some(bearer) = &bearer {
+                                request = request.set("Authorization", bearer);
                             }
                             request.send_bytes(&compressed_buffer).map_err(into_backoff_error)
                         })?;
@@ -276,7 +285,7 @@ impl IndexScheduler {
                     }
                     buffer.extend_from_slice(&tmp_buffer);
 
-                    if i % 100 == 0 {
+                    if i > 0 && i % 100 == 0 {
                         step.fetch_add(100, atomic::Ordering::Relaxed);
                     }
                 }
@@ -284,8 +293,8 @@ impl IndexScheduler {
                 retry(&must_stop_processing, || {
                     let mut request = agent.post(&documents_url);
                     request = request.set("Content-Type", "application/x-ndjson");
-                    if let Some(api_key) = api_key {
-                        request = request.set("Authorization", &(format!("Bearer {api_key}")));
+                    if let Some(bearer) = &bearer {
+                        request = request.set("Authorization", bearer);
                    }
                    request.send_bytes(&buffer).map_err(into_backoff_error)
                })?;
@@ -298,6 +307,9 @@ impl IndexScheduler {
                     Some(uid.to_string()),
                 )
             })?;
+        for result in results {
+            result?;
+        }
 
         step.store(total_documents, atomic::Ordering::Relaxed);
     }
```
```diff
@@ -66,6 +66,11 @@ impl IndexScheduler {
         }
         IndexOperation::DocumentOperation { index_uid, primary_key, operations, mut tasks } => {
             progress.update_progress(DocumentOperationProgress::RetrievingConfig);
+
+            let network = self.network();
+
+            let shards = network.shards();
+
             // TODO: at some point, for better efficiency we might want to reuse the bumpalo for successive batches.
             // this is made difficult by the fact we're doing private clones of the index scheduler and sending it
             // to a fresh thread.
@@ -130,6 +135,7 @@ impl IndexScheduler {
                 &mut new_fields_ids_map,
                 &|| must_stop_processing.get(),
                 progress.clone(),
+                shards.as_ref(),
             )
             .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?;
```
@@ -3,6 +3,7 @@ use std::collections::BTreeMap;
|
||||
use big_s::S;
|
||||
use insta::assert_json_snapshot;
|
||||
use meili_snap::{json_string, snapshot};
|
||||
use meilisearch_types::milli::index::EmbeddingsWithMetadata;
|
||||
use meilisearch_types::milli::update::Setting;
|
||||
use meilisearch_types::milli::vector::settings::EmbeddingSettings;
|
||||
use meilisearch_types::milli::vector::SearchQuery;
|
||||
@@ -220,8 +221,8 @@ fn import_vectors() {
|
||||
|
||||
let embeddings = index.embeddings(&rtxn, 0).unwrap();
|
||||
|
||||
assert_json_snapshot!(embeddings[&simple_hf_name].0[0] == lab_embed, @"true");
|
||||
assert_json_snapshot!(embeddings[&fakerest_name].0[0] == beagle_embed, @"true");
|
||||
assert_json_snapshot!(embeddings[&simple_hf_name].embeddings[0] == lab_embed, @"true");
|
||||
assert_json_snapshot!(embeddings[&fakerest_name].embeddings[0] == beagle_embed, @"true");
|
||||
|
||||
let doc = index.documents(&rtxn, std::iter::once(0)).unwrap()[0].1;
|
||||
let fields_ids_map = index.fields_ids_map(&rtxn).unwrap();
|
||||
@@ -311,9 +312,9 @@ fn import_vectors() {
|
||||
let embeddings = index.embeddings(&rtxn, 0).unwrap();
|
||||
|
||||
// automatically changed to patou because set to regenerate
|
||||
assert_json_snapshot!(embeddings[&simple_hf_name].0[0] == patou_embed, @"true");
|
||||
assert_json_snapshot!(embeddings[&simple_hf_name].embeddings[0] == patou_embed, @"true");
|
||||
// remained beagle
|
||||
assert_json_snapshot!(embeddings[&fakerest_name].0[0] == beagle_embed, @"true");
|
||||
assert_json_snapshot!(embeddings[&fakerest_name].embeddings[0] == beagle_embed, @"true");
|
||||
|
||||
let doc = index.documents(&rtxn, std::iter::once(0)).unwrap()[0].1;
|
||||
let fields_ids_map = index.fields_ids_map(&rtxn).unwrap();
|
||||
@@ -497,13 +498,13 @@ fn import_vectors_first_and_embedder_later() {
|
||||
|
||||
let docid = index.external_documents_ids.get(&rtxn, "0").unwrap().unwrap();
|
||||
let embeddings = index.embeddings(&rtxn, docid).unwrap();
|
||||
let (embedding, _) = &embeddings["my_doggo_embedder"];
|
||||
assert!(!embedding.is_empty(), "{embedding:?}");
|
||||
let EmbeddingsWithMetadata { embeddings, .. } = &embeddings["my_doggo_embedder"];
|
||||
assert!(!embeddings.is_empty(), "{embeddings:?}");
|
||||
|
||||
// the document with the id 3 should keep its original embedding
|
||||
let docid = index.external_documents_ids.get(&rtxn, "3").unwrap().unwrap();
|
||||
let embeddings = index.embeddings(&rtxn, docid).unwrap();
|
||||
let (embeddings, _) = &embeddings["my_doggo_embedder"];
|
||||
let EmbeddingsWithMetadata { embeddings, .. } = &embeddings["my_doggo_embedder"];
|
||||
|
||||
snapshot!(embeddings.len(), @"1");
|
||||
assert!(embeddings[0].iter().all(|i| *i == 3.0), "{:?}", embeddings[0]);
|
||||
@@ -558,7 +559,7 @@ fn import_vectors_first_and_embedder_later() {
|
||||
"###);
|
||||
|
||||
let embeddings = index.embeddings(&rtxn, docid).unwrap();
|
||||
let (embedding, _) = &embeddings["my_doggo_embedder"];
|
||||
let EmbeddingsWithMetadata { embeddings: embedding, .. } = &embeddings["my_doggo_embedder"];
|
||||
|
||||
assert!(!embedding.is_empty());
|
||||
assert!(!embedding[0].iter().all(|i| *i == 3.0), "{:?}", embedding[0]);
|
||||
@@ -566,7 +567,7 @@ fn import_vectors_first_and_embedder_later() {
|
||||
// the document with the id 4 should generate an embedding
|
||||
let docid = index.external_documents_ids.get(&rtxn, "4").unwrap().unwrap();
|
||||
let embeddings = index.embeddings(&rtxn, docid).unwrap();
|
||||
let (embedding, _) = &embeddings["my_doggo_embedder"];
|
||||
let EmbeddingsWithMetadata { embeddings: embedding, .. } = &embeddings["my_doggo_embedder"];
|
||||
|
||||
assert!(!embedding.is_empty());
|
||||
}
|
||||
@@ -696,7 +697,7 @@ fn delete_document_containing_vector() {
    "###);
    let docid = index.external_documents_ids.get(&rtxn, "0").unwrap().unwrap();
    let embeddings = index.embeddings(&rtxn, docid).unwrap();
    let (embedding, _) = &embeddings["manual"];
    let EmbeddingsWithMetadata { embeddings: embedding, .. } = &embeddings["manual"];
    assert!(!embedding.is_empty(), "{embedding:?}");

    index_scheduler

@@ -88,6 +88,7 @@ pub fn upgrade_index_scheduler(
            details: Some(Details::UpgradeDatabase { from, to }),
            status: Status::Enqueued,
            kind: KindWithContent::UpgradeDatabase { from },
            network: None,
        },
    )?;
    wtxn.commit()?;

@@ -1,6 +1,5 @@
//! Utility functions on the DBs. Mainly getter and setters.

use crate::milli::progress::EmbedderStats;
use std::collections::{BTreeSet, HashSet};
use std::ops::Bound;
use std::sync::Arc;
@@ -15,6 +14,7 @@ use meilisearch_types::tasks::{
use roaring::RoaringBitmap;
use time::OffsetDateTime;

use crate::milli::progress::EmbedderStats;
use crate::{Error, Result, Task, TaskId, BEI128};

/// This structure contains all the information required to write a batch in the database without reading the tasks.
@@ -372,6 +372,7 @@ impl crate::IndexScheduler {
            details,
            status,
            kind,
            network: _,
        } = task;
        assert_eq!(uid, task.uid);
        if task.status != Status::Enqueued {

@@ -158,7 +158,7 @@ impl AuthController {
        self.store.delete_all_keys()
    }

    /// Delete all the keys in the DB.
    /// Insert a key directly into the store.
    pub fn raw_insert_key(&mut self, key: Key) -> Result<()> {
        self.store.put_api_key(key)?;
        Ok(())
@@ -351,6 +351,7 @@ pub struct IndexSearchRules {

fn generate_default_keys(store: &HeedAuthStore) -> Result<()> {
    store.put_api_key(Key::default_chat())?;
    store.put_api_key(Key::default_read_only_admin())?;
    store.put_api_key(Key::default_admin())?;
    store.put_api_key(Key::default_search())?;

@@ -88,7 +88,13 @@ impl HeedAuthStore {
        let mut actions = HashSet::new();
        for action in &key.actions {
            match action {
                Action::All => actions.extend(enum_iterator::all::<Action>()),
                Action::All => {
                    actions.extend(enum_iterator::all::<Action>());
                    actions.remove(&Action::AllGet);
                }
                Action::AllGet => {
                    actions.extend(enum_iterator::all::<Action>().filter(|a| a.is_read()))
                }
                Action::DocumentsAll => {
                    actions.extend(
                        [Action::DocumentsGet, Action::DocumentsDelete, Action::DocumentsAdd]
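Note: to make the key-expansion change above easier to follow, here is a minimal, self-contained sketch of the new behavior: `All` expands to every action except `AllGet` (so a wildcard can never re-expand), while `AllGet` expands to exactly the actions whose `is_read()` is true. The four-variant enum and the `expand` helper below are hypothetical simplifications for illustration, not the real Meilisearch types.

use std::collections::HashSet;

// Hypothetical, simplified stand-in for meilisearch_types::keys::Action.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum Action {
    All,          // "*": every action
    AllGet,       // "*.get": every read-only action
    Search,       // read-only
    DocumentsAdd, // write
}

impl Action {
    fn is_read(&self) -> bool {
        // Expanding actions must return false so they never expand recursively.
        matches!(self, Action::Search)
    }
}

fn expand(requested: &[Action]) -> HashSet<Action> {
    let all = [Action::All, Action::AllGet, Action::Search, Action::DocumentsAdd];
    let mut actions = HashSet::new();
    for action in requested {
        match action {
            Action::All => {
                actions.extend(all);
                // `*` must not grant the `*.get` wildcard itself.
                actions.remove(&Action::AllGet);
            }
            Action::AllGet => actions.extend(all.iter().copied().filter(Action::is_read)),
            other => {
                actions.insert(*other);
            }
        }
    }
    actions
}

fn main() {
    let read_only = expand(&[Action::AllGet]);
    assert!(read_only.contains(&Action::Search));
    assert!(!read_only.contains(&Action::DocumentsAdd));
}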
@@ -24,7 +24,7 @@ enum-iterator = "2.1.0"
file-store = { path = "../file-store" }
flate2 = "1.1.2"
fst = "0.4.7"
memmap2 = "0.9.5"
memmap2 = "0.9.7"
milli = { path = "../milli" }
roaring = { version = "0.10.12", features = ["serde"] }
rustc-hash = "2.1.1"

@@ -236,8 +236,11 @@ InvalidDocumentFields , InvalidRequest , BAD_REQUEST ;
InvalidDocumentRetrieveVectors , InvalidRequest , BAD_REQUEST ;
MissingDocumentFilter , InvalidRequest , BAD_REQUEST ;
MissingDocumentEditionFunction , InvalidRequest , BAD_REQUEST ;
InconsistentDocumentChangeHeaders , InvalidRequest , BAD_REQUEST ;
InvalidDocumentFilter , InvalidRequest , BAD_REQUEST ;
InvalidDocumentSort , InvalidRequest , BAD_REQUEST ;
InvalidDocumentGeoField , InvalidRequest , BAD_REQUEST ;
InvalidHeaderValue , InvalidRequest , BAD_REQUEST ;
InvalidVectorDimensions , InvalidRequest , BAD_REQUEST ;
InvalidVectorsType , InvalidRequest , BAD_REQUEST ;
InvalidDocumentId , InvalidRequest , BAD_REQUEST ;
@@ -266,7 +269,9 @@ InvalidMultiSearchRemote , InvalidRequest , BAD_REQUEST ;
InvalidMultiSearchWeight , InvalidRequest , BAD_REQUEST ;
InvalidNetworkRemotes , InvalidRequest , BAD_REQUEST ;
InvalidNetworkSelf , InvalidRequest , BAD_REQUEST ;
InvalidNetworkSharding , InvalidRequest , BAD_REQUEST ;
InvalidNetworkSearchApiKey , InvalidRequest , BAD_REQUEST ;
InvalidNetworkWriteApiKey , InvalidRequest , BAD_REQUEST ;
InvalidNetworkUrl , InvalidRequest , BAD_REQUEST ;
InvalidSearchAttributesToSearchOn , InvalidRequest , BAD_REQUEST ;
InvalidSearchAttributesToCrop , InvalidRequest , BAD_REQUEST ;
@@ -415,6 +420,7 @@ InvalidChatCompletionPrompts , InvalidRequest , BAD_REQUEST ;
InvalidChatCompletionSystemPrompt , InvalidRequest , BAD_REQUEST ;
InvalidChatCompletionSearchDescriptionPrompt , InvalidRequest , BAD_REQUEST ;
InvalidChatCompletionSearchQueryParamPrompt , InvalidRequest , BAD_REQUEST ;
InvalidChatCompletionSearchFilterParamPrompt , InvalidRequest , BAD_REQUEST ;
InvalidChatCompletionSearchIndexUidParamPrompt , InvalidRequest , BAD_REQUEST ;
InvalidChatCompletionPreQueryPrompt , InvalidRequest , BAD_REQUEST
}
@@ -476,7 +482,8 @@ impl ErrorCode for milli::Error {
                UserError::InvalidDistinctAttribute { .. } => Code::InvalidSearchDistinct,
                UserError::SortRankingRuleMissing => Code::InvalidSearchSort,
                UserError::InvalidFacetsDistribution { .. } => Code::InvalidSearchFacets,
                UserError::InvalidSortableAttribute { .. } => Code::InvalidSearchSort,
                UserError::InvalidSearchSortableAttribute { .. } => Code::InvalidSearchSort,
                UserError::InvalidDocumentSortableAttribute { .. } => Code::InvalidDocumentSort,
                UserError::InvalidSearchableAttribute { .. } => {
                    Code::InvalidSearchAttributesToSearchOn
                }
@@ -492,7 +499,8 @@ impl ErrorCode for milli::Error {
                UserError::InvalidVectorsMapType { .. }
                | UserError::InvalidVectorsEmbedderConf { .. } => Code::InvalidVectorsType,
                UserError::TooManyVectors(_, _) => Code::TooManyVectors,
                UserError::SortError(_) => Code::InvalidSearchSort,
                UserError::SortError { search: true, .. } => Code::InvalidSearchSort,
                UserError::SortError { search: false, .. } => Code::InvalidDocumentSort,
                UserError::InvalidMinTypoWordLenSetting(_, _) => {
                    Code::InvalidSettingsTypoTolerance
                }

@@ -1,13 +1,12 @@
use std::collections::BTreeMap;

use serde::{Deserialize, Serialize};

use crate::error::{Code, ResponseError};

pub const DEFAULT_CHAT_SYSTEM_PROMPT: &str = "You are a highly capable research assistant with access to powerful search tools. IMPORTANT INSTRUCTIONS:1. When answering questions, you MUST make multiple tool calls (at least 2-3) to gather comprehensive information.2. Use different search queries for each tool call - vary keywords, rephrase questions, and explore different semantic angles to ensure broad coverage.3. Always explicitly announce BEFORE making each tool call by saying: \"I'll search for [specific information] now.\"4. Combine information from ALL tool calls to provide complete, nuanced answers rather than relying on a single source.5. For complex topics, break down your research into multiple targeted queries rather than using a single generic search.";
pub const DEFAULT_CHAT_SYSTEM_PROMPT: &str = "You are a highly capable research assistant with access to powerful search tools. IMPORTANT INSTRUCTIONS:1. When answering questions, you MUST make multiple tool calls (at least 2-3) to gather comprehensive information.2. Use different search queries for each tool call - vary keywords, rephrase questions, and explore different semantic angles to ensure broad coverage.3. Always explicitly announce BEFORE making each tool call by saying: \"I'll search for [specific information] now.\"4. Combine information from ALL tool calls to provide complete, nuanced answers rather than relying on a single source.5. For complex topics, break down your research into multiple targeted queries rather than using a single generic search. Meilisearch doesn't use the colon (:) syntax to filter but rather the equal (=) one. Separate filters from query and keep the q parameter empty if needed. Same for the filter parameter: keep it empty if need be. If you need to find documents that CONTAINS keywords simply put the keywords in the q parameter do no use a filter for this purpose. Whenever you get an error, read the error message and fix your error. ";
pub const DEFAULT_CHAT_SEARCH_DESCRIPTION_PROMPT: &str =
    "Search the database for relevant JSON documents using an optional query.";
    "Query: 'best story about Rust before 2018' with year: 2018, 2020, 2021\nlabel: analysis, golang, javascript\ntype: story, link\nvote: 300, 298, 278\n: {\"q\": \"\", \"filter\": \"category = Rust AND type = story AND year < 2018 AND vote > 100\"}\nQuery: 'A black or green car that can go fast with red brakes' with maxspeed_kmh: 200, 150, 130\ncolor: black, grey, red, green\nbrand: Toyota, Renault, Jeep, Ferrari\n: {\"q\": \"red brakes\", \"filter\": \"maxspeed_kmh > 150 AND color IN ['black', green]\"}\nQuery: 'Superman movie released in 2018 or after' with year: 2018, 2020, 2021\ngenres: Drama, Comedy, Adventure, Fiction\n: {\"q\":\"Superman\",\"filter\":\"genres IN [Adventure, Fiction] AND year >= 2018\"}";
pub const DEFAULT_CHAT_SEARCH_Q_PARAM_PROMPT: &str = "The search query string used to find relevant documents in the index. This should contain keywords or phrases that best represent what the user is looking for. More specific queries will yield more precise results.";
pub const DEFAULT_CHAT_SEARCH_FILTER_PARAM_PROMPT: &str = "The search filter string used to find relevant documents in the index. It supports parentheses, `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox`. Here is an example: \"price > 100 AND category = 'electronics'\". The following is a list of fields that can be filtered on: ";
pub const DEFAULT_CHAT_SEARCH_INDEX_UID_PARAM_PROMPT: &str = "The name of the index to search within. An index is a collection of documents organized for search. Selecting the right index ensures the most relevant results for the user query.";

#[derive(Serialize, Deserialize, Debug, Clone, Copy, Default, PartialEq, Eq)]
@@ -31,23 +30,6 @@ pub struct InstanceTogglableFeatures {
    pub contains_filter: bool,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct Remote {
    pub url: String,
    #[serde(default)]
    pub search_api_key: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Default)]
#[serde(rename_all = "camelCase")]
pub struct Network {
    #[serde(default, rename = "self")]
    pub local: Option<String>,
    #[serde(default)]
    pub remotes: BTreeMap<String, Remote>,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Default)]
#[serde(rename_all = "camelCase")]
pub struct ChatCompletionSettings {
@@ -161,18 +143,31 @@ impl ChatCompletionSource {
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct ChatCompletionPrompts {
    #[serde(default)]
    pub system: String,
    #[serde(default)]
    pub search_description: String,
    #[serde(default)]
    pub search_q_param: String,
    #[serde(default = "default_search_filter_param")]
    pub search_filter_param: String,
    #[serde(default)]
    pub search_index_uid_param: String,
}

/// This function is used for when the search_filter_param is
/// not provided and this can happen when the database is in v1.15.
fn default_search_filter_param() -> String {
    DEFAULT_CHAT_SEARCH_FILTER_PARAM_PROMPT.to_string()
}

impl Default for ChatCompletionPrompts {
    fn default() -> Self {
        Self {
            system: DEFAULT_CHAT_SYSTEM_PROMPT.to_string(),
            search_description: DEFAULT_CHAT_SEARCH_DESCRIPTION_PROMPT.to_string(),
            search_q_param: DEFAULT_CHAT_SEARCH_Q_PARAM_PROMPT.to_string(),
            search_filter_param: DEFAULT_CHAT_SEARCH_FILTER_PARAM_PROMPT.to_string(),
            search_index_uid_param: DEFAULT_CHAT_SEARCH_INDEX_UID_PARAM_PROMPT.to_string(),
        }
    }
}
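Note: a quick illustration of why `default_search_filter_param` exists. With `#[serde(default = ...)]` on the field, a settings payload written by v1.15 (which predates `searchFilterParam`) still deserializes and picks up the default prompt. A minimal sketch with a hypothetical stand-in struct, not the real `ChatCompletionPrompts`:

use serde::Deserialize;

// Stand-in struct; only the relevant field is shown.
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct Prompts {
    system: String,
    #[serde(default = "default_filter")]
    search_filter_param: String,
}

// Hypothetical default value for the sketch.
fn default_filter() -> String {
    "<default filter prompt>".to_string()
}

fn main() {
    // A v1.15-era payload: no `searchFilterParam` key at all.
    let old = r#"{ "system": "You are a research assistant." }"#;
    let prompts: Prompts = serde_json::from_str(old).unwrap();
    // The missing field was filled in by the serde default.
    assert_eq!(prompts.search_filter_param, "<default filter prompt>");
}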
@@ -144,6 +144,21 @@ impl Key {
        }
    }

    pub fn default_read_only_admin() -> Self {
        let now = OffsetDateTime::now_utc();
        let uid = Uuid::new_v4();
        Self {
            name: Some("Default Read-Only Admin API Key".to_string()),
            description: Some("Use it to read information across the whole database. Caution! Do not expose this key on a public frontend".to_string()),
            uid,
            actions: vec![Action::AllGet, Action::KeysGet],
            indexes: vec![IndexUidPattern::all()],
            expires_at: None,
            created_at: now,
            updated_at: now,
        }
    }

    pub fn default_search() -> Self {
        let now = OffsetDateTime::now_utc();
        let uid = Uuid::new_v4();
@@ -347,6 +362,9 @@ pub enum Action {
    #[serde(rename = "chatsSettings.update")]
    #[deserr(rename = "chatsSettings.update")]
    ChatsSettingsUpdate,
    #[serde(rename = "*.get")]
    #[deserr(rename = "*.get")]
    AllGet,
}

impl Action {
@@ -385,6 +403,7 @@ impl Action {
            METRICS_GET => Some(Self::MetricsGet),
            DUMPS_ALL => Some(Self::DumpsAll),
            DUMPS_CREATE => Some(Self::DumpsCreate),
            SNAPSHOTS_ALL => Some(Self::SnapshotsAll),
            SNAPSHOTS_CREATE => Some(Self::SnapshotsCreate),
            VERSION => Some(Self::Version),
            KEYS_CREATE => Some(Self::KeysAdd),
@@ -393,12 +412,60 @@
            KEYS_DELETE => Some(Self::KeysDelete),
            EXPERIMENTAL_FEATURES_GET => Some(Self::ExperimentalFeaturesGet),
            EXPERIMENTAL_FEATURES_UPDATE => Some(Self::ExperimentalFeaturesUpdate),
            EXPORT => Some(Self::Export),
            NETWORK_GET => Some(Self::NetworkGet),
            NETWORK_UPDATE => Some(Self::NetworkUpdate),
            ALL_GET => Some(Self::AllGet),
            _otherwise => None,
        }
    }

    /// Whether the action should be included in [Action::AllRead].
    pub fn is_read(&self) -> bool {
        use Action::*;

        // It's using an exhaustive match to force the addition of new actions.
        match self {
            // Any action that expands to others must return false, as it wouldn't be able to expand recursively.
            All | AllGet | DocumentsAll | IndexesAll | ChatsAll | TasksAll | SettingsAll
            | StatsAll | MetricsAll | DumpsAll | SnapshotsAll | ChatsSettingsAll => false,

            Search => true,
            DocumentsAdd => false,
            DocumentsGet => true,
            DocumentsDelete => false,
            Export => true,
            IndexesAdd => false,
            IndexesGet => true,
            IndexesUpdate => false,
            IndexesDelete => false,
            IndexesSwap => false,
            TasksCancel => false,
            TasksDelete => false,
            TasksGet => true,
            SettingsGet => true,
            SettingsUpdate => false,
            StatsGet => true,
            MetricsGet => true,
            DumpsCreate => false,
            SnapshotsCreate => false,
            Version => true,
            KeysAdd => false,
            KeysGet => false, // Disabled in order to prevent privilege escalation
            KeysUpdate => false,
            KeysDelete => false,
            ExperimentalFeaturesGet => true,
            ExperimentalFeaturesUpdate => false,
            NetworkGet => true,
            NetworkUpdate => false,
            ChatCompletions => false, // Disabled because it might trigger generation of new chats
            ChatsGet => true,
            ChatsDelete => false,
            ChatsSettingsGet => true,
            ChatsSettingsUpdate => false,
        }
    }

    pub const fn repr(&self) -> u8 {
        *self as u8
    }
@@ -408,6 +475,7 @@ pub mod actions {
    use super::Action::*;

    pub(crate) const ALL: u8 = All.repr();
    pub const ALL_GET: u8 = AllGet.repr();
    pub const SEARCH: u8 = Search.repr();
    pub const DOCUMENTS_ALL: u8 = DocumentsAll.repr();
    pub const DOCUMENTS_ADD: u8 = DocumentsAdd.repr();
@@ -432,6 +500,7 @@ pub mod actions {
    pub const METRICS_GET: u8 = MetricsGet.repr();
    pub const DUMPS_ALL: u8 = DumpsAll.repr();
    pub const DUMPS_CREATE: u8 = DumpsCreate.repr();
    pub const SNAPSHOTS_ALL: u8 = SnapshotsAll.repr();
    pub const SNAPSHOTS_CREATE: u8 = SnapshotsCreate.repr();
    pub const VERSION: u8 = Version.repr();
    pub const KEYS_CREATE: u8 = KeysAdd.repr();
@@ -454,3 +523,68 @@ pub mod actions {
    pub const CHATS_SETTINGS_GET: u8 = ChatsSettingsGet.repr();
    pub const CHATS_SETTINGS_UPDATE: u8 = ChatsSettingsUpdate.repr();
}

#[cfg(test)]
pub(crate) mod test {
    use super::actions::*;
    use super::Action::*;
    use super::*;

    #[test]
    fn test_action_repr_and_constants() {
        assert!(All.repr() == 0 && ALL == 0);
        assert!(Search.repr() == 1 && SEARCH == 1);
        assert!(DocumentsAll.repr() == 2 && DOCUMENTS_ALL == 2);
        assert!(DocumentsAdd.repr() == 3 && DOCUMENTS_ADD == 3);
        assert!(DocumentsGet.repr() == 4 && DOCUMENTS_GET == 4);
        assert!(DocumentsDelete.repr() == 5 && DOCUMENTS_DELETE == 5);
        assert!(IndexesAll.repr() == 6 && INDEXES_ALL == 6);
        assert!(IndexesAdd.repr() == 7 && INDEXES_CREATE == 7);
        assert!(IndexesGet.repr() == 8 && INDEXES_GET == 8);
        assert!(IndexesUpdate.repr() == 9 && INDEXES_UPDATE == 9);
        assert!(IndexesDelete.repr() == 10 && INDEXES_DELETE == 10);
        assert!(IndexesSwap.repr() == 11 && INDEXES_SWAP == 11);
        assert!(TasksAll.repr() == 12 && TASKS_ALL == 12);
        assert!(TasksCancel.repr() == 13 && TASKS_CANCEL == 13);
        assert!(TasksDelete.repr() == 14 && TASKS_DELETE == 14);
        assert!(TasksGet.repr() == 15 && TASKS_GET == 15);
        assert!(SettingsAll.repr() == 16 && SETTINGS_ALL == 16);
        assert!(SettingsGet.repr() == 17 && SETTINGS_GET == 17);
        assert!(SettingsUpdate.repr() == 18 && SETTINGS_UPDATE == 18);
        assert!(StatsAll.repr() == 19 && STATS_ALL == 19);
        assert!(StatsGet.repr() == 20 && STATS_GET == 20);
        assert!(MetricsAll.repr() == 21 && METRICS_ALL == 21);
        assert!(MetricsGet.repr() == 22 && METRICS_GET == 22);
        assert!(DumpsAll.repr() == 23 && DUMPS_ALL == 23);
        assert!(DumpsCreate.repr() == 24 && DUMPS_CREATE == 24);
        assert!(SnapshotsAll.repr() == 25 && SNAPSHOTS_ALL == 25);
        assert!(SnapshotsCreate.repr() == 26 && SNAPSHOTS_CREATE == 26);
        assert!(Version.repr() == 27 && VERSION == 27);
        assert!(KeysAdd.repr() == 28 && KEYS_CREATE == 28);
        assert!(KeysGet.repr() == 29 && KEYS_GET == 29);
        assert!(KeysUpdate.repr() == 30 && KEYS_UPDATE == 30);
        assert!(KeysDelete.repr() == 31 && KEYS_DELETE == 31);
        assert!(ExperimentalFeaturesGet.repr() == 32 && EXPERIMENTAL_FEATURES_GET == 32);
        assert!(ExperimentalFeaturesUpdate.repr() == 33 && EXPERIMENTAL_FEATURES_UPDATE == 33);
        assert!(Export.repr() == 34 && EXPORT == 34);
        assert!(NetworkGet.repr() == 35 && NETWORK_GET == 35);
        assert!(NetworkUpdate.repr() == 36 && NETWORK_UPDATE == 36);
        assert!(ChatCompletions.repr() == 37 && CHAT_COMPLETIONS == 37);
        assert!(ChatsAll.repr() == 38 && CHATS_ALL == 38);
        assert!(ChatsGet.repr() == 39 && CHATS_GET == 39);
        assert!(ChatsDelete.repr() == 40 && CHATS_DELETE == 40);
        assert!(ChatsSettingsAll.repr() == 41 && CHATS_SETTINGS_ALL == 41);
        assert!(ChatsSettingsGet.repr() == 42 && CHATS_SETTINGS_GET == 42);
        assert!(ChatsSettingsUpdate.repr() == 43 && CHATS_SETTINGS_UPDATE == 43);
        assert!(AllGet.repr() == 44 && ALL_GET == 44);
    }

    #[test]
    fn test_from_repr() {
        for action in enum_iterator::all::<Action>() {
            let repr = action.repr();
            let action_from_repr = Action::from_repr(repr);
            assert_eq!(Some(action), action_from_repr, "Failed for action: {:?}", action);
        }
    }
}
@@ -10,6 +10,7 @@ pub mod index_uid;
pub mod index_uid_pattern;
pub mod keys;
pub mod locales;
pub mod network;
pub mod settings;
pub mod star_or;
pub mod task_view;

crates/meilisearch-types/src/network.rs (new file, 47 lines)
@@ -0,0 +1,47 @@
// Copyright © 2025 Meilisearch Some Rights Reserved
// This file is part of Meilisearch Enterprise Edition (EE).
// Use of this source code is governed by the Business Source License 1.1,
// as found in the LICENSE-EE file or at <https://mariadb.com/bsl11>

use std::collections::BTreeMap;

use milli::update::new::indexer::sharding::Shards;
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Default)]
#[serde(rename_all = "camelCase")]
pub struct Network {
    #[serde(default, rename = "self")]
    pub local: Option<String>,
    #[serde(default)]
    pub remotes: BTreeMap<String, Remote>,
    #[serde(default)]
    pub sharding: bool,
}

impl Network {
    pub fn shards(&self) -> Option<Shards> {
        if self.sharding {
            let this = self.local.as_deref().expect("Inconsistent `sharding` and `self`");
            let others = self
                .remotes
                .keys()
                .filter(|name| name.as_str() != this)
                .map(|name| name.to_owned())
                .collect();
            Some(Shards { own: vec![this.to_owned()], others })
        } else {
            None
        }
    }
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct Remote {
    pub url: String,
    #[serde(default)]
    pub search_api_key: Option<String>,
    #[serde(default)]
    pub write_api_key: Option<String>,
}
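Note: a hypothetical usage sketch of the new `Network` type above (remote names and URLs invented; `Shards` is assumed to expose `own`/`others` as `Vec<String>`, as the constructor in `shards()` suggests). With `sharding: true`, the local remote becomes the only `own` shard and every other remote name lands in `others`; with `sharding: false`, `shards()` returns `None` and indexing stays unsharded.

use std::collections::BTreeMap;

use meilisearch_types::network::{Network, Remote};

fn main() {
    let mut remotes = BTreeMap::new();
    for name in ["ms-0", "ms-1", "ms-2"] {
        remotes.insert(
            name.to_string(),
            Remote { url: format!("http://{name}:7700"), search_api_key: None, write_api_key: None },
        );
    }

    // This instance is "ms-0"; sharding is enabled across the three remotes.
    let network = Network { local: Some("ms-0".to_string()), remotes, sharding: true };

    let shards = network.shards().expect("sharding is enabled");
    assert_eq!(shards.own, vec!["ms-0".to_string()]);
    // BTreeMap keys iterate in order, and the local name is filtered out.
    assert_eq!(shards.others, vec!["ms-1".to_string(), "ms-2".to_string()]);
}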
@@ -11,6 +11,7 @@ use crate::error::ResponseError;
use crate::settings::{Settings, Unchecked};
use crate::tasks::{
    serialize_duration, Details, DetailsExportIndexSettings, IndexSwap, Kind, Status, Task, TaskId,
    TaskNetwork,
};

#[derive(Debug, Clone, PartialEq, Serialize, ToSchema)]
@@ -51,6 +52,9 @@ pub struct TaskView {
    #[schema(value_type = String, example = json!("2024-08-08_14:12:09.393Z"))]
    #[serde(with = "time::serde::rfc3339::option", default)]
    pub finished_at: Option<OffsetDateTime>,

    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub network: Option<TaskNetwork>,
}

impl TaskView {
@@ -68,6 +72,7 @@ impl TaskView {
            enqueued_at: task.enqueued_at,
            started_at: task.started_at,
            finished_at: task.finished_at,
            network: task.network.clone(),
        }
    }
}

@@ -42,6 +42,9 @@ pub struct Task {

    pub status: Status,
    pub kind: KindWithContent,

    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub network: Option<TaskNetwork>,
}

impl Task {
@@ -704,6 +707,36 @@ pub enum Details {
    },
}

#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, ToSchema)]
#[serde(untagged, rename_all = "camelCase")]
pub enum TaskNetwork {
    Origin { origin: Origin },
    Remotes { remote_tasks: BTreeMap<String, RemoteTask> },
}
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, ToSchema)]
#[serde(untagged, rename_all = "camelCase")]
pub struct Origin {
    pub remote_name: String,
    pub task_uid: usize,
}

#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct RemoteTask {
    #[serde(skip_serializing_if = "Option::is_none")]
    task_uid: Option<TaskId>,
    error: Option<ResponseError>,
}

impl From<Result<TaskId, ResponseError>> for RemoteTask {
    fn from(res: Result<TaskId, ResponseError>) -> RemoteTask {
        match res {
            Ok(task_uid) => RemoteTask { task_uid: Some(task_uid), error: None },
            Err(err) => RemoteTask { task_uid: None, error: Some(err) },
        }
    }
}

#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, ToSchema)]
#[schema(rename_all = "camelCase")]
pub struct DetailsExportIndexSettings {

@@ -50,6 +50,7 @@ jsonwebtoken = "9.3.1"
lazy_static = "1.5.0"
meilisearch-auth = { path = "../meilisearch-auth" }
meilisearch-types = { path = "../meilisearch-types" }
memmap2 = "0.9.7"
mimalloc = { version = "0.1.47", default-features = false }
mime = "0.3.17"
num_cpus = "1.17.0"
@@ -114,6 +115,9 @@ utoipa-scalar = { version = "0.3.0", optional = true, features = ["actix-web"] }
async-openai = { git = "https://github.com/meilisearch/async-openai", branch = "better-error-handling" }
secrecy = "0.10.3"
actix-web-lab = { version = "0.24.1", default-features = false }
urlencoding = "2.1.3"
backoff = { version = "0.4.0", features = ["tokio"] }

[dev-dependencies]
actix-rt = "2.10.0"
@@ -124,7 +128,6 @@ manifest-dir-macros = "0.1.18"
maplit = "1.0.2"
meili-snap = { path = "../meili-snap" }
temp-env = "0.3.6"
urlencoding = "2.1.3"
wiremock = "0.6.3"
yaup = "0.3.1"

@@ -104,6 +104,4 @@ impl Analytics for MockAnalytics {
        _request: &HttpRequest,
    ) {
    }
    fn get_fetch_documents(&self, _documents_query: &DocumentFetchKind, _request: &HttpRequest) {}
    fn post_fetch_documents(&self, _documents_query: &DocumentFetchKind, _request: &HttpRequest) {}
}

@@ -73,12 +73,6 @@ pub enum DocumentDeletionKind {
    PerFilter,
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum DocumentFetchKind {
    PerDocumentId { retrieve_vectors: bool },
    Normal { with_filter: bool, limit: usize, offset: usize, retrieve_vectors: bool },
}

/// To send an event to segment, your event must be able to aggregate itself with another event of the same type.
pub trait Aggregate: 'static + mopa::Any + Send {
    /// The name of the event that will be sent to segment.

@@ -203,6 +203,7 @@ struct Infos {
    experimental_composite_embedders: bool,
    experimental_embedding_cache_entries: usize,
    experimental_no_snapshot_compaction: bool,
    experimental_no_edition_2024_for_dumps: bool,
    experimental_no_edition_2024_for_settings: bool,
    gpu_enabled: bool,
    db_path: bool,
@@ -293,6 +294,7 @@ impl Infos {
            max_indexing_threads,
            skip_index_budget: _,
            experimental_no_edition_2024_for_settings,
            experimental_no_edition_2024_for_dumps,
        } = indexer_options;

        let RuntimeTogglableFeatures {
@@ -329,6 +331,7 @@
            experimental_composite_embedders: composite_embedders,
            experimental_embedding_cache_entries,
            experimental_no_snapshot_compaction,
            experimental_no_edition_2024_for_dumps,
            gpu_enabled: meilisearch_types::milli::vector::is_cuda_enabled(),
            db_path: db_path != PathBuf::from("./data.ms"),
            import_dump: import_dump.is_some(),
@@ -9,6 +9,8 @@ use meilisearch_types::milli::OrderBy;
use serde_json::Value;
use tokio::task::JoinError;

use crate::routes::indexes::{PROXY_ORIGIN_REMOTE_HEADER, PROXY_ORIGIN_TASK_UID_HEADER};

#[derive(Debug, thiserror::Error)]
pub enum MeilisearchHttpError {
    #[error("A Content-Type header is missing. Accepted values for the Content-Type header are: {}",
@@ -49,7 +51,7 @@ pub enum MeilisearchHttpError {
    TooManySearchRequests(usize),
    #[error("Internal error: Search limiter is down.")]
    SearchLimiterIsDown,
    #[error("The provided payload reached the size limit. The maximum accepted payload size is {}.", Byte::from_u64(*.0 as u64).get_appropriate_unit(UnitType::Binary))]
    #[error("The provided payload reached the size limit. The maximum accepted payload size is {}.", Byte::from_u64(*.0 as u64).get_appropriate_unit(if *.0 % 1024 == 0 { UnitType::Binary } else { UnitType::Decimal }))]
    PayloadTooLarge(usize),
    #[error("Two indexes must be given for each swap. The list `[{}]` contains {} indexes.",
        .0.iter().map(|uid| format!("\"{uid}\"")).collect::<Vec<_>>().join(", "), .0.len()
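Note: a quick illustration of what this unit-type switch changes, using the same `byte_unit` calls as the error message (the limit values are invented for illustration): limits that are multiples of 1024 format exactly in binary units, while everything else reads better in decimal.

use byte_unit::{Byte, UnitType};

fn human(limit: usize) -> String {
    // Same condition as the error attribute above.
    let unit_type = if limit % 1024 == 0 { UnitType::Binary } else { UnitType::Decimal };
    format!("{:.2}", Byte::from_u64(limit as u64).get_appropriate_unit(unit_type))
}

fn main() {
    // 300 * 1024 * 1024 bytes: divisible by 1024, so binary units are exact.
    println!("{}", human(314_572_800)); // "300.00 MiB"
    // 250 million bytes: not divisible by 1024, decimal units are exact.
    println!("{}", human(250_000_000)); // "250.00 MB"
}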
@@ -80,6 +82,16 @@ pub enum MeilisearchHttpError {
    MissingSearchHybrid,
    #[error("Invalid request: both `media` and `vector` parameters are present.")]
    MediaAndVector,
    #[error("Inconsistent `Origin` headers: {} was provided but {} is missing.\n - Hint: Either both headers should be provided, or none of them", if *is_remote_missing {
        PROXY_ORIGIN_TASK_UID_HEADER
    } else { PROXY_ORIGIN_REMOTE_HEADER },
    if *is_remote_missing {
        PROXY_ORIGIN_REMOTE_HEADER
    } else { PROXY_ORIGIN_TASK_UID_HEADER }
    )]
    InconsistentOriginHeaders { is_remote_missing: bool },
    #[error("Invalid value for header {header_name}: {msg}")]
    InvalidHeaderValue { header_name: &'static str, msg: String },
}

impl MeilisearchHttpError {
@@ -124,6 +136,10 @@ impl ErrorCode for MeilisearchHttpError {
            MeilisearchHttpError::InconsistentFacetOrder { .. } => {
                Code::InvalidMultiSearchFacetOrder
            }
            MeilisearchHttpError::InconsistentOriginHeaders { .. } => {
                Code::InconsistentDocumentChangeHeaders
            }
            MeilisearchHttpError::InvalidHeaderValue { .. } => Code::InvalidHeaderValue,
        }
    }
}
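Note: the new `InconsistentOriginHeaders` variant enforces an all-or-nothing rule on the two proxy headers. Below is a hedged sketch of that validation as a hypothetical helper (the actual extraction code is not shown in this diff); `is_remote_missing` records which header to blame in the error message.

#[derive(Debug)]
enum OriginError {
    Inconsistent { is_remote_missing: bool },
}

// Hypothetical helper: `remote` and `task_uid` are the raw values of
// PROXY_ORIGIN_REMOTE_HEADER and PROXY_ORIGIN_TASK_UID_HEADER, if present.
fn extract_origin(
    remote: Option<&str>,
    task_uid: Option<&str>,
) -> Result<Option<(String, String)>, OriginError> {
    match (remote, task_uid) {
        // Neither header: not a proxied task.
        (None, None) => Ok(None),
        // Both headers: a proxied task with a known origin.
        (Some(r), Some(t)) => Ok(Some((r.to_string(), t.to_string()))),
        // Exactly one header: reject; either both are provided, or none.
        (None, Some(_)) => Err(OriginError::Inconsistent { is_remote_missing: true }),
        (Some(_), None) => Err(OriginError::Inconsistent { is_remote_missing: false }),
    }
}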
@@ -30,6 +30,7 @@ use actix_web::web::Data;
use actix_web::{web, HttpRequest};
use analytics::Analytics;
use anyhow::bail;
use bumpalo::Bump;
use error::PayloadError;
use extractors::payload::PayloadConfig;
use index_scheduler::versioning::Versioning;
@@ -38,6 +39,7 @@ use meilisearch_auth::{open_auth_store_env, AuthController};
use meilisearch_types::milli::constants::VERSION_MAJOR;
use meilisearch_types::milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader};
use meilisearch_types::milli::progress::{EmbedderStats, Progress};
use meilisearch_types::milli::update::new::indexer;
use meilisearch_types::milli::update::{
    default_thread_pool_and_threads, IndexDocumentsConfig, IndexDocumentsMethod, IndexerConfig,
};
@@ -533,7 +535,7 @@ fn import_dump(
        let mut index_reader = index_reader?;
        let metadata = index_reader.metadata();
        let uid = metadata.uid.clone();
        tracing::info!("Importing index `{}`.", metadata.uid);
        tracing::info!("Importing index `{uid}`.");

        let date = Some((metadata.created_at, metadata.updated_at));
        let index = index_scheduler.create_raw_index(&metadata.uid, date)?;
@@ -552,48 +554,101 @@
        apply_settings_to_builder(&settings, &mut builder);
        let embedder_stats: Arc<EmbedderStats> = Default::default();
        builder.execute(&|| false, &progress, embedder_stats.clone())?;
        wtxn.commit()?;

        // 5.3 Import the documents.
        // 5.3.1 We need to recreate the grenad+obkv format accepted by the index.
        tracing::info!("Importing the documents.");
        let file = tempfile::tempfile()?;
        let mut builder = DocumentsBatchBuilder::new(BufWriter::new(file));
        for document in index_reader.documents()? {
            builder.append_json_object(&document?)?;
        let mut wtxn = index.write_txn()?;
        let rtxn = index.read_txn()?;

        if index_scheduler.no_edition_2024_for_dumps() {
            // 5.3 Import the documents.
            // 5.3.1 We need to recreate the grenad+obkv format accepted by the index.
            tracing::info!("Importing the documents.");
            let file = tempfile::tempfile()?;
            let mut builder = DocumentsBatchBuilder::new(BufWriter::new(file));
            for document in index_reader.documents()? {
                builder.append_json_object(&document?)?;
            }

            // This flush the content of the batch builder.
            let file = builder.into_inner()?.into_inner()?;

            // 5.3.2 We feed it to the milli index.
            let reader = BufReader::new(file);
            let reader = DocumentsBatchReader::from_reader(reader)?;

            let embedder_configs = index.embedding_configs().embedding_configs(&wtxn)?;
            let embedders = index_scheduler.embedders(uid.to_string(), embedder_configs)?;

            let builder = milli::update::IndexDocuments::new(
                &mut wtxn,
                &index,
                indexer_config,
                IndexDocumentsConfig {
                    update_method: IndexDocumentsMethod::ReplaceDocuments,
                    ..Default::default()
                },
                |indexing_step| tracing::trace!("update: {:?}", indexing_step),
                || false,
                &embedder_stats,
            )?;

            let builder = builder.with_embedders(embedders);

            let (builder, user_result) = builder.add_documents(reader)?;
            let user_result = user_result?;
            tracing::info!(documents_found = user_result, "{} documents found.", user_result);
            builder.execute()?;
        } else {
            let db_fields_ids_map = index.fields_ids_map(&rtxn)?;
            let primary_key = index.primary_key(&rtxn)?;
            let mut new_fields_ids_map = db_fields_ids_map.clone();

            let mut indexer = indexer::DocumentOperation::new();
            let embedders = index.embedding_configs().embedding_configs(&rtxn)?;
            let embedders = index_scheduler.embedders(uid.clone(), embedders)?;

            let mmap = unsafe { memmap2::Mmap::map(index_reader.documents_file())? };

            indexer.replace_documents(&mmap)?;

            let indexer_config = index_scheduler.indexer_config();
            let pool = &indexer_config.thread_pool;

            let indexer_alloc = Bump::new();
            let (document_changes, mut operation_stats, primary_key) = indexer.into_changes(
                &indexer_alloc,
                &index,
                &rtxn,
                primary_key,
                &mut new_fields_ids_map,
                &|| false, // never stop processing a dump
                progress.clone(),
                None,
            )?;

            let operation_stats = operation_stats.pop().unwrap();
            if let Some(error) = operation_stats.error {
                return Err(error.into());
            }

            let _congestion = indexer::index(
                &mut wtxn,
                &index,
                pool,
                indexer_config.grenad_parameters(),
                &db_fields_ids_map,
                new_fields_ids_map,
                primary_key,
                &document_changes,
                embedders,
                &|| false, // never stop processing a dump
                &progress,
                &embedder_stats,
            )?;
        }

        // This flush the content of the batch builder.
        let file = builder.into_inner()?.into_inner()?;

        // 5.3.2 We feed it to the milli index.
        let reader = BufReader::new(file);
        let reader = DocumentsBatchReader::from_reader(reader)?;

        let embedder_configs = index.embedding_configs().embedding_configs(&wtxn)?;
        let embedders = index_scheduler.embedders(uid.to_string(), embedder_configs)?;

        let builder = milli::update::IndexDocuments::new(
            &mut wtxn,
            &index,
            indexer_config,
            IndexDocumentsConfig {
                update_method: IndexDocumentsMethod::ReplaceDocuments,
                ..Default::default()
            },
            |indexing_step| tracing::trace!("update: {:?}", indexing_step),
            || false,
            &embedder_stats,
        )?;

        let builder = builder.with_embedders(embedders);

        let (builder, user_result) = builder.add_documents(reader)?;
        let user_result = user_result?;
        tracing::info!(documents_found = user_result, "{} documents found.", user_result);
        builder.execute()?;
        wtxn.commit()?;
        tracing::info!("All documents successfully imported.");

        index_scheduler.refresh_index_stats(&uid)?;
    }
@@ -68,6 +68,8 @@ const MEILI_EXPERIMENTAL_LIMIT_BATCHED_TASKS_TOTAL_SIZE: &str =
const MEILI_EXPERIMENTAL_EMBEDDING_CACHE_ENTRIES: &str =
    "MEILI_EXPERIMENTAL_EMBEDDING_CACHE_ENTRIES";
const MEILI_EXPERIMENTAL_NO_SNAPSHOT_COMPACTION: &str = "MEILI_EXPERIMENTAL_NO_SNAPSHOT_COMPACTION";
const MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_DUMPS: &str =
    "MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_DUMPS";
const DEFAULT_CONFIG_FILE_PATH: &str = "./config.toml";
const DEFAULT_DB_PATH: &str = "./data.ms";
const DEFAULT_HTTP_ADDR: &str = "localhost:7700";
@@ -759,6 +761,15 @@ pub struct IndexerOpts {
    #[clap(long, env = MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_SETTINGS)]
    #[serde(default)]
    pub experimental_no_edition_2024_for_settings: bool,

    /// Experimental make dump imports use the old document indexer.
    ///
    /// When enabled, Meilisearch will use the old document indexer when importing dumps.
    ///
    /// For more information, see <https://github.com/orgs/meilisearch/discussions/851>.
    #[clap(long, env = MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_DUMPS)]
    #[serde(default)]
    pub experimental_no_edition_2024_for_dumps: bool,
}

impl IndexerOpts {
@@ -769,6 +780,7 @@ impl IndexerOpts {
            max_indexing_threads,
            skip_index_budget: _,
            experimental_no_edition_2024_for_settings,
            experimental_no_edition_2024_for_dumps,
        } = self;
        if let Some(max_indexing_memory) = max_indexing_memory.0 {
            export_to_env_if_not_present(
@@ -788,6 +800,12 @@
                experimental_no_edition_2024_for_settings.to_string(),
            );
        }
        if experimental_no_edition_2024_for_dumps {
            export_to_env_if_not_present(
                MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_DUMPS,
                experimental_no_edition_2024_for_dumps.to_string(),
            );
        }
    }
}

@@ -808,6 +826,7 @@ impl TryFrom<&IndexerOpts> for IndexerConfig {
            skip_index_budget: other.skip_index_budget,
            experimental_no_edition_2024_for_settings: other
                .experimental_no_edition_2024_for_settings,
            experimental_no_edition_2024_for_dumps: other.experimental_no_edition_2024_for_dumps,
            chunk_compression_type: Default::default(),
            chunk_compression_level: Default::default(),
            documents_chunk_size: Default::default(),
@@ -27,9 +27,10 @@ use meilisearch_types::features::{
    ChatCompletionPrompts as DbChatCompletionPrompts,
    ChatCompletionSource as DbChatCompletionSource, SystemRole,
};
use meilisearch_types::heed::RoTxn;
use meilisearch_types::keys::actions;
use meilisearch_types::milli::index::ChatConfig;
use meilisearch_types::milli::{all_obkv_to_json, obkv_to_json, TimeBudget};
use meilisearch_types::milli::{all_obkv_to_json, obkv_to_json, OrderBy, PatternMatch, TimeBudget};
use meilisearch_types::{Document, Index};
use serde::Deserialize;
use serde_json::json;
@@ -169,6 +170,7 @@ fn setup_search_tool(

    let mut index_uids = Vec::new();
    let mut function_description = prompts.search_description.clone();
    let mut filter_description = prompts.search_filter_param.clone();
    index_scheduler.try_for_each_index::<_, ()>(|name, index| {
        // Make sure to skip unauthorized indexes
        if !filters.is_index_authorized(name) {
@@ -180,16 +182,22 @@
        let index_description = chat_config.description;
        let _ = writeln!(&mut function_description, "\n\n - {name}: {index_description}\n");
        index_uids.push(name.to_string());
        let facet_distributions = format_facet_distributions(index, &rtxn, 10).unwrap(); // TODO do not unwrap
        let _ = writeln!(&mut filter_description, "\n## Facet distributions of the {name} index");
        let _ = writeln!(&mut filter_description, "{facet_distributions}");

        Ok(())
    })?;

    tracing::debug!("LLM function description: {function_description}");
    tracing::debug!("LLM filter description: {filter_description}");

    let tool = ChatCompletionToolArgs::default()
        .r#type(ChatCompletionToolType::Function)
        .function(
            FunctionObjectArgs::default()
                .name(MEILI_SEARCH_IN_INDEX_FUNCTION_NAME)
                .description(&function_description)
                .description(function_description)
                .parameters(json!({
                    "type": "object",
                    "properties": {
@@ -203,9 +211,13 @@
                            // "type": ["string", "null"],
                            "type": "string",
                            "description": prompts.search_q_param,
                        },
                        "filter": {
                            "type": "string",
                            "description": filter_description,
                        }
                    },
                    "required": ["index_uid", "q"],
                    "required": ["index_uid", "q", "filter"],
                    "additionalProperties": false,
                }))
                .strict(true)
@@ -247,11 +259,19 @@ async fn process_search_request(
    auth_token: &str,
    index_uid: String,
    q: Option<String>,
    filter: Option<String>,
) -> Result<(Index, Vec<Document>, String), ResponseError> {
    let index = index_scheduler.index(&index_uid)?;
    let rtxn = index.static_read_txn()?;
    let ChatConfig { description: _, prompt: _, search_parameters } = index.chat_config(&rtxn)?;
    let mut query = SearchQuery { q, ..SearchQuery::from(search_parameters) };
    let mut query = SearchQuery {
        q,
        filter: filter.map(serde_json::Value::from),
        ..SearchQuery::from(search_parameters)
    };

    tracing::debug!("LLM query: {:?}", query);

    let auth_filter = ActionPolicy::<{ actions::SEARCH }>::authenticate(
        auth_ctrl,
        auth_token,
@@ -280,14 +300,23 @@
        let (search, _is_finite_pagination, _max_total_hits, _offset) =
            prepare_search(&index_cloned, &rtxn, &query, &search_kind, time_budget, features)?;

        search_from_kind(index_uid, search_kind, search)
            .map(|(search_results, _)| (rtxn, search_results))
            .map_err(ResponseError::from)
        match search_from_kind(index_uid, search_kind, search) {
            Ok((search_results, _)) => Ok((rtxn, Ok(search_results))),
            Err(MeilisearchHttpError::Milli {
                error: meilisearch_types::milli::Error::UserError(user_error),
                index_name: _,
            }) => Ok((rtxn, Err(user_error))),
            Err(err) => Err(ResponseError::from(err)),
        }
    })
    .await;
    permit.drop().await;

    let output = output?;
    let output = match output? {
        Ok((rtxn, Ok(search_results))) => Ok((rtxn, search_results)),
        Ok((_rtxn, Err(error))) => return Ok((index, Vec::new(), error.to_string())),
        Err(err) => Err(err),
    };
    let mut documents = Vec::new();
    if let Ok((ref rtxn, ref search_result)) = output {
        MEILISEARCH_CHAT_SEARCH_REQUESTS.with_label_values(&["internal"]).inc();
@@ -395,16 +424,19 @@ async fn non_streamed_chat(

    for call in meili_calls {
        let result = match serde_json::from_str(&call.function.arguments) {
            Ok(SearchInIndexParameters { index_uid, q }) => process_search_request(
                &index_scheduler,
                auth_ctrl.clone(),
                &search_queue,
                auth_token,
                index_uid,
                q,
            )
            .await
            .map_err(|e| e.to_string()),
            Ok(SearchInIndexParameters { index_uid, q, filter }) => {
                process_search_request(
                    &index_scheduler,
                    auth_ctrl.clone(),
                    &search_queue,
                    auth_token,
                    index_uid,
                    q,
                    filter,
                )
                .await
                .map_err(|e| e.to_string())
            }
            Err(err) => Err(err.to_string()),
        };

@@ -719,13 +751,14 @@ async fn handle_meili_tools(
    let mut error = None;

    let result = match serde_json::from_str(&call.function.arguments) {
        Ok(SearchInIndexParameters { index_uid, q }) => match process_search_request(
        Ok(SearchInIndexParameters { index_uid, q, filter }) => match process_search_request(
            index_scheduler,
            auth_ctrl.clone(),
            search_queue,
            auth_token,
            index_uid,
            q,
            filter,
        )
        .await
        {
@@ -801,4 +834,42 @@ struct SearchInIndexParameters {
    index_uid: String,
    /// The query parameter to use.
    q: Option<String>,
    /// The filter parameter to use.
    filter: Option<String>,
}

fn format_facet_distributions(
    index: &Index,
    rtxn: &RoTxn,
    max_values_per_facet: usize,
) -> meilisearch_types::milli::Result<String> {
    let universe = index.documents_ids(rtxn)?;
    let rules = index.filterable_attributes_rules(rtxn)?;
    let fields_ids_map = index.fields_ids_map(rtxn)?;
    let filterable_attributes = fields_ids_map
        .names()
        .filter(|name| rules.iter().any(|rule| matches!(rule.match_str(name), PatternMatch::Match)))
        .map(|name| (name, OrderBy::Count));
    let facets_distribution = index
        .facets_distribution(rtxn)
        .max_values_per_facet(max_values_per_facet)
        .candidates(universe)
        .facets(filterable_attributes)
        .execute()?;

    let mut output = String::new();
    for (facet_name, entries) in facets_distribution {
        let _ = write!(&mut output, "{}: ", facet_name);
        let total_entries = entries.len();
        for (i, (value, _count)) in entries.into_iter().enumerate() {
            let _ = if total_entries.saturating_sub(1) == i {
                write!(&mut output, "{value}.")
            } else {
                write!(&mut output, "{value}, ")
            };
        }
        let _ = writeln!(&mut output);
    }

    Ok(output)
}
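Note: to make the output of `format_facet_distributions` concrete, here is a small self-contained sketch of the same formatting loop over an invented distribution (the real function pulls its entries and counts from the index):

use std::collections::BTreeMap;
use std::fmt::Write as _;

fn format_distribution(facets: BTreeMap<&str, Vec<&str>>) -> String {
    let mut output = String::new();
    for (facet_name, values) in facets {
        let _ = write!(&mut output, "{}: ", facet_name);
        let total = values.len();
        for (i, value) in values.into_iter().enumerate() {
            // The last value ends the line with a period, the others with a comma.
            let _ = if i == total.saturating_sub(1) {
                write!(&mut output, "{value}.")
            } else {
                write!(&mut output, "{value}, ")
            };
        }
        let _ = writeln!(&mut output);
    }
    output
}

fn main() {
    let mut facets = BTreeMap::new();
    facets.insert("genres", vec!["Drama", "Comedy", "Adventure"]);
    facets.insert("year", vec!["2018", "2020"]);
    // Prints:
    // genres: Drama, Comedy, Adventure.
    // year: 2018, 2020.
    print!("{}", format_distribution(facets));
}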
@@ -8,8 +8,8 @@ use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::features::{
    ChatCompletionPrompts as DbChatCompletionPrompts, ChatCompletionSettings,
    ChatCompletionSource as DbChatCompletionSource, DEFAULT_CHAT_SEARCH_DESCRIPTION_PROMPT,
    DEFAULT_CHAT_SEARCH_INDEX_UID_PARAM_PROMPT, DEFAULT_CHAT_SEARCH_Q_PARAM_PROMPT,
    DEFAULT_CHAT_SYSTEM_PROMPT,
    DEFAULT_CHAT_SEARCH_FILTER_PARAM_PROMPT, DEFAULT_CHAT_SEARCH_INDEX_UID_PARAM_PROMPT,
    DEFAULT_CHAT_SEARCH_Q_PARAM_PROMPT, DEFAULT_CHAT_SYSTEM_PROMPT,
};
use meilisearch_types::keys::actions;
use meilisearch_types::milli::update::Setting;
@@ -84,6 +84,11 @@ async fn patch_settings(
                Setting::Reset => DEFAULT_CHAT_SEARCH_Q_PARAM_PROMPT.to_string(),
                Setting::NotSet => old_settings.prompts.search_q_param,
            },
            search_filter_param: match new_prompts.search_filter_param {
                Setting::Set(new_description) => new_description,
                Setting::Reset => DEFAULT_CHAT_SEARCH_FILTER_PARAM_PROMPT.to_string(),
                Setting::NotSet => old_settings.prompts.search_filter_param,
            },
            search_index_uid_param: match new_prompts.search_index_uid_param {
                Setting::Set(new_description) => new_description,
                Setting::Reset => DEFAULT_CHAT_SEARCH_INDEX_UID_PARAM_PROMPT.to_string(),
@@ -252,6 +257,10 @@ pub struct ChatPrompts {
    #[schema(value_type = Option<String>, example = json!("This is query parameter..."))]
    pub search_q_param: Setting<String>,
    #[serde(default)]
    #[deserr(default, error = DeserrJsonError<InvalidChatCompletionSearchFilterParamPrompt>)]
    #[schema(value_type = Option<String>, example = json!("This is filter parameter..."))]
    pub search_filter_param: Setting<String>,
    #[serde(default)]
    #[deserr(default, error = DeserrJsonError<InvalidChatCompletionSearchIndexUidParamPrompt>)]
    #[schema(value_type = Option<String>, example = json!("This is index you want to search in..."))]
    pub search_index_uid_param: Setting<String>,
@@ -1,6 +1,7 @@
|
||||
use std::collections::HashSet;
|
||||
use std::io::{ErrorKind, Seek as _};
|
||||
use std::marker::PhantomData;
|
||||
use std::str::FromStr;
|
||||
|
||||
use actix_web::http::header::CONTENT_TYPE;
|
||||
use actix_web::web::Data;
|
||||
@@ -17,9 +18,11 @@ use meilisearch_types::error::deserr_codes::*;
|
||||
use meilisearch_types::error::{Code, ResponseError};
|
||||
use meilisearch_types::heed::RoTxn;
|
||||
use meilisearch_types::index_uid::IndexUid;
|
||||
use meilisearch_types::milli::documents::sort::recursive_sort;
|
||||
use meilisearch_types::milli::index::EmbeddingsWithMetadata;
|
||||
use meilisearch_types::milli::update::IndexDocumentsMethod;
|
||||
use meilisearch_types::milli::vector::parsed_vectors::ExplicitVectors;
|
||||
use meilisearch_types::milli::DocumentId;
|
||||
use meilisearch_types::milli::{AscDesc, DocumentId};
|
||||
use meilisearch_types::serde_cs::vec::CS;
|
||||
use meilisearch_types::star_or::OptionStarOrList;
|
||||
use meilisearch_types::tasks::KindWithContent;
|
||||
@@ -42,6 +45,8 @@ use crate::extractors::authentication::policies::*;
|
||||
use crate::extractors::authentication::GuardedData;
|
||||
use crate::extractors::payload::Payload;
|
||||
use crate::extractors::sequential_extractor::SeqHandler;
|
||||
use crate::routes::indexes::proxy::{proxy, Body};
|
||||
use crate::routes::indexes::search::fix_sort_query_parameters;
|
||||
use crate::routes::{
|
||||
get_task_id, is_dry_run, PaginationView, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT,
|
||||
};
|
||||
@@ -135,6 +140,8 @@ pub struct DocumentsFetchAggregator<Method: AggregateMethod> {
|
||||
per_document_id: bool,
|
||||
// if a filter was used
|
||||
per_filter: bool,
|
||||
// if documents were sorted
|
||||
sort: bool,
|
||||
|
||||
#[serde(rename = "vector.retrieve_vectors")]
|
||||
retrieve_vectors: bool,
|
||||
@@ -151,39 +158,6 @@ pub struct DocumentsFetchAggregator<Method: AggregateMethod> {
|
||||
marker: std::marker::PhantomData<Method>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
pub enum DocumentFetchKind {
|
||||
PerDocumentId { retrieve_vectors: bool },
|
||||
Normal { with_filter: bool, limit: usize, offset: usize, retrieve_vectors: bool, ids: usize },
|
||||
}
|
||||
|
||||
impl<Method: AggregateMethod> DocumentsFetchAggregator<Method> {
|
||||
pub fn from_query(query: &DocumentFetchKind) -> Self {
|
||||
let (limit, offset, retrieve_vectors) = match query {
|
||||
DocumentFetchKind::PerDocumentId { retrieve_vectors } => (1, 0, *retrieve_vectors),
|
||||
DocumentFetchKind::Normal { limit, offset, retrieve_vectors, .. } => {
|
||||
(*limit, *offset, *retrieve_vectors)
|
||||
}
|
||||
};
|
||||
|
||||
let ids = match query {
|
||||
DocumentFetchKind::Normal { ids, .. } => *ids,
|
||||
DocumentFetchKind::PerDocumentId { .. } => 0,
|
||||
};
|
||||
|
||||
Self {
|
||||
per_document_id: matches!(query, DocumentFetchKind::PerDocumentId { .. }),
|
||||
per_filter: matches!(query, DocumentFetchKind::Normal { with_filter, .. } if *with_filter),
|
||||
max_limit: limit,
|
||||
max_offset: offset,
|
||||
retrieve_vectors,
|
||||
max_document_ids: ids,
|
||||
|
||||
marker: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Method: AggregateMethod> Aggregate for DocumentsFetchAggregator<Method> {
|
||||
fn event_name(&self) -> &'static str {
|
||||
Method::event_name()
|
||||
@@ -193,6 +167,7 @@ impl<Method: AggregateMethod> Aggregate for DocumentsFetchAggregator<Method> {
|
||||
Box::new(Self {
|
||||
per_document_id: self.per_document_id | new.per_document_id,
|
||||
per_filter: self.per_filter | new.per_filter,
|
||||
sort: self.sort | new.sort,
|
||||
retrieve_vectors: self.retrieve_vectors | new.retrieve_vectors,
|
||||
max_limit: self.max_limit.max(new.max_limit),
|
||||
max_offset: self.max_offset.max(new.max_offset),
|
||||
@@ -276,6 +251,7 @@ pub async fn get_document(
|
||||
retrieve_vectors: param_retrieve_vectors.0,
|
||||
per_document_id: true,
|
||||
per_filter: false,
|
||||
sort: false,
|
||||
max_limit: 0,
|
||||
max_offset: 0,
|
||||
max_document_ids: 0,
|
||||
@@ -359,6 +335,7 @@ pub async fn delete_document(
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
let DocumentParam { index_uid, document_id } = path.into_inner();
|
||||
let index_uid = IndexUid::try_from(index_uid)?;
|
||||
let network = index_scheduler.network();
|
||||
|
||||
analytics.publish(
|
||||
DocumentsDeletionAggregator {
|
||||
@@ -376,10 +353,16 @@ pub async fn delete_document(
|
||||
};
|
||||
let uid = get_task_id(&req, &opt)?;
|
||||
let dry_run = is_dry_run(&req, &opt)?;
|
||||
let task: SummarizedTaskView =
|
||||
tokio::task::spawn_blocking(move || index_scheduler.register(task, uid, dry_run))
|
||||
.await??
|
||||
.into();
|
||||
let task = {
|
||||
let index_scheduler = index_scheduler.clone();
|
||||
tokio::task::spawn_blocking(move || index_scheduler.register(task, uid, dry_run)).await??
|
||||
};
|
||||
|
||||
if network.sharding && !dry_run {
|
||||
proxy(&index_scheduler, &index_uid, &req, network, Body::none(), &task).await?;
|
||||
}
|
||||
|
||||
let task: SummarizedTaskView = task.into();
|
||||
debug!("returns: {:?}", task);
|
||||
Ok(HttpResponse::Accepted().json(task))
|
||||
}
|
||||
@@ -406,6 +389,8 @@ pub struct BrowseQueryGet {
    #[param(default, value_type = Option<String>, example = "popularity > 1000")]
    #[deserr(default, error = DeserrQueryParamError<InvalidDocumentFilter>)]
    filter: Option<String>,
+   #[deserr(default, error = DeserrQueryParamError<InvalidDocumentSort>)]
+   sort: Option<String>,
}

#[derive(Debug, Deserr, ToSchema)]
@@ -430,6 +415,9 @@ pub struct BrowseQuery {
    #[schema(default, value_type = Option<Value>, example = "popularity > 1000")]
    #[deserr(default, error = DeserrJsonError<InvalidDocumentFilter>)]
    filter: Option<Value>,
+   #[schema(default, value_type = Option<Vec<String>>, example = json!(["title:asc", "rating:desc"]))]
+   #[deserr(default, error = DeserrJsonError<InvalidDocumentSort>)]
+   sort: Option<Vec<String>>,
}

/// Get documents with POST
@@ -495,6 +483,7 @@ pub async fn documents_by_query_post(
    analytics.publish(
        DocumentsFetchAggregator::<DocumentsPOST> {
            per_filter: body.filter.is_some(),
+           sort: body.sort.is_some(),
            retrieve_vectors: body.retrieve_vectors,
            max_limit: body.limit,
            max_offset: body.offset,
@@ -571,7 +560,7 @@ pub async fn get_documents(
) -> Result<HttpResponse, ResponseError> {
    debug!(parameters = ?params, "Get documents GET");

-   let BrowseQueryGet { limit, offset, fields, retrieve_vectors, filter, ids } =
+   let BrowseQueryGet { limit, offset, fields, retrieve_vectors, filter, ids, sort } =
        params.into_inner();

    let filter = match filter {
@@ -582,20 +571,20 @@ pub async fn get_documents(
        None => None,
    };

-   let ids = ids.map(|ids| ids.into_iter().map(Into::into).collect());
-
    let query = BrowseQuery {
        offset: offset.0,
        limit: limit.0,
        fields: fields.merge_star_and_none(),
        retrieve_vectors: retrieve_vectors.0,
        filter,
-       ids,
+       ids: ids.map(|ids| ids.into_iter().map(Into::into).collect()),
+       sort: sort.map(|attr| fix_sort_query_parameters(&attr)),
    };

    analytics.publish(
        DocumentsFetchAggregator::<DocumentsGET> {
            per_filter: query.filter.is_some(),
+           sort: query.sort.is_some(),
            retrieve_vectors: query.retrieve_vectors,
            max_limit: query.limit,
            max_offset: query.offset,
@@ -615,7 +604,7 @@ fn documents_by_query(
    query: BrowseQuery,
) -> Result<HttpResponse, ResponseError> {
    let index_uid = IndexUid::try_from(index_uid.into_inner())?;
-   let BrowseQuery { offset, limit, fields, retrieve_vectors, filter, ids } = query;
+   let BrowseQuery { offset, limit, fields, retrieve_vectors, filter, ids, sort } = query;

    let retrieve_vectors = RetrieveVectors::new(retrieve_vectors);

@@ -633,6 +622,18 @@ fn documents_by_query(
        None
    };

+   let sort_criteria = if let Some(sort) = &sort {
+       let sorts: Vec<_> = match sort.iter().map(|s| milli::AscDesc::from_str(s)).collect() {
+           Ok(sorts) => sorts,
+           Err(asc_desc_error) => {
+               return Err(milli::SortError::from(asc_desc_error).into_document_error().into())
+           }
+       };
+       Some(sorts)
+   } else {
+       None
+   };
+
    let index = index_scheduler.index(&index_uid)?;
    let (total, documents) = retrieve_documents(
        &index,
@@ -643,6 +644,7 @@ fn documents_by_query(
        fields,
        retrieve_vectors,
        index_scheduler.features(),
+       sort_criteria,
    )?;

    let ret = PaginationView::new(offset, limit, total as usize, documents);
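The new documents `sort` parameter reuses the search sort syntax: each entry is a `field:asc` or `field:desc` string, parsed through `milli::AscDesc::from_str`, with parse failures surfaced as a document-specific sort error. A simplified, hypothetical parser showing just the accepted shape (the real grammar lives in milli and also handles `_geoPoint` and friends):

    // Simplified sketch of the `field:asc` / `field:desc` shape accepted by
    // the documents `sort` parameter; the real parser is milli's AscDesc.
    #[derive(Debug, PartialEq)]
    enum AscDesc { Asc(String), Desc(String) }

    fn parse_sort(s: &str) -> Result<AscDesc, String> {
        match s.rsplit_once(':') {
            Some((field, "asc")) => Ok(AscDesc::Asc(field.to_string())),
            Some((field, "desc")) => Ok(AscDesc::Desc(field.to_string())),
            _ => Err(format!("invalid sort expression: `{s}`")),
        }
    }

    fn main() {
        assert_eq!(parse_sort("age:asc"), Ok(AscDesc::Asc("age".into())));
        assert!(parse_sort("age").is_err()); // missing direction is rejected
    }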
@@ -798,7 +800,6 @@ pub async fn replace_documents(
    let uid = get_task_id(&req, &opt)?;
    let dry_run = is_dry_run(&req, &opt)?;
    let task = document_addition(
-       extract_mime_type(&req)?,
        index_scheduler,
        index_uid,
        params.primary_key,
@@ -808,8 +809,10 @@ pub async fn replace_documents(
        uid,
        dry_run,
        allow_index_creation,
+       &req,
    )
    .await?;

    debug!(returns = ?task, "Replace documents");

    Ok(HttpResponse::Accepted().json(task))
@@ -899,7 +902,6 @@ pub async fn update_documents(
    let uid = get_task_id(&req, &opt)?;
    let dry_run = is_dry_run(&req, &opt)?;
    let task = document_addition(
-       extract_mime_type(&req)?,
        index_scheduler,
        index_uid,
        params.primary_key,
@@ -909,6 +911,7 @@ pub async fn update_documents(
        uid,
        dry_run,
        allow_index_creation,
+       &req,
    )
    .await?;
    debug!(returns = ?task, "Update documents");
@@ -918,7 +921,6 @@ pub async fn update_documents(

#[allow(clippy::too_many_arguments)]
async fn document_addition(
-   mime_type: Option<Mime>,
    index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, Data<IndexScheduler>>,
    index_uid: IndexUid,
    primary_key: Option<String>,
@@ -928,7 +930,11 @@ async fn document_addition(
    task_id: Option<TaskId>,
    dry_run: bool,
    allow_index_creation: bool,
+   req: &HttpRequest,
) -> Result<SummarizedTaskView, MeilisearchHttpError> {
+   let mime_type = extract_mime_type(req)?;
+   let network = index_scheduler.network();
+
    let format = match (
        mime_type.as_ref().map(|m| (m.type_().as_str(), m.subtype().as_str())),
        csv_delimiter,
@@ -960,7 +966,7 @@ async fn document_addition(
    };

    let (uuid, mut update_file) = index_scheduler.queue.create_update_file(dry_run)?;
-   let documents_count = match format {
+   let res = match format {
        PayloadType::Ndjson => {
            let (path, file) = update_file.into_parts();
            let file = match file {
@@ -975,19 +981,19 @@ async fn document_addition(
                None => None,
            };

-           let documents_count = tokio::task::spawn_blocking(move || {
+           let res = tokio::task::spawn_blocking(move || {
                let documents_count = file.as_ref().map_or(Ok(0), |ntf| {
                    read_ndjson(ntf.as_file()).map_err(MeilisearchHttpError::DocumentFormat)
                })?;

                let update_file = file_store::File::from_parts(path, file);
-               update_file.persist()?;
+               let update_file = update_file.persist()?;

-               Ok(documents_count)
+               Ok((documents_count, update_file))
            })
            .await?;

-           Ok(documents_count)
+           Ok(res)
        }
        PayloadType::Json | PayloadType::Csv { delimiter: _ } => {
            let temp_file = match tempfile() {
@@ -1006,16 +1012,16 @@ async fn document_addition(
                    unreachable!("We already wrote the user content into the update file")
                }
            };
-           // we NEED to persist the file here because we moved the `udpate_file` in another task.
-           update_file.persist()?;
-           Ok(documents_count)
+           // we NEED to persist the file here because we moved the `update_file` in another task.
+           let file = update_file.persist()?;
+           Ok((documents_count, file))
        })
        .await
    }
    };

-   let documents_count = match documents_count {
-       Ok(Ok(documents_count)) => documents_count,
+   let (documents_count, file) = match res {
+       Ok(Ok((documents_count, file))) => (documents_count, file),
        // in this case the file could not have been persisted.
        Ok(Err(e)) => return Err(e),
        Err(e) => {
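`tokio::task::spawn_blocking` returns `Result<T, JoinError>`, and the blocking closure here is itself fallible, so the caller ends up matching on `Ok(Ok(..))`, `Ok(Err(..))`, and `Err(..)` as above. A minimal standalone illustration of that double-unwrap pattern, independent of the Meilisearch types:

    // Minimal illustration of the nested Result produced by spawn_blocking
    // when the blocking closure is itself fallible, mirroring the match above.
    async fn count_in_background() -> Result<u64, String> {
        let res: Result<Result<u64, String>, tokio::task::JoinError> =
            tokio::task::spawn_blocking(|| {
                // pretend to count documents; may fail with a domain error
                Ok::<u64, String>(3)
            })
            .await;

        match res {
            Ok(Ok(count)) => Ok(count),                 // closure succeeded
            Ok(Err(domain_err)) => Err(domain_err),     // closure returned an error
            Err(join_err) => Err(join_err.to_string()), // task panicked or was cancelled
        }
    }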
@@ -1057,6 +1063,20 @@ async fn document_addition(
        }
    };

+   if network.sharding {
+       if let Some(file) = file {
+           proxy(
+               &index_scheduler,
+               &index_uid,
+               req,
+               network,
+               Body::with_ndjson_payload(file),
+               &task,
+           )
+           .await?;
+       }
+   }
+
    Ok(task.into())
}
@@ -1135,6 +1155,7 @@ pub async fn delete_documents_batch(
) -> Result<HttpResponse, ResponseError> {
    debug!(parameters = ?body, "Delete documents by batch");
    let index_uid = IndexUid::try_from(index_uid.into_inner())?;
+   let network = index_scheduler.network();

    analytics.publish(
        DocumentsDeletionAggregator {
@@ -1155,16 +1176,22 @@ pub async fn delete_documents_batch(
        KindWithContent::DocumentDeletion { index_uid: index_uid.to_string(), documents_ids: ids };
    let uid = get_task_id(&req, &opt)?;
    let dry_run = is_dry_run(&req, &opt)?;
-   let task: SummarizedTaskView =
-       tokio::task::spawn_blocking(move || index_scheduler.register(task, uid, dry_run))
-           .await??
-           .into();
+   let task = {
+       let index_scheduler = index_scheduler.clone();
+       tokio::task::spawn_blocking(move || index_scheduler.register(task, uid, dry_run)).await??
+   };
+
+   if network.sharding && !dry_run {
+       proxy(&index_scheduler, &index_uid, &req, network, Body::Inline(body), &task).await?;
+   }
+
+   let task: SummarizedTaskView = task.into();

    debug!(returns = ?task, "Delete documents by batch");
    Ok(HttpResponse::Accepted().json(task))
}

-#[derive(Debug, Deserr, ToSchema)]
+#[derive(Debug, Deserr, ToSchema, Serialize)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[schema(rename_all = "camelCase")]
pub struct DocumentDeletionByFilter {
@@ -1213,7 +1240,8 @@ pub async fn delete_documents_by_filter(
    debug!(parameters = ?body, "Delete documents by filter");
-   let index_uid = IndexUid::try_from(index_uid.into_inner())?;
+   let index_uid = index_uid.into_inner();
-   let filter = body.into_inner().filter;
+   let filter = body.into_inner();
+   let network = index_scheduler.network();

    analytics.publish(
        DocumentsDeletionAggregator {
@@ -1226,23 +1254,36 @@ pub async fn delete_documents_by_filter(
    );

    // we ensure the filter is well formed before enqueuing it
-   crate::search::parse_filter(&filter, Code::InvalidDocumentFilter, index_scheduler.features())?
-       .ok_or(MeilisearchHttpError::EmptyFilter)?;
+   crate::search::parse_filter(
+       &filter.filter,
+       Code::InvalidDocumentFilter,
+       index_scheduler.features(),
+   )?
+   .ok_or(MeilisearchHttpError::EmptyFilter)?;

-   let task = KindWithContent::DocumentDeletionByFilter { index_uid, filter_expr: filter };
+   let task = KindWithContent::DocumentDeletionByFilter {
+       index_uid: index_uid.clone(),
+       filter_expr: filter.filter.clone(),
+   };

    let uid = get_task_id(&req, &opt)?;
    let dry_run = is_dry_run(&req, &opt)?;
-   let task: SummarizedTaskView =
-       tokio::task::spawn_blocking(move || index_scheduler.register(task, uid, dry_run))
-           .await??
-           .into();
+   let task = {
+       let index_scheduler = index_scheduler.clone();
+       tokio::task::spawn_blocking(move || index_scheduler.register(task, uid, dry_run)).await??
+   };
+
+   if network.sharding && !dry_run {
+       proxy(&index_scheduler, &index_uid, &req, network, Body::Inline(filter), &task).await?;
+   }
+
+   let task: SummarizedTaskView = task.into();

    debug!(returns = ?task, "Delete documents by filter");
    Ok(HttpResponse::Accepted().json(task))
}

-#[derive(Debug, Deserr, ToSchema)]
+#[derive(Debug, Deserr, ToSchema, Serialize)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
pub struct DocumentEditionByFunction {
    /// A string containing a RHAI function.
@@ -1330,6 +1371,8 @@ pub async fn edit_documents_by_function(
        .features()
        .check_edit_documents_by_function("Using the documents edit route")?;

+   let network = index_scheduler.network();
+
-   let index_uid = IndexUid::try_from(index_uid.into_inner())?;
+   let index_uid = index_uid.into_inner();
    let params = params.into_inner();
@@ -1343,13 +1386,12 @@ pub async fn edit_documents_by_function(
        &req,
    );

-   let DocumentEditionByFunction { filter, context, function } = params;
    let engine = milli::rhai::Engine::new();
-   if let Err(e) = engine.compile(&function) {
+   if let Err(e) = engine.compile(&params.function) {
        return Err(ResponseError::from_msg(e.to_string(), Code::BadRequest));
    }

-   if let Some(ref filter) = filter {
+   if let Some(ref filter) = params.filter {
        // we ensure the filter is well formed before enqueuing it
        crate::search::parse_filter(
            filter,
@@ -1359,9 +1401,9 @@ pub async fn edit_documents_by_function(
        .ok_or(MeilisearchHttpError::EmptyFilter)?;
    }
    let task = KindWithContent::DocumentEdition {
-       index_uid,
-       filter_expr: filter,
-       context: match context {
+       index_uid: index_uid.clone(),
+       filter_expr: params.filter.clone(),
+       context: match params.context.clone() {
            Some(Value::Object(m)) => Some(m),
            None => None,
            _ => {
@@ -1371,15 +1413,21 @@ pub async fn edit_documents_by_function(
                ))
            }
        },
-       function,
+       function: params.function.clone(),
    };

    let uid = get_task_id(&req, &opt)?;
    let dry_run = is_dry_run(&req, &opt)?;
-   let task: SummarizedTaskView =
-       tokio::task::spawn_blocking(move || index_scheduler.register(task, uid, dry_run))
-           .await??
-           .into();
+   let task = {
+       let index_scheduler = index_scheduler.clone();
+       tokio::task::spawn_blocking(move || index_scheduler.register(task, uid, dry_run)).await??
+   };
+
+   if network.sharding && !dry_run {
+       proxy(&index_scheduler, &index_uid, &req, network, Body::Inline(params), &task).await?;
+   }
+
+   let task: SummarizedTaskView = task.into();

    debug!(returns = ?task, "Edit documents by function");
    Ok(HttpResponse::Accepted().json(task))
@@ -1422,6 +1470,8 @@ pub async fn clear_all_documents(
    analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
    let index_uid = IndexUid::try_from(index_uid.into_inner())?;
+   let network = index_scheduler.network();
+
    analytics.publish(
        DocumentsDeletionAggregator {
            clear_all: true,
@@ -1435,10 +1485,18 @@ pub async fn clear_all_documents(
    let task = KindWithContent::DocumentClear { index_uid: index_uid.to_string() };
    let uid = get_task_id(&req, &opt)?;
    let dry_run = is_dry_run(&req, &opt)?;
-   let task: SummarizedTaskView =
-       tokio::task::spawn_blocking(move || index_scheduler.register(task, uid, dry_run))
-           .await??
-           .into();
+
+   let task = {
+       let index_scheduler = index_scheduler.clone();
+
+       tokio::task::spawn_blocking(move || index_scheduler.register(task, uid, dry_run)).await??
+   };
+
+   if network.sharding && !dry_run {
+       proxy(&index_scheduler, &index_uid, &req, network, Body::none(), &task).await?;
+   }
+
+   let task: SummarizedTaskView = task.into();

    debug!(returns = ?task, "Delete all documents");
    Ok(HttpResponse::Accepted().json(task))
@@ -1467,9 +1525,13 @@ fn some_documents<'a, 't: 'a>(
        Some(Value::Object(map)) => map,
        _ => Default::default(),
    };
-   for (name, (vector, regenerate)) in index.embeddings(rtxn, key)? {
+   for (
+       name,
+       EmbeddingsWithMetadata { embeddings, regenerate, has_fragments: _ },
+   ) in index.embeddings(rtxn, key)?
+   {
        let embeddings =
-           ExplicitVectors { embeddings: Some(vector.into()), regenerate };
+           ExplicitVectors { embeddings: Some(embeddings.into()), regenerate };
        vectors.insert(
            name,
            serde_json::to_value(embeddings).map_err(MeilisearchHttpError::from)?,
@@ -1494,6 +1556,7 @@ fn retrieve_documents<S: AsRef<str>>(
    attributes_to_retrieve: Option<Vec<S>>,
    retrieve_vectors: RetrieveVectors,
    features: RoFeatures,
+   sort_criteria: Option<Vec<AscDesc>>,
) -> Result<(u64, Vec<Document>), ResponseError> {
    let rtxn = index.read_txn()?;
    let filter = &filter;
@@ -1526,15 +1589,32 @@ fn retrieve_documents<S: AsRef<str>>(
        })?
    }

-   let (it, number_of_documents) = {
+   let (it, number_of_documents) = if let Some(sort) = sort_criteria {
        let number_of_documents = candidates.len();
+       let facet_sort = recursive_sort(index, &rtxn, sort, &candidates)?;
+       let iter = facet_sort.iter()?;
+       let mut documents = Vec::with_capacity(limit);
+       for result in iter.skip(offset).take(limit) {
+           documents.push(result?);
+       }
+       (
+           itertools::Either::Left(some_documents(
+               index,
+               &rtxn,
+               documents.into_iter(),
+               retrieve_vectors,
+           )?),
+           number_of_documents,
+       )
+   } else {
+       let number_of_documents = candidates.len();
        (
-           some_documents(
+           itertools::Either::Right(some_documents(
                index,
                &rtxn,
                candidates.into_iter().skip(offset).take(limit),
                retrieve_vectors,
-           )?,
+           )?),
            number_of_documents,
        )
    };
@@ -30,6 +30,7 @@ use crate::Opt;

pub mod documents;
pub mod facet_search;
+mod proxy;
pub mod search;
mod search_analytics;
#[cfg(test)]
@@ -39,6 +40,8 @@ mod settings_analytics;
pub mod similar;
mod similar_analytics;

+pub use proxy::{PROXY_ORIGIN_REMOTE_HEADER, PROXY_ORIGIN_TASK_UID_HEADER};
+
#[derive(OpenApi)]
#[openapi(
    nest(
crates/meilisearch/src/routes/indexes/proxy.rs (new file, 424 lines)
@@ -0,0 +1,424 @@
// Copyright © 2025 Meilisearch Some Rights Reserved
// This file is part of Meilisearch Enterprise Edition (EE).
// Use of this source code is governed by the Business Source License 1.1,
// as found in the LICENSE-EE file or at <https://mariadb.com/bsl11>

use std::collections::BTreeMap;
use std::fs::File;

use actix_web::http::header::CONTENT_TYPE;
use actix_web::HttpRequest;
use bytes::Bytes;
use index_scheduler::IndexScheduler;
use meilisearch_types::error::ResponseError;
use meilisearch_types::tasks::{Origin, RemoteTask, TaskNetwork};
use reqwest::StatusCode;
use serde::de::DeserializeOwned;
use serde_json::Value;

use crate::error::MeilisearchHttpError;
use crate::routes::indexes::proxy::error::{ProxyDocumentChangeError, ReqwestErrorWithoutUrl};
use crate::routes::SummarizedTaskView;

pub enum Body<T: serde::Serialize> {
    NdJsonPayload(File),
    Inline(T),
    None,
}

impl Body<()> {
    pub fn with_ndjson_payload(file: File) -> Self {
        Self::NdJsonPayload(file)
    }

    pub fn none() -> Self {
        Self::None
    }
}

/// If necessary, proxies the passed request to the network and updates the task description.
///
/// This function reads the custom headers from the request to determine whether it must proxy the request
/// or whether the request has already been proxied.
///
/// - when it must proxy the request, the endpoint, method and query params are retrieved from the passed `req`, then the `body` is
///   sent to all remotes of the `network` (except `self`). The responses from the remotes are collected to update the passed `task`
///   with the task ids from the task queues of the remotes.
/// - when the request has already been proxied, the custom headers contain information about the remote that created the initial task.
///   This information is copied to the passed task.
pub async fn proxy<T: serde::Serialize>(
    index_scheduler: &IndexScheduler,
    index_uid: &str,
    req: &HttpRequest,
    network: meilisearch_types::network::Network,
    body: Body<T>,
    task: &meilisearch_types::tasks::Task,
) -> Result<(), MeilisearchHttpError> {
    match origin_from_req(req)? {
        Some(origin) => {
            index_scheduler.set_task_network(task.uid, TaskNetwork::Origin { origin })?
        }
        None => {
            let this = network
                .local
                .as_deref()
                .expect("inconsistent `network.sharding` and `network.self`")
                .to_owned();

            let content_type = match &body {
                // for file bodies, force x-ndjson
                Body::NdJsonPayload(_) => Some(b"application/x-ndjson".as_slice()),
                // otherwise get content type from request
                _ => req.headers().get(CONTENT_TYPE).map(|h| h.as_bytes()),
            };

            let body = match body {
                Body::NdJsonPayload(file) => Some(Bytes::from_owner(unsafe {
                    memmap2::Mmap::map(&file).map_err(|err| {
                        MeilisearchHttpError::from_milli(err.into(), Some(index_uid.to_owned()))
                    })?
                })),

                Body::Inline(payload) => {
                    Some(Bytes::copy_from_slice(&serde_json::to_vec(&payload).unwrap()))
                }

                Body::None => None,
            };

            let mut in_flight_remote_queries = BTreeMap::new();
            let client = reqwest::ClientBuilder::new()
                .connect_timeout(std::time::Duration::from_secs(3))
                .build()
                .unwrap();

            let method = from_old_http_method(req.method());

            // send payload to all remotes
            for (node_name, node) in
                network.remotes.into_iter().filter(|(name, _)| name.as_str() != this)
            {
                let body = body.clone();
                let client = client.clone();
                let api_key = node.write_api_key;
                let this = this.clone();
                let method = method.clone();
                let path_and_query =
                    req.uri().path_and_query().map(|paq| paq.as_str()).unwrap_or("/");

                in_flight_remote_queries.insert(
                    node_name,
                    tokio::spawn({
                        let url = format!("{}{}", node.url, path_and_query);

                        let url_encoded_this = urlencoding::encode(&this).into_owned();
                        let url_encoded_task_uid = task.uid.to_string(); // it's URL encoded, I promise

                        let content_type = content_type.map(|b| b.to_owned());

                        let backoff = backoff::ExponentialBackoffBuilder::new()
                            .with_max_elapsed_time(Some(std::time::Duration::from_secs(25)))
                            .build();

                        backoff::future::retry(backoff, move || {
                            let url = url.clone();
                            let client = client.clone();
                            let url_encoded_this = url_encoded_this.clone();
                            let url_encoded_task_uid = url_encoded_task_uid.clone();
                            let content_type = content_type.clone();

                            let body = body.clone();
                            let api_key = api_key.clone();
                            let method = method.clone();

                            async move {
                                try_proxy(
                                    method,
                                    &url,
                                    content_type.as_deref(),
                                    api_key.as_deref(),
                                    &client,
                                    &url_encoded_this,
                                    &url_encoded_task_uid,
                                    body,
                                )
                                .await
                            }
                        })
                    }),
                );
            }

            // wait for all in-flight queries to finish and collect their results
            let mut remote_tasks: BTreeMap<String, RemoteTask> = BTreeMap::new();
            for (node_name, handle) in in_flight_remote_queries {
                match handle.await {
                    Ok(Ok(res)) => {
                        let task_uid = res.task_uid;

                        remote_tasks.insert(node_name, Ok(task_uid).into());
                    }
                    Ok(Err(error)) => {
                        remote_tasks.insert(node_name, Err(error.as_response_error()).into());
                    }
                    Err(panic) => match panic.try_into_panic() {
                        Ok(panic) => {
                            let msg = match panic.downcast_ref::<&'static str>() {
                                Some(s) => *s,
                                None => match panic.downcast_ref::<String>() {
                                    Some(s) => &s[..],
                                    None => "Box<dyn Any>",
                                },
                            };
                            remote_tasks.insert(
                                node_name,
                                Err(ResponseError::from_msg(
                                    msg.to_string(),
                                    meilisearch_types::error::Code::Internal,
                                ))
                                .into(),
                            );
                        }
                        Err(_) => {
                            tracing::error!("proxy task was unexpectedly cancelled")
                        }
                    },
                }
            }

            // edit details to contain the return values from the remotes
            index_scheduler.set_task_network(task.uid, TaskNetwork::Remotes { remote_tasks })?;
        }
    }

    Ok(())
}
fn from_old_http_method(method: &actix_http::Method) -> reqwest::Method {
    match method {
        &actix_http::Method::CONNECT => reqwest::Method::CONNECT,
        &actix_http::Method::DELETE => reqwest::Method::DELETE,
        &actix_http::Method::GET => reqwest::Method::GET,
        &actix_http::Method::HEAD => reqwest::Method::HEAD,
        &actix_http::Method::OPTIONS => reqwest::Method::OPTIONS,
        &actix_http::Method::PATCH => reqwest::Method::PATCH,
        &actix_http::Method::POST => reqwest::Method::POST,
        &actix_http::Method::PUT => reqwest::Method::PUT,
        &actix_http::Method::TRACE => reqwest::Method::TRACE,
        method => reqwest::Method::from_bytes(method.as_str().as_bytes()).unwrap(),
    }
}

#[allow(clippy::too_many_arguments)]
async fn try_proxy(
    method: reqwest::Method,
    url: &str,
    content_type: Option<&[u8]>,
    api_key: Option<&str>,
    client: &reqwest::Client,
    url_encoded_this: &str,
    url_encoded_task_uid: &str,
    body: Option<Bytes>,
) -> Result<SummarizedTaskView, backoff::Error<ProxyDocumentChangeError>> {
    let request = client.request(method, url).timeout(std::time::Duration::from_secs(30));
    let request = if let Some(body) = body { request.body(body) } else { request };
    let request = if let Some(api_key) = api_key { request.bearer_auth(api_key) } else { request };
    let request = request.header(PROXY_ORIGIN_TASK_UID_HEADER, url_encoded_task_uid);
    let request = request.header(PROXY_ORIGIN_REMOTE_HEADER, url_encoded_this);
    let request = if let Some(content_type) = content_type {
        request.header(CONTENT_TYPE.as_str(), content_type)
    } else {
        request
    };

    let response = request.send().await;
    let response = match response {
        Ok(response) => response,
        Err(error) if error.is_timeout() => {
            return Err(backoff::Error::transient(ProxyDocumentChangeError::Timeout))
        }
        Err(error) => {
            return Err(backoff::Error::transient(ProxyDocumentChangeError::CouldNotSendRequest(
                ReqwestErrorWithoutUrl::new(error),
            )))
        }
    };

    match response.status() {
        status_code if status_code.is_success() => (),
        StatusCode::UNAUTHORIZED | StatusCode::FORBIDDEN => {
            return Err(backoff::Error::Permanent(ProxyDocumentChangeError::AuthenticationError))
        }
        status_code if status_code.is_client_error() => {
            let response = parse_error(response).await;
            return Err(backoff::Error::Permanent(ProxyDocumentChangeError::BadRequest {
                status_code,
                response,
            }));
        }
        status_code if status_code.is_server_error() => {
            let response = parse_error(response).await;
            return Err(backoff::Error::transient(ProxyDocumentChangeError::RemoteError {
                status_code,
                response,
            }));
        }
        status_code => {
            tracing::warn!(
                status_code = status_code.as_u16(),
                "remote replied with unexpected status code"
            );
        }
    }

    let response = match parse_response(response).await {
        Ok(response) => response,
        Err(response) => {
            return Err(backoff::Error::transient(
                ProxyDocumentChangeError::CouldNotParseResponse { response },
            ))
        }
    };

    Ok(response)
}

async fn parse_error(response: reqwest::Response) -> Result<String, ReqwestErrorWithoutUrl> {
    let bytes = match response.bytes().await {
        Ok(bytes) => bytes,
        Err(error) => return Err(ReqwestErrorWithoutUrl::new(error)),
    };

    Ok(parse_bytes_as_error(&bytes))
}

fn parse_bytes_as_error(bytes: &[u8]) -> String {
    match serde_json::from_slice::<Value>(bytes) {
        Ok(value) => value.to_string(),
        Err(_) => String::from_utf8_lossy(bytes).into_owned(),
    }
}

async fn parse_response<T: DeserializeOwned>(
    response: reqwest::Response,
) -> Result<T, Result<String, ReqwestErrorWithoutUrl>> {
    let bytes = match response.bytes().await {
        Ok(bytes) => bytes,
        Err(error) => return Err(Err(ReqwestErrorWithoutUrl::new(error))),
    };

    match serde_json::from_slice::<T>(&bytes) {
        Ok(value) => Ok(value),
        Err(_) => Err(Ok(parse_bytes_as_error(&bytes))),
    }
}
mod error {
    use meilisearch_types::error::ResponseError;
    use reqwest::StatusCode;

    #[derive(Debug, thiserror::Error)]
    pub enum ProxyDocumentChangeError {
        #[error("{0}")]
        CouldNotSendRequest(ReqwestErrorWithoutUrl),
        #[error("could not authenticate against the remote host\n - hint: check that the remote instance was registered with a valid API key having the `documents.add` action")]
        AuthenticationError,
        #[error(
            "could not parse response from the remote host as a document addition response{}\n - hint: check that the remote instance is a Meilisearch instance running the same version",
            response_from_remote(response)
        )]
        CouldNotParseResponse { response: Result<String, ReqwestErrorWithoutUrl> },
        #[error("remote host responded with code {}{}\n - hint: check that the remote instance has the correct index configuration for that request\n - hint: check that the `network` experimental feature is enabled on the remote instance", status_code.as_u16(), response_from_remote(response))]
        BadRequest { status_code: StatusCode, response: Result<String, ReqwestErrorWithoutUrl> },
        #[error("remote host did not answer before the deadline")]
        Timeout,
        #[error("remote host responded with code {}{}", status_code.as_u16(), response_from_remote(response))]
        RemoteError { status_code: StatusCode, response: Result<String, ReqwestErrorWithoutUrl> },
    }

    impl ProxyDocumentChangeError {
        pub fn as_response_error(&self) -> ResponseError {
            use meilisearch_types::error::Code;
            let message = self.to_string();
            let code = match self {
                ProxyDocumentChangeError::CouldNotSendRequest(_) => Code::RemoteCouldNotSendRequest,
                ProxyDocumentChangeError::AuthenticationError => Code::RemoteInvalidApiKey,
                ProxyDocumentChangeError::BadRequest { .. } => Code::RemoteBadRequest,
                ProxyDocumentChangeError::Timeout => Code::RemoteTimeout,
                ProxyDocumentChangeError::RemoteError { .. } => Code::RemoteRemoteError,
                ProxyDocumentChangeError::CouldNotParseResponse { .. } => Code::RemoteBadResponse,
            };
            ResponseError::from_msg(message, code)
        }
    }

    #[derive(Debug, thiserror::Error)]
    #[error(transparent)]
    pub struct ReqwestErrorWithoutUrl(reqwest::Error);
    impl ReqwestErrorWithoutUrl {
        pub fn new(inner: reqwest::Error) -> Self {
            Self(inner.without_url())
        }
    }

    fn response_from_remote(response: &Result<String, ReqwestErrorWithoutUrl>) -> String {
        match response {
            Ok(response) => {
                format!(":\n - response from remote: {}", response)
            }
            Err(error) => {
                format!(":\n - additionally, could not retrieve response from remote: {error}")
            }
        }
    }
}

pub const PROXY_ORIGIN_REMOTE_HEADER: &str = "Meili-Proxy-Origin-Remote";
pub const PROXY_ORIGIN_TASK_UID_HEADER: &str = "Meili-Proxy-Origin-TaskUid";
pub fn origin_from_req(req: &HttpRequest) -> Result<Option<Origin>, MeilisearchHttpError> {
    let (remote_name, task_uid) = match (
        req.headers().get(PROXY_ORIGIN_REMOTE_HEADER),
        req.headers().get(PROXY_ORIGIN_TASK_UID_HEADER),
    ) {
        (None, None) => return Ok(None),
        (None, Some(_)) => {
            return Err(MeilisearchHttpError::InconsistentOriginHeaders { is_remote_missing: true })
        }
        (Some(_), None) => {
            return Err(MeilisearchHttpError::InconsistentOriginHeaders {
                is_remote_missing: false,
            })
        }
        (Some(remote_name), Some(task_uid)) => (
            urlencoding::decode(remote_name.to_str().map_err(|err| {
                MeilisearchHttpError::InvalidHeaderValue {
                    header_name: PROXY_ORIGIN_REMOTE_HEADER,
                    msg: format!("while parsing remote name as UTF-8: {err}"),
                }
            })?)
            .map_err(|err| MeilisearchHttpError::InvalidHeaderValue {
                header_name: PROXY_ORIGIN_REMOTE_HEADER,
                msg: format!("while URL-decoding remote name: {err}"),
            })?,
            urlencoding::decode(task_uid.to_str().map_err(|err| {
                MeilisearchHttpError::InvalidHeaderValue {
                    header_name: PROXY_ORIGIN_TASK_UID_HEADER,
                    msg: format!("while parsing task UID as UTF-8: {err}"),
                }
            })?)
            .map_err(|err| MeilisearchHttpError::InvalidHeaderValue {
                header_name: PROXY_ORIGIN_TASK_UID_HEADER,
                msg: format!("while URL-decoding task UID: {err}"),
            })?,
        ),
    };

    let task_uid: usize =
        task_uid.parse().map_err(|err| MeilisearchHttpError::InvalidHeaderValue {
            header_name: PROXY_ORIGIN_TASK_UID_HEADER,
            msg: format!("while parsing the task UID as an integer: {err}"),
        })?;

    Ok(Some(Origin { remote_name: remote_name.into_owned(), task_uid }))
}
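The two `Meili-Proxy-Origin-*` headers carry the originating remote's name and task uid: URL-encoded by the sending side and decoded (then integer-parsed) by `origin_from_req` on the receiving side. A small sketch of that encode/decode round trip using the `urlencoding` crate; the remote name `"ms 0/eu"` is hypothetical, chosen only to exercise the encoding:

    // Sketch of the origin-header round trip (hypothetical remote name).
    fn main() {
        let remote_name = "ms 0/eu";
        let task_uid: usize = 42;

        // sending side: values are URL-encoded before being put in the headers
        let encoded_remote = urlencoding::encode(remote_name).into_owned();
        let encoded_uid = task_uid.to_string(); // digits only, already URL-safe

        // receiving side: decode, then parse the uid back to an integer
        let decoded_remote = urlencoding::decode(&encoded_remote).unwrap();
        let decoded_uid: usize = encoded_uid.parse().unwrap();

        assert_eq!(decoded_remote, remote_name);
        assert_eq!(decoded_uid, task_uid);
    }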
@@ -180,7 +180,7 @@ pub fn is_dry_run(req: &HttpRequest, opt: &Opt) -> Result<bool, ResponseError> {
        .is_some_and(|s| s.to_lowercase() == "true"))
}

-#[derive(Debug, Serialize, ToSchema)]
+#[derive(Debug, Serialize, Deserialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct SummarizedTaskView {
    /// The task unique identifier.
@@ -194,7 +194,10 @@ pub struct SummarizedTaskView {
    #[serde(rename = "type")]
    kind: Kind,
    /// The date on which the task was enqueued.
-   #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+   #[serde(
+       serialize_with = "time::serde::rfc3339::serialize",
+       deserialize_with = "time::serde::rfc3339::deserialize"
+   )]
    enqueued_at: OffsetDateTime,
}
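Deriving `Deserialize` (plus the rfc3339 `deserialize_with`) is what lets the proxy's `parse_response::<SummarizedTaskView>` read a remote's reply back into the same struct it serializes. A hedged sketch with a local mirror of the payload shape, assuming the field set visible in this diff and Meilisearch's usual camelCase task JSON:

    use serde::Deserialize;
    use time::OffsetDateTime;

    // Local mirror for illustration only; the real struct is SummarizedTaskView.
    #[derive(Debug, Deserialize)]
    #[serde(rename_all = "camelCase")]
    struct SummarizedTaskMirror {
        task_uid: u64,
        index_uid: Option<String>,
        status: String,
        #[serde(rename = "type")]
        kind: String,
        #[serde(deserialize_with = "time::serde::rfc3339::deserialize")]
        enqueued_at: OffsetDateTime,
    }

    fn main() {
        let body = r#"{
            "taskUid": 7,
            "indexUid": "movies",
            "status": "enqueued",
            "type": "documentAdditionOrUpdate",
            "enqueuedAt": "2025-01-01T00:00:00Z"
        }"#;
        // the proxy performs the equivalent of this parse on remote replies
        let task: SummarizedTaskMirror = serde_json::from_str(body).unwrap();
        assert_eq!(task.task_uid, 7);
    }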
@@ -8,12 +8,13 @@ use index_scheduler::IndexScheduler;
use itertools::{EitherOrBoth, Itertools};
use meilisearch_types::deserr::DeserrJsonError;
use meilisearch_types::error::deserr_codes::{
-   InvalidNetworkRemotes, InvalidNetworkSearchApiKey, InvalidNetworkSelf, InvalidNetworkUrl,
+   InvalidNetworkRemotes, InvalidNetworkSearchApiKey, InvalidNetworkSelf, InvalidNetworkSharding,
+   InvalidNetworkUrl, InvalidNetworkWriteApiKey,
};
use meilisearch_types::error::ResponseError;
-use meilisearch_types::features::{Network as DbNetwork, Remote as DbRemote};
use meilisearch_types::keys::actions;
use meilisearch_types::milli::update::Setting;
+use meilisearch_types::network::{Network as DbNetwork, Remote as DbRemote};
use serde::Serialize;
use tracing::debug;
use utoipa::{OpenApi, ToSchema};
@@ -57,9 +58,9 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
    {
        "self": "ms-0",
        "remotes": {
-           "ms-0": Remote { url: Setting::Set("http://localhost:7700".into()), search_api_key: Setting::Reset },
-           "ms-1": Remote { url: Setting::Set("http://localhost:7701".into()), search_api_key: Setting::Set("foo".into()) },
-           "ms-2": Remote { url: Setting::Set("http://localhost:7702".into()), search_api_key: Setting::Set("bar".into()) },
+           "ms-0": Remote { url: Setting::Set("http://localhost:7700".into()), search_api_key: Setting::Reset, write_api_key: Setting::Reset },
+           "ms-1": Remote { url: Setting::Set("http://localhost:7701".into()), search_api_key: Setting::Set("foo".into()), write_api_key: Setting::Set("bar".into()) },
+           "ms-2": Remote { url: Setting::Set("http://localhost:7702".into()), search_api_key: Setting::Set("bar".into()), write_api_key: Setting::Set("foo".into()) },
        }
    })),
    (status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
@@ -88,9 +89,9 @@ async fn get_network(
#[schema(rename_all = "camelCase")]
pub struct Remote {
    #[schema(value_type = Option<String>, example = json!({
-       "ms-0": Remote { url: Setting::Set("http://localhost:7700".into()), search_api_key: Setting::Reset },
-       "ms-1": Remote { url: Setting::Set("http://localhost:7701".into()), search_api_key: Setting::Set("foo".into()) },
-       "ms-2": Remote { url: Setting::Set("http://localhost:7702".into()), search_api_key: Setting::Set("bar".into()) },
+       "ms-0": Remote { url: Setting::Set("http://localhost:7700".into()), search_api_key: Setting::Reset, write_api_key: Setting::Reset },
+       "ms-1": Remote { url: Setting::Set("http://localhost:7701".into()), search_api_key: Setting::Set("foo".into()), write_api_key: Setting::Set("bar".into()) },
+       "ms-2": Remote { url: Setting::Set("http://localhost:7702".into()), search_api_key: Setting::Set("bar".into()), write_api_key: Setting::Set("foo".into()) },
    }))]
    #[deserr(default, error = DeserrJsonError<InvalidNetworkUrl>)]
    #[serde(default)]
@@ -99,6 +100,10 @@ pub struct Remote {
    #[deserr(default, error = DeserrJsonError<InvalidNetworkSearchApiKey>)]
    #[serde(default)]
    pub search_api_key: Setting<String>,
+   #[schema(value_type = Option<String>, example = json!("XWnBI8QHUc-4IlqbKPLUDuhftNq19mQtjc6JvmivzJU"))]
+   #[deserr(default, error = DeserrJsonError<InvalidNetworkWriteApiKey>)]
+   #[serde(default)]
+   pub write_api_key: Setting<String>,
}

#[derive(Debug, Deserr, ToSchema, Serialize)]
@@ -114,6 +119,10 @@ pub struct Network {
    #[serde(default, rename = "self")]
    #[deserr(default, rename = "self", error = DeserrJsonError<InvalidNetworkSelf>)]
    pub local: Setting<String>,
+   #[schema(value_type = Option<bool>, example = json!(true))]
+   #[serde(default)]
+   #[deserr(default, error = DeserrJsonError<InvalidNetworkSharding>)]
+   pub sharding: Setting<bool>,
}

impl Remote {
@@ -136,6 +145,7 @@ impl Remote {
            Ok(url)
        })?,
        search_api_key: self.search_api_key.set(),
+       write_api_key: self.write_api_key.set(),
    })
}
}
@@ -174,9 +184,9 @@ impl Aggregate for PatchNetworkAnalytics {
    {
        "self": "ms-0",
        "remotes": {
-           "ms-0": Remote { url: Setting::Set("http://localhost:7700".into()), search_api_key: Setting::Reset },
-           "ms-1": Remote { url: Setting::Set("http://localhost:7701".into()), search_api_key: Setting::Set("foo".into()) },
-           "ms-2": Remote { url: Setting::Set("http://localhost:7702".into()), search_api_key: Setting::Set("bar".into()) },
+           "ms-0": Remote { url: Setting::Set("http://localhost:7700".into()), search_api_key: Setting::Reset, write_api_key: Setting::Reset },
+           "ms-1": Remote { url: Setting::Set("http://localhost:7701".into()), search_api_key: Setting::Set("foo".into()), write_api_key: Setting::Set("bar".into()) },
+           "ms-2": Remote { url: Setting::Set("http://localhost:7702".into()), search_api_key: Setting::Set("bar".into()), write_api_key: Setting::Set("foo".into()) },
        }
    })),
    (status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
@@ -207,6 +217,19 @@ async fn patch_network(
        Setting::NotSet => old_network.local,
    };

+   let merged_sharding = match new_network.sharding {
+       Setting::Set(new_sharding) => new_sharding,
+       Setting::Reset => false,
+       Setting::NotSet => old_network.sharding,
+   };
+
+   if merged_sharding && merged_self.is_none() {
+       return Err(ResponseError::from_msg(
+           "`.sharding`: enabling the sharding requires `.self` to be set\n - Hint: Disable `sharding` or set `self` to a value.".into(),
+           meilisearch_types::error::Code::InvalidNetworkSharding,
+       ));
+   }
+
    let merged_remotes = match new_network.remotes {
        Setting::Set(new_remotes) => {
            let mut merged_remotes = BTreeMap::new();
@@ -217,9 +240,17 @@ async fn patch_network(
            {
                match either_or_both {
                    EitherOrBoth::Both((key, old), (_, Some(new))) => {
-                       let DbRemote { url: old_url, search_api_key: old_search_api_key } = old;
+                       let DbRemote {
+                           url: old_url,
+                           search_api_key: old_search_api_key,
+                           write_api_key: old_write_api_key,
+                       } = old;

-                       let Remote { url: new_url, search_api_key: new_search_api_key } = new;
+                       let Remote {
+                           url: new_url,
+                           search_api_key: new_search_api_key,
+                           write_api_key: new_write_api_key,
+                       } = new;

                        let merged = DbRemote {
                            url: match new_url {
@@ -247,6 +278,11 @@ async fn patch_network(
                                Setting::Reset => None,
                                Setting::NotSet => old_search_api_key,
                            },
+                           write_api_key: match new_write_api_key {
+                               Setting::Set(new_write_api_key) => Some(new_write_api_key),
+                               Setting::Reset => None,
+                               Setting::NotSet => old_write_api_key,
+                           },
                        };
                        merged_remotes.insert(key, merged);
                    }
@@ -274,7 +310,8 @@ async fn patch_network(
        &req,
    );

-   let merged_network = DbNetwork { local: merged_self, remotes: merged_remotes };
+   let merged_network =
+       DbNetwork { local: merged_self, remotes: merged_remotes, sharding: merged_sharding };
    index_scheduler.put_network(merged_network.clone())?;
    debug!(returns = ?merged_network, "Patch network");
    Ok(HttpResponse::Ok().json(merged_network))
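The merge logic above treats the three `Setting` states distinctly: `Set` overwrites the stored value, `Reset` clears it back to the default (`false` for `sharding`, `None` for API keys), and `NotSet` keeps whatever was stored. A self-contained sketch of that three-way merge, using a simplified stand-in for milli's `Setting<T>`:

    // Simplified stand-in for milli's Setting<T>, showing the merge rule used
    // by PATCH /network: Set overwrites, Reset clears, NotSet keeps the old value.
    enum Setting<T> { Set(T), Reset, NotSet }

    fn merge_bool(new: Setting<bool>, old: bool) -> bool {
        match new {
            Setting::Set(v) => v,
            Setting::Reset => false, // back to the default
            Setting::NotSet => old,  // field absent from the PATCH body
        }
    }

    fn main() {
        assert_eq!(merge_bool(Setting::Set(true), false), true);
        assert_eq!(merge_bool(Setting::Reset, true), false);
        assert_eq!(merge_bool(Setting::NotSet, true), true);
    }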
@@ -10,10 +10,10 @@ use actix_http::StatusCode;
use index_scheduler::{IndexScheduler, RoFeatures};
use itertools::Itertools;
use meilisearch_types::error::ResponseError;
-use meilisearch_types::features::{Network, Remote};
use meilisearch_types::milli::order_by_map::OrderByMap;
use meilisearch_types::milli::score_details::{ScoreDetails, WeightedScoreValue};
use meilisearch_types::milli::{self, DocumentId, OrderBy, TimeBudget, DEFAULT_VALUES_PER_FACET};
+use meilisearch_types::network::{Network, Remote};
use roaring::RoaringBitmap;
use tokio::task::JoinHandle;

@@ -745,10 +745,9 @@ impl SearchByIndex {
    match sort.iter().map(|s| milli::AscDesc::from_str(s)).collect() {
        Ok(sorts) => sorts,
        Err(asc_desc_error) => {
-           return Err(milli::Error::from(milli::SortError::from(
-               asc_desc_error,
-           ))
-           .into())
+           return Err(milli::SortError::from(asc_desc_error)
+               .into_search_error()
+               .into())
        }
    };
    Some(sorts)

@@ -1,6 +1,6 @@
pub use error::ProxySearchError;
use error::ReqwestErrorWithoutUrl;
-use meilisearch_types::features::Remote;
+use meilisearch_types::network::Remote;
use rand::Rng as _;
use reqwest::{Client, Response, StatusCode};
use serde::de::DeserializeOwned;

@@ -16,7 +16,7 @@ use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::heed::RoTxn;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::locales::Locale;
-use meilisearch_types::milli::index::{self, SearchParameters};
+use meilisearch_types::milli::index::{self, EmbeddingsWithMetadata, SearchParameters};
use meilisearch_types::milli::score_details::{ScoreDetails, ScoringStrategy};
use meilisearch_types::milli::vector::parsed_vectors::ExplicitVectors;
use meilisearch_types::milli::vector::Embedder;
@@ -1051,6 +1051,7 @@ pub fn prepare_search<'t>(
        .unwrap_or(DEFAULT_PAGINATION_MAX_TOTAL_HITS);

    search.exhaustive_number_hits(is_finite_pagination);
+   search.max_total_hits(Some(max_total_hits));
    search.scoring_strategy(
        if query.show_ranking_score
            || query.show_ranking_score_details
@@ -1091,7 +1092,7 @@ pub fn prepare_search<'t>(
    let sort = match sort.iter().map(|s| AscDesc::from_str(s)).collect() {
        Ok(sorts) => sorts,
        Err(asc_desc_error) => {
-           return Err(milli::Error::from(SortError::from(asc_desc_error)).into())
+           return Err(SortError::from(asc_desc_error).into_search_error().into())
        }
    };

@@ -1527,8 +1528,11 @@ impl<'a> HitMaker<'a> {
        Some(Value::Object(map)) => map,
        _ => Default::default(),
    };
-   for (name, (vector, regenerate)) in self.index.embeddings(self.rtxn, id)? {
-       let embeddings = ExplicitVectors { embeddings: Some(vector.into()), regenerate };
+   for (name, EmbeddingsWithMetadata { embeddings, regenerate, has_fragments: _ }) in
+       self.index.embeddings(self.rtxn, id)?
+   {
+       let embeddings =
+           ExplicitVectors { embeddings: Some(embeddings.into()), regenerate };
        vectors.insert(
            name,
            serde_json::to_value(embeddings).map_err(InternalError::SerdeJson)?,
@@ -419,14 +419,14 @@ async fn error_add_api_key_invalid_parameters_actions() {
    let (response, code) = server.add_api_key(content).await;

    meili_snap::snapshot!(code, @"400 Bad Request");
-   meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
+   meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r#"
    {
-     "message": "Unknown value `doc.add` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `export`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`",
+     "message": "Unknown value `doc.add` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `export`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`, `*.get`",
      "code": "invalid_api_key_actions",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"
    }
-   "###);
+   "#);
}

#[actix_rt::test]
@@ -790,7 +790,7 @@ async fn list_api_keys() {
    meili_snap::snapshot!(code, @"201 Created");

    let (response, code) = server.list_api_keys("").await;
-   meili_snap::snapshot!(meili_snap::json_string!(response, { ".results[].createdAt" => "[ignored]", ".results[].updatedAt" => "[ignored]", ".results[].uid" => "[ignored]", ".results[].key" => "[ignored]" }), @r###"
+   meili_snap::snapshot!(meili_snap::json_string!(response, { ".results[].createdAt" => "[ignored]", ".results[].updatedAt" => "[ignored]", ".results[].uid" => "[ignored]", ".results[].key" => "[ignored]" }), @r#"
    {
      "results": [
        {
@@ -850,6 +850,22 @@ async fn list_api_keys() {
          "createdAt": "[ignored]",
          "updatedAt": "[ignored]"
        },
+       {
+         "name": "Default Read-Only Admin API Key",
+         "description": "Use it to read information across the whole database. Caution! Do not expose this key on a public frontend",
+         "key": "[ignored]",
+         "uid": "[ignored]",
+         "actions": [
+           "*.get",
+           "keys.get"
+         ],
+         "indexes": [
+           "*"
+         ],
+         "expiresAt": null,
+         "createdAt": "[ignored]",
+         "updatedAt": "[ignored]"
+       },
        {
          "name": "Default Chat API Key",
          "description": "Use it to chat and search from the frontend",
@@ -869,9 +885,9 @@ async fn list_api_keys() {
      ],
      "offset": 0,
      "limit": 20,
-     "total": 4
+     "total": 5
    }
-   "###);
+   "#);
    meili_snap::snapshot!(code, @"200 OK");
}

@@ -91,14 +91,14 @@ async fn create_api_key_bad_actions() {
    // can't parse
    let (response, code) = server.add_api_key(json!({ "actions": ["doggo"] })).await;
    snapshot!(code, @"400 Bad Request");
-   snapshot!(json_string!(response), @r###"
+   snapshot!(json_string!(response), @r#"
    {
-     "message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `export`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`",
+     "message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `export`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`, `*.get`",
      "code": "invalid_api_key_actions",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"
    }
-   "###);
+   "#);
}

#[actix_rt::test]
|
||||
pub offset: Option<usize>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub fields: Option<Vec<&'static str>>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub sort: Option<Vec<&'static str>>,
|
||||
pub retrieve_vectors: bool,
|
||||
}
|
||||
|
||||
@@ -3,8 +3,12 @@ pub mod index;
|
||||
pub mod server;
|
||||
pub mod service;
|
||||
|
||||
use std::fmt::{self, Display};
|
||||
use std::{
|
||||
collections::BTreeMap,
|
||||
fmt::{self, Display},
|
||||
};
|
||||
|
||||
use actix_http::StatusCode;
|
||||
#[allow(unused)]
|
||||
pub use index::GetAllDocumentsOptions;
|
||||
use meili_snap::json_string;
|
||||
@@ -13,6 +17,10 @@ use serde::{Deserialize, Serialize};
|
||||
#[allow(unused)]
|
||||
pub use server::{default_settings, Server};
|
||||
use tokio::sync::OnceCell;
|
||||
use wiremock::{
|
||||
matchers::{method, path},
|
||||
Mock, MockServer, Request, ResponseTemplate,
|
||||
};
|
||||
|
||||
use crate::common::index::Index;
|
||||
|
||||
@@ -508,3 +516,166 @@ pub async fn shared_index_with_geo_documents() -> &'static Index<'static, Shared
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn shared_index_for_fragments() -> Index<'static, Shared> {
|
||||
static INDEX: OnceCell<(Server<Shared>, String)> = OnceCell::const_new();
|
||||
let (server, uid) = INDEX
|
||||
.get_or_init(|| async {
|
||||
let (server, uid, _) = init_fragments_index().await;
|
||||
(server.into_shared(), uid)
|
||||
})
|
||||
.await;
|
||||
server._index(uid).to_shared()
|
||||
}
|
||||
|
||||
async fn fragment_mock_server() -> String {
|
||||
let text_to_embedding: BTreeMap<_, _> = vec![
|
||||
("kefir", [0.5, -0.5, 0.0]),
|
||||
("intel", [1.0, 1.0, 0.0]),
|
||||
("dustin", [-0.5, 0.5, 0.0]),
|
||||
("bulldog", [0.0, 0.0, 1.0]),
|
||||
("labrador", [0.0, 0.0, -1.0]),
|
||||
("{{ doc.", [-9999.0, -9999.0, -9999.0]), // If a template didn't render
|
||||
]
|
||||
.into_iter()
|
||||
.collect();
|
||||
|
||||
let mock_server = Box::leak(Box::new(MockServer::start().await));
|
||||
|
||||
Mock::given(method("POST"))
|
||||
.and(path("/"))
|
||||
.respond_with(move |req: &Request| {
|
||||
let text = String::from_utf8_lossy(&req.body).to_string();
|
||||
|
||||
let mut data = [0.0, 0.0, 0.0];
|
        for (inner_text, inner_data) in &text_to_embedding {
            if text.contains(inner_text) {
                for (i, &value) in inner_data.iter().enumerate() {
                    data[i] += value;
                }
            }
        }
        ResponseTemplate::new(200).set_body_json(json!({ "data": data }))
    })
    .mount(mock_server)
    .await;

    mock_server.uri()
}

pub async fn init_fragments_index() -> (Server<Owned>, String, crate::common::Value) {
    let url = fragment_mock_server().await;
    let server = Server::new().await;
    let index = server.unique_index();

    let (_response, code) = server.set_features(json!({"multimodal": true})).await;
    assert_eq!(code, StatusCode::OK);

    // Configure the index to use our mock embedder
    let settings = json!({
        "embedders": {
            "rest": {
                "source": "rest",
                "url": url,
                "dimensions": 3,
                "request": "{{fragment}}",
                "response": {
                    "data": "{{embedding}}"
                },
                "indexingFragments": {
                    "withBreed": {"value": "{{ doc.name }} is a {{ doc.breed }}"},
                    "basic": {"value": "{{ doc.name }} is a dog"},
                },
                "searchFragments": {
                    "justBreed": {"value": "It's a {{ media.breed }}"},
                    "justName": {"value": "{{ media.name }} is a dog"},
                    "query": {"value": "Some pre-prompt for query {{ q }}"},
                }
            },
        },
    });
    let (response, code) = index.update_settings(settings.clone()).await;
    assert_eq!(code, StatusCode::ACCEPTED);

    server.wait_task(response.uid()).await.succeeded();

    // Send documents
    let documents = json!([
        {"id": 0, "name": "kefir"},
        {"id": 1, "name": "echo", "_vectors": { "rest": [1, 1, 1] }},
        {"id": 2, "name": "intel", "breed": "labrador"},
        {"id": 3, "name": "dustin", "breed": "bulldog"},
    ]);
    let (value, code) = index.add_documents(documents, None).await;
    assert_eq!(code, StatusCode::ACCEPTED);

    let _task = index.wait_task(value.uid()).await.succeeded();

    let uid = index.uid.clone();
    (server, uid, settings)
}

pub async fn init_fragments_index_composite() -> (Server<Owned>, String, crate::common::Value) {
    let url = fragment_mock_server().await;
    let server = Server::new().await;
    let index = server.unique_index();

    let (_response, code) = server.set_features(json!({"multimodal": true})).await;
    assert_eq!(code, StatusCode::OK);

    let (_response, code) = server.set_features(json!({"compositeEmbedders": true})).await;
    assert_eq!(code, StatusCode::OK);

    // Configure the index to use our mock embedder
    let settings = json!({
        "embedders": {
            "rest": {
                "source": "composite",
                "searchEmbedder": {
                    "source": "rest",
                    "url": url,
                    "dimensions": 3,
                    "request": "{{fragment}}",
                    "response": {
                        "data": "{{embedding}}"
                    },
                    "searchFragments": {
                        "query": {"value": "Some pre-prompt for query {{ q }}"},
                    }
                },
                "indexingEmbedder": {
                    "source": "rest",
                    "url": url,
                    "dimensions": 3,
                    "request": "{{fragment}}",
                    "response": {
                        "data": "{{embedding}}"
                    },
                    "indexingFragments": {
                        "withBreed": {"value": "{{ doc.name }} is a {{ doc.breed }}"},
                        "basic": {"value": "{{ doc.name }} is a dog"},
                    }
                },
            },
        },
    });
    let (response, code) = index.update_settings(settings.clone()).await;
    assert_eq!(code, StatusCode::ACCEPTED);

    server.wait_task(response.uid()).await.succeeded();

    // Send documents
    let documents = json!([
        {"id": 0, "name": "kefir"},
        {"id": 1, "name": "echo", "_vectors": { "rest": [1, 1, 1] }},
        {"id": 2, "name": "intel", "breed": "labrador"},
        {"id": 3, "name": "dustin", "breed": "bulldog"},
    ]);
    let (value, code) = index.add_documents(documents, None).await;
    assert_eq!(code, StatusCode::ACCEPTED);

    index.wait_task(value.uid()).await.succeeded();

    let uid = index.uid.clone();
    (server, uid, settings)
}
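
A minimal usage sketch, not part of the changeset above: assuming `init_fragments_index` is exported from the test crate's `common` module, and that `Server::index`, `Index::search_post`, and the `media` search parameter behave as elsewhere in this test suite, a test could exercise the mock embedder end to end. The request shape and the `200` assertion are illustrative assumptions, not code from this diff.

#[actix_rt::test]
async fn fragments_smoke_test() {
    // Hypothetical test built on the helper above.
    let (server, uid, _settings) = init_fragments_index().await;
    let index = server.index(&uid);

    // A media-only search should render the "justBreed" search fragment
    // and hit the mock embedder started by fragment_mock_server().
    let (response, code) = index
        .search_post(json!({
            "hybrid": { "embedder": "rest", "semanticRatio": 1.0 },
            "media": { "breed": "labrador" }
        }))
        .await;
    assert_eq!(code, 200, "{response}");
}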

@@ -35,7 +35,7 @@ pub struct Server<State = Owned> {
pub static TEST_TEMP_DIR: Lazy<TempDir> = Lazy::new(|| TempDir::new().unwrap());

impl Server<Owned> {
    fn into_shared(self) -> Server<Shared> {
    pub(super) fn into_shared(self) -> Server<Shared> {
        Server { service: self.service, _dir: self._dir, _marker: PhantomData }
    }

@@ -97,6 +97,7 @@ impl Server<Owned> {
        self.use_api_key(master_key);
        let (response, code) = self.list_api_keys("").await;
        assert_eq!(200, code, "{:?}", response);
        // TODO: relying on the order of keys is not ideal, we should use the name instead
        let admin_key = &response["results"][1]["key"];
        self.use_api_key(admin_key.as_str().unwrap());
    }

@@ -465,6 +466,7 @@ pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
            // Having 2 threads makes the tests way faster
            max_indexing_threads: MaxThreads::from_str("2").unwrap(),
            experimental_no_edition_2024_for_settings: false,
            experimental_no_edition_2024_for_dumps: false,
        },
        experimental_enable_metrics: false,
        ..Parser::parse_from(None as Option<&str>)

@@ -5,8 +5,8 @@ use urlencoding::encode as urlencode;

use crate::common::encoder::Encoder;
use crate::common::{
    shared_does_not_exists_index, shared_empty_index, shared_index_with_test_set,
    GetAllDocumentsOptions, Server, Value,
    shared_does_not_exists_index, shared_empty_index, shared_index_with_geo_documents,
    shared_index_with_test_set, GetAllDocumentsOptions, Server, Value,
};
use crate::json;

@@ -83,6 +83,311 @@ async fn get_document() {
    );
}

#[actix_rt::test]
async fn get_document_sorted() {
    let server = Server::new_shared();
    let index = server.unique_index();
    index.load_test_set().await;

    let (task, _status_code) =
        index.update_settings_sortable_attributes(json!(["age", "email", "gender", "name"])).await;
    server.wait_task(task.uid()).await.succeeded();

    let (response, _code) = index
        .get_all_documents(GetAllDocumentsOptions {
            fields: Some(vec!["id", "age", "email"]),
            sort: Some(vec!["age:asc", "email:desc"]),
            ..Default::default()
        })
        .await;
    let results = response["results"].as_array().unwrap();
    snapshot!(json_string!(results), @r#"
    [
      {
        "id": 5,
        "age": 20,
        "email": "warrenwatson@chorizon.com"
      },
      {
        "id": 6,
        "age": 20,
        "email": "sheliaberry@chorizon.com"
      },
      {
        "id": 57,
        "age": 20,
        "email": "kaitlinconner@chorizon.com"
      },
      {
        "id": 45,
        "age": 20,
        "email": "irenebennett@chorizon.com"
      },
      {
        "id": 40,
        "age": 21,
        "email": "staffordemerson@chorizon.com"
      },
      {
        "id": 41,
        "age": 21,
        "email": "salinasgamble@chorizon.com"
      },
      {
        "id": 63,
        "age": 21,
        "email": "knowleshebert@chorizon.com"
      },
      {
        "id": 50,
        "age": 21,
        "email": "guerramcintyre@chorizon.com"
      },
      {
        "id": 44,
        "age": 22,
        "email": "jonispears@chorizon.com"
      },
      {
        "id": 56,
        "age": 23,
        "email": "tuckerbarry@chorizon.com"
      },
      {
        "id": 51,
        "age": 23,
        "email": "keycervantes@chorizon.com"
      },
      {
        "id": 60,
        "age": 23,
        "email": "jodyherrera@chorizon.com"
      },
      {
        "id": 70,
        "age": 23,
        "email": "glassperkins@chorizon.com"
      },
      {
        "id": 75,
        "age": 24,
        "email": "emmajacobs@chorizon.com"
      },
      {
        "id": 68,
        "age": 24,
        "email": "angelinadyer@chorizon.com"
      },
      {
        "id": 17,
        "age": 25,
        "email": "ortegabrennan@chorizon.com"
      },
      {
        "id": 76,
        "age": 25,
        "email": "claricegardner@chorizon.com"
      },
      {
        "id": 43,
        "age": 25,
        "email": "arnoldbender@chorizon.com"
      },
      {
        "id": 12,
        "age": 25,
        "email": "aidakirby@chorizon.com"
      },
      {
        "id": 9,
        "age": 26,
        "email": "kellimendez@chorizon.com"
      }
    ]
    "#);

    let (response, _code) = index
        .get_all_documents(GetAllDocumentsOptions {
            fields: Some(vec!["id", "gender", "name"]),
            sort: Some(vec!["gender:asc", "name:asc"]),
            ..Default::default()
        })
        .await;
    let results = response["results"].as_array().unwrap();
    snapshot!(json_string!(results), @r#"
    [
      {
        "id": 3,
        "name": "Adeline Flynn",
        "gender": "female"
      },
      {
        "id": 12,
        "name": "Aida Kirby",
        "gender": "female"
      },
      {
        "id": 68,
        "name": "Angelina Dyer",
        "gender": "female"
      },
      {
        "id": 15,
        "name": "Aurelia Contreras",
        "gender": "female"
      },
      {
        "id": 36,
        "name": "Barbra Valenzuela",
        "gender": "female"
      },
      {
        "id": 23,
        "name": "Blanca Mcclain",
        "gender": "female"
      },
      {
        "id": 53,
        "name": "Caitlin Burnett",
        "gender": "female"
      },
      {
        "id": 71,
        "name": "Candace Sawyer",
        "gender": "female"
      },
      {
        "id": 65,
        "name": "Carole Rowland",
        "gender": "female"
      },
      {
        "id": 33,
        "name": "Cecilia Greer",
        "gender": "female"
      },
      {
        "id": 1,
        "name": "Cherry Orr",
        "gender": "female"
      },
      {
        "id": 38,
        "name": "Christina Short",
        "gender": "female"
      },
      {
        "id": 7,
        "name": "Chrystal Boyd",
        "gender": "female"
      },
      {
        "id": 76,
        "name": "Clarice Gardner",
        "gender": "female"
      },
      {
        "id": 73,
        "name": "Eleanor Shepherd",
        "gender": "female"
      },
      {
        "id": 75,
        "name": "Emma Jacobs",
        "gender": "female"
      },
      {
        "id": 16,
        "name": "Estella Bass",
        "gender": "female"
      },
      {
        "id": 62,
        "name": "Estelle Ramirez",
        "gender": "female"
      },
      {
        "id": 20,
        "name": "Florence Long",
        "gender": "female"
      },
      {
        "id": 42,
        "name": "Graciela Russell",
        "gender": "female"
      }
    ]
    "#);
}

#[actix_rt::test]
async fn get_document_geosorted() {
    let index = shared_index_with_geo_documents().await;

    let (response, _code) = index
        .get_all_documents(GetAllDocumentsOptions {
            sort: Some(vec!["_geoPoint(45.4777599, 9.1967508):asc"]),
            ..Default::default()
        })
        .await;
    let results = response["results"].as_array().unwrap();
    snapshot!(json_string!(results), @r#"
    [
      {
        "id": 2,
        "name": "La Bella Italia",
        "address": "456 Elm Street, Townsville",
        "type": "Italian",
        "rating": 9,
        "_geo": {
          "lat": "45.4777599",
          "lng": "9.1967508"
        }
      },
      {
        "id": 1,
        "name": "Taco Truck",
        "address": "444 Salsa Street, Burritoville",
        "type": "Mexican",
        "rating": 9,
        "_geo": {
          "lat": 34.0522,
          "lng": -118.2437
        }
      },
      {
        "id": 3,
        "name": "Crêpe Truck",
        "address": "2 Billig Avenue, Rouenville",
        "type": "French",
        "rating": 10
      }
    ]
    "#);
}

#[actix_rt::test]
async fn get_document_sort_the_unsortable() {
    let index = shared_index_with_test_set().await;

    let (response, _code) = index
        .get_all_documents(GetAllDocumentsOptions {
            fields: Some(vec!["id", "name"]),
            sort: Some(vec!["name:asc"]),
            ..Default::default()
        })
        .await;

    snapshot!(json_string!(response), @r#"
    {
      "message": "Attribute `name` is not sortable. This index does not have configured sortable attributes.",
      "code": "invalid_document_sort",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid_document_sort"
    }
    "#);
}

#[actix_rt::test]
async fn error_get_unexisting_index_all_documents() {
    let index = shared_does_not_exists_index().await;

@@ -1,5 +1,4 @@
use crate::common::{shared_does_not_exists_index, Server};

use crate::json;

#[actix_rt::test]

@@ -46,7 +46,7 @@ async fn errors_on_param() {
    meili_snap::snapshot!(code, @"400 Bad Request");
    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
    {
      "message": "Unknown field `selfie`: expected one of `remotes`, `self`",
      "message": "Unknown field `selfie`: expected one of `remotes`, `self`, `sharding`",
      "code": "bad_request",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#bad_request"

@@ -149,7 +149,7 @@ async fn errors_on_param() {
    meili_snap::snapshot!(code, @"400 Bad Request");
    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
    {
      "message": "Unknown field `doggo` inside `.remotes.new`: expected one of `url`, `searchApiKey`",
      "message": "Unknown field `doggo` inside `.remotes.new`: expected one of `url`, `searchApiKey`, `writeApiKey`",
      "code": "invalid_network_remotes",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid_network_remotes"

@@ -192,9 +192,11 @@ async fn errors_on_param() {
      "remotes": {
        "kefir": {
          "url": "http://localhost:7700",
          "searchApiKey": null
          "searchApiKey": null,
          "writeApiKey": null
        }
      }
      },
      "sharding": false
    }
    "###);
    let (response, code) = server

@@ -266,7 +268,8 @@ async fn auth() {
    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
    {
      "self": "master",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);

@@ -274,11 +277,12 @@ async fn auth() {

    meili_snap::snapshot!(code, @"200 OK");
    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
    {
      "self": "master",
      "remotes": {}
    }
    "###);
    {
      "self": "master",
      "remotes": {},
      "sharding": false
    }
    "###);

    // try get with get permission
    server.use_api_key(get_network_key.as_str().unwrap());

@@ -286,11 +290,12 @@ async fn auth() {

    meili_snap::snapshot!(code, @"200 OK");
    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
    {
      "self": "master",
      "remotes": {}
    }
    "###);
    {
      "self": "master",
      "remotes": {},
      "sharding": false
    }
    "###);

    // try update with update permission
    server.use_api_key(update_network_key.as_str().unwrap());

@@ -303,11 +308,12 @@ async fn auth() {

    meili_snap::snapshot!(code, @"200 OK");
    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
    {
      "self": "api_key",
      "remotes": {}
    }
    "###);
    {
      "self": "api_key",
      "remotes": {},
      "sharding": false
    }
    "###);

    // try with the other's permission
    let (response, code) = server.get_network().await;

@@ -383,7 +389,8 @@ async fn get_and_set_network() {
    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
    {
      "self": null,
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);

@@ -393,7 +400,8 @@ async fn get_and_set_network() {
    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
    {
      "self": "myself",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);

@@ -417,13 +425,16 @@ async fn get_and_set_network() {
      "remotes": {
        "myself": {
          "url": "http://localhost:7700",
          "searchApiKey": null
          "searchApiKey": null,
          "writeApiKey": null
        },
        "thy": {
          "url": "http://localhost:7701",
          "searchApiKey": "foo"
          "searchApiKey": "foo",
          "writeApiKey": null
        }
      }
      },
      "sharding": false
    }
    "###);

@@ -443,13 +454,16 @@ async fn get_and_set_network() {
      "remotes": {
        "myself": {
          "url": "http://localhost:7700",
          "searchApiKey": null
          "searchApiKey": null,
          "writeApiKey": null
        },
        "thy": {
          "url": "http://localhost:7701",
          "searchApiKey": "bar"
          "searchApiKey": "bar",
          "writeApiKey": null
        }
      }
      },
      "sharding": false
    }
    "###);

@@ -470,17 +484,21 @@ async fn get_and_set_network() {
      "remotes": {
        "myself": {
          "url": "http://localhost:7700",
          "searchApiKey": null
          "searchApiKey": null,
          "writeApiKey": null
        },
        "them": {
          "url": "http://localhost:7702",
          "searchApiKey": "baz"
          "searchApiKey": "baz",
          "writeApiKey": null
        },
        "thy": {
          "url": "http://localhost:7701",
          "searchApiKey": "bar"
          "searchApiKey": "bar",
          "writeApiKey": null
        }
      }
      },
      "sharding": false
    }
    "###);

@@ -498,13 +516,16 @@ async fn get_and_set_network() {
      "remotes": {
        "them": {
          "url": "http://localhost:7702",
          "searchApiKey": "baz"
          "searchApiKey": "baz",
          "writeApiKey": null
        },
        "thy": {
          "url": "http://localhost:7701",
          "searchApiKey": "bar"
          "searchApiKey": "bar",
          "writeApiKey": null
        }
      }
      },
      "sharding": false
    }
    "###);

@@ -518,13 +539,16 @@ async fn get_and_set_network() {
      "remotes": {
        "them": {
          "url": "http://localhost:7702",
          "searchApiKey": "baz"
          "searchApiKey": "baz",
          "writeApiKey": null
        },
        "thy": {
          "url": "http://localhost:7701",
          "searchApiKey": "bar"
          "searchApiKey": "bar",
          "writeApiKey": null
        }
      }
      },
      "sharding": false
    }
    "###);

@@ -538,13 +562,16 @@ async fn get_and_set_network() {
      "remotes": {
        "them": {
          "url": "http://localhost:7702",
          "searchApiKey": "baz"
          "searchApiKey": "baz",
          "writeApiKey": null
        },
        "thy": {
          "url": "http://localhost:7701",
          "searchApiKey": "bar"
          "searchApiKey": "bar",
          "writeApiKey": null
        }
      }
      },
      "sharding": false
    }
    "###);

@@ -553,60 +580,69 @@ async fn get_and_set_network() {

    meili_snap::snapshot!(code, @"200 OK");
    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
    {
      "self": "thy",
      "remotes": {
        "them": {
          "url": "http://localhost:7702",
          "searchApiKey": "baz"
        },
        "thy": {
          "url": "http://localhost:7701",
          "searchApiKey": "bar"
        }
      }
    }
    "###);
    {
      "self": "thy",
      "remotes": {
        "them": {
          "url": "http://localhost:7702",
          "searchApiKey": "baz",
          "writeApiKey": null
        },
        "thy": {
          "url": "http://localhost:7701",
          "searchApiKey": "bar",
          "writeApiKey": null
        }
      },
      "sharding": false
    }
    "###);

    // still doing nothing
    let (response, code) = server.set_network(json!({"remotes": {}})).await;

    meili_snap::snapshot!(code, @"200 OK");
    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
    {
      "self": "thy",
      "remotes": {
        "them": {
          "url": "http://localhost:7702",
          "searchApiKey": "baz"
        },
        "thy": {
          "url": "http://localhost:7701",
          "searchApiKey": "bar"
        }
      }
    }
    "###);
    {
      "self": "thy",
      "remotes": {
        "them": {
          "url": "http://localhost:7702",
          "searchApiKey": "baz",
          "writeApiKey": null
        },
        "thy": {
          "url": "http://localhost:7701",
          "searchApiKey": "bar",
          "writeApiKey": null
        }
      },
      "sharding": false
    }
    "###);

    // good time to check GET
    let (response, code) = server.get_network().await;

    meili_snap::snapshot!(code, @"200 OK");
    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
    {
      "self": "thy",
      "remotes": {
        "them": {
          "url": "http://localhost:7702",
          "searchApiKey": "baz"
        },
        "thy": {
          "url": "http://localhost:7701",
          "searchApiKey": "bar"
        }
      }
    }
    "###);
    {
      "self": "thy",
      "remotes": {
        "them": {
          "url": "http://localhost:7702",
          "searchApiKey": "baz",
          "writeApiKey": null
        },
        "thy": {
          "url": "http://localhost:7701",
          "searchApiKey": "bar",
          "writeApiKey": null
        }
      },
      "sharding": false
    }
    "###);

    // deleting everything
    let (response, code) = server

@@ -619,7 +655,8 @@ async fn get_and_set_network() {
    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
    {
      "self": "thy",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);
}

@@ -131,7 +131,8 @@ async fn remote_sharding() {
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms0",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);
    let (response, code) = ms1.set_network(json!({"self": "ms1"})).await;

@@ -139,7 +140,8 @@ async fn remote_sharding() {
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms1",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);
    let (response, code) = ms2.set_network(json!({"self": "ms2"})).await;

@@ -147,7 +149,8 @@ async fn remote_sharding() {
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms2",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);

@@ -436,7 +439,8 @@ async fn error_unregistered_remote() {
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms0",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);
    let (response, code) = ms1.set_network(json!({"self": "ms1"})).await;

@@ -444,7 +448,8 @@ async fn error_unregistered_remote() {
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms1",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);

@@ -554,7 +559,8 @@ async fn error_no_weighted_score() {
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms0",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);
    let (response, code) = ms1.set_network(json!({"self": "ms1"})).await;

@@ -562,7 +568,8 @@ async fn error_no_weighted_score() {
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms1",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);

@@ -687,7 +694,8 @@ async fn error_bad_response() {
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms0",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);
    let (response, code) = ms1.set_network(json!({"self": "ms1"})).await;

@@ -695,7 +703,8 @@ async fn error_bad_response() {
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms1",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);

@@ -824,7 +833,8 @@ async fn error_bad_request() {
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms0",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);
    let (response, code) = ms1.set_network(json!({"self": "ms1"})).await;

@@ -832,7 +842,8 @@ async fn error_bad_request() {
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms1",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);

@@ -954,7 +965,8 @@ async fn error_bad_request_facets_by_index() {
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms0",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);
    let (response, code) = ms1.set_network(json!({"self": "ms1"})).await;

@@ -962,7 +974,8 @@ async fn error_bad_request_facets_by_index() {
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms1",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);

@@ -1095,7 +1108,8 @@ async fn error_bad_request_facets_by_index_facet() {
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms0",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);
    let (response, code) = ms1.set_network(json!({"self": "ms1"})).await;

@@ -1103,7 +1117,8 @@ async fn error_bad_request_facets_by_index_facet() {
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms1",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);

@@ -1244,7 +1259,8 @@ async fn error_remote_does_not_answer() {
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms0",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);
    let (response, code) = ms1.set_network(json!({"self": "ms1"})).await;

@@ -1252,7 +1268,8 @@ async fn error_remote_does_not_answer() {
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms1",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);

@@ -1445,7 +1462,8 @@ async fn error_remote_404() {
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms0",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);
    let (response, code) = ms1.set_network(json!({"self": "ms1"})).await;

@@ -1453,7 +1471,8 @@ async fn error_remote_404() {
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms1",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);

@@ -1640,7 +1659,8 @@ async fn error_remote_sharding_auth() {
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms0",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);
    let (response, code) = ms1.set_network(json!({"self": "ms1"})).await;

@@ -1648,7 +1668,8 @@ async fn error_remote_sharding_auth() {
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms1",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);

@@ -1800,7 +1821,8 @@ async fn remote_sharding_auth() {
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms0",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);
    let (response, code) = ms1.set_network(json!({"self": "ms1"})).await;

@@ -1808,7 +1830,8 @@ async fn remote_sharding_auth() {
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms1",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);

@@ -1955,7 +1978,8 @@ async fn error_remote_500() {
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms0",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);
    let (response, code) = ms1.set_network(json!({"self": "ms1"})).await;

@@ -1963,7 +1987,8 @@ async fn error_remote_500() {
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms1",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);

@@ -2134,7 +2159,8 @@ async fn error_remote_500_once() {
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms0",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);
    let (response, code) = ms1.set_network(json!({"self": "ms1"})).await;

@@ -2142,7 +2168,8 @@ async fn error_remote_500_once() {
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms1",
      "remotes": {}
      "remotes": {},
      "sharding": false
    }
    "###);

@@ -2499,7 +2526,7 @@ pub struct LocalMeiliParams {

/// A server that exploits [`MockServer`] to provide a URL for testing the network.
pub struct LocalMeili {
    mock_server: MockServer,
    mock_server: &'static MockServer,
}

impl LocalMeili {

@@ -2508,7 +2535,7 @@ impl LocalMeili {
    }

    pub async fn with_params(server: Arc<Server>, params: LocalMeiliParams) -> Self {
        let mock_server = MockServer::start().await;
        let mock_server = Box::leak(Box::new(MockServer::start().await));

        // tokio won't let us execute asynchronous code from a sync function inside of an async test,
        // so instead we spawn another thread that will call the service on a brand new tokio runtime

@@ -2572,7 +2599,7 @@ impl LocalMeili {
                response.set_body_json(value)
            }
        })
        .mount(&mock_server)
        .mount(mock_server)
        .await;
        Self { mock_server }
    }
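
An aside on the `Box::leak` change above, not part of the diff: leaking a boxed value is the usual way to obtain a `&'static` reference in test code, so the mock server can be captured by spawned tasks that outlive the constructor's stack frame. A self-contained sketch of the pattern, with a hypothetical `Config` type standing in for wiremock's `MockServer`:

// Minimal sketch of the Box::leak pattern, under the stated assumptions.
struct Config {
    url: String,
}

fn leaked_config() -> &'static Config {
    // Leaking the box means the allocation is never freed, so the
    // reference stays valid for the rest of the process: acceptable in
    // tests, and it lets spawned tasks borrow it with a 'static lifetime.
    Box::leak(Box::new(Config { url: "http://localhost:7700".to_string() }))
}

fn main() {
    let cfg: &'static Config = leaked_config();
    println!("{}", cfg.url);
}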

@@ -1,6 +1,7 @@
use super::shared_index_with_documents;
use crate::common::Server;
use crate::json;
use meili_snap::{json_string, snapshot};

#[actix_rt::test]
async fn default_search_should_return_estimated_total_hit() {

@@ -133,3 +134,61 @@ async fn ensure_placeholder_search_hit_count_valid() {
        .await;
    }
}

#[actix_rt::test]
async fn test_issue_5274() {
    let server = Server::new_shared();
    let index = server.unique_index();

    let documents = json!([
        {
            "id": 1,
            "title": "Document 1",
            "content": "This is the first."
        },
        {
            "id": 2,
            "title": "Document 2",
            "content": "This is the second doc."
        }
    ]);
    let (task, _code) = index.add_documents(documents, None).await;
    server.wait_task(task.uid()).await.succeeded();

    // Find out the lowest ranking score among the documents
    let (rep, _status) = index
        .search_post(json!({"q": "doc", "page": 1, "hitsPerPage": 2, "showRankingScore": true}))
        .await;
    let hits = rep["hits"].as_array().expect("Missing hits array");
    let second_hit = hits.get(1).expect("Missing second hit");
    let ranking_score = second_hit
        .get("_rankingScore")
        .expect("Missing _rankingScore field")
        .as_f64()
        .expect("Expected _rankingScore to be a f64");

    // Search with a ranking score threshold just above it and expect a single hit
    let (rep, _status) = index
        .search_post(json!({"q": "doc", "page": 1, "hitsPerPage": 1, "rankingScoreThreshold": ranking_score + 0.0001}))
        .await;

    snapshot!(json_string!(rep, {
        ".processingTimeMs" => "[ignored]",
    }), @r#"
    {
      "hits": [
        {
          "id": 2,
          "title": "Document 2",
          "content": "This is the second doc."
        }
      ],
      "query": "doc",
      "processingTimeMs": "[ignored]",
      "hitsPerPage": 1,
      "page": 1,
      "totalPages": 1,
      "totalHits": 1
    }
    "#);
}
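
For context, not part of the diff: the regression test above assumes `rankingScoreThreshold` keeps only hits whose `_rankingScore` is at or above the threshold, so nudging the threshold just past the weaker hit's score must drop exactly that hit. A stand-alone sketch of that filtering rule, with made-up scores:

fn main() {
    // Hypothetical (hit id, ranking score) pairs, best hit first.
    let hits = [(1, 0.91_f64), (2, 0.87_f64)];
    // Threshold just above the weaker hit's score, as in the test.
    let threshold = 0.87 + 0.0001;

    // Keep only hits at or above the threshold.
    let kept: Vec<_> = hits.iter().filter(|(_, s)| *s >= threshold).collect();
    assert_eq!(kept.len(), 1);
    assert_eq!(kept[0].0, 1);
    println!("{kept:?}");
}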

@@ -692,3 +692,68 @@ async fn granular_filterable_attributes() {
    ]
    "###);
}

#[actix_rt::test]
async fn test_searchable_attributes_order() {
    let server = Server::new_shared();
    let index = server.unique_index();

    // 1) Create an index with settings "searchableAttributes": ["title", "overview"]
    let (response, code) = index.create(None).await;
    assert_eq!(code, 202, "{response}");
    server.wait_task(response.uid()).await.succeeded();

    let (task, code) = index
        .update_settings(json!({
            "searchableAttributes": ["title", "overview"]
        }))
        .await;
    assert_eq!(code, 202, "{task}");
    server.wait_task(task.uid()).await.succeeded();

    // 2) Add documents in the index
    let documents = json!([
        {
            "id": 1,
            "title": "The Matrix",
            "overview": "A computer hacker learns from mysterious rebels about the true nature of his reality."
        },
        {
            "id": 2,
            "title": "Inception",
            "overview": "A thief who steals corporate secrets through dream-sharing technology."
        }
    ]);

    let (response, code) = index.add_documents(documents, None).await;
    assert_eq!(code, 202, "{response}");
    server.wait_task(response.uid()).await.succeeded();

    // 3) Modify the settings "searchableAttributes": ["overview", "title"] (overview is put first)
    let (task, code) = index
        .update_settings(json!({
            "searchableAttributes": ["overview", "title"]
        }))
        .await;
    assert_eq!(code, 202, "{task}");
    server.wait_task(task.uid()).await.succeeded();

    // 4) Check if it has been applied
    let (response, code) = index.settings().await;
    assert_eq!(code, 200, "{response}");
    assert_eq!(response["searchableAttributes"], json!(["overview", "title"]));

    // 5) Re-modify the settings "searchableAttributes": ["title", "overview"] (title is put first)
    let (task, code) = index
        .update_settings(json!({
            "searchableAttributes": ["title", "overview"]
        }))
        .await;
    assert_eq!(code, 202, "{task}");
    server.wait_task(task.uid()).await.succeeded();

    // 6) Check if it has been applied
    let (response, code) = index.settings().await;
    assert_eq!(code, 200, "{response}");
    assert_eq!(response["searchableAttributes"], json!(["title", "overview"]));
}

@@ -61,7 +61,16 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
  "pagination": {
    "maxTotalHits": 15
  },
  "embedders": {},
  "embedders": {
    "doggo_embedder": {
      "source": "huggingFace",
      "model": "sentence-transformers/all-MiniLM-L6-v2",
      "revision": "e4ce9877abf3edfe10b0d82785e83bdcb973e22e",
      "pooling": "forceMean",
      "documentTemplate": "{{doc.description}}",
      "documentTemplateMaxBytes": 400
    }
  },
  "searchCutoffMs": 8000,
  "localizedAttributes": [
    {

@@ -0,0 +1,40 @@
---
source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
---
[
  {
    "id": 1,
    "name": "kefir",
    "surname": [
      "kef",
      "kefkef",
      "kefirounet",
      "boubou"
    ],
    "age": 1.4,
    "description": "kefir est un petit chien blanc très mignon",
    "_vectors": {
      "doggo_embedder": {
        "embeddings": "[vector]",
        "regenerate": true
      }
    }
  },
  {
    "id": 2,
    "name": "intel",
    "surname": [
      "untel",
      "tétel",
      "iouiou"
    ],
    "age": 11.5,
    "description": "intel est un grand beagle très mignon",
    "_vectors": {
      "doggo_embedder": {
        "embeddings": "[vector]",
        "regenerate": false
      }
    }
  }
]
@@ -4,7 +4,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
{
  "results": [
    {
      "uid": 24,
      "uid": 30,
      "progress": null,
      "details": {
        "upgradeFrom": "v1.12.0",

@@ -26,6 +26,155 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
      "finishedAt": "[date]",
      "batchStrategy": "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type."
    },
    {
      "uid": 29,
      "progress": null,
      "details": {
        "receivedDocuments": 1,
        "indexedDocuments": 1
      },
      "stats": {
        "totalNbTasks": 1,
        "status": {
          "succeeded": 1
        },
        "types": {
          "documentAdditionOrUpdate": 1
        },
        "indexUids": {
          "kefir": 1
        }
      },
      "duration": "PT0.067201S",
      "startedAt": "2025-07-07T13:43:08.772854Z",
      "finishedAt": "2025-07-07T13:43:08.840055Z",
      "batchStrategy": "unspecified"
    },
    {
      "uid": 28,
      "progress": null,
      "details": {
        "deletedDocuments": 1
      },
      "stats": {
        "totalNbTasks": 1,
        "status": {
          "succeeded": 1
        },
        "types": {
          "indexDeletion": 1
        },
        "indexUids": {
          "mieli": 1
        }
      },
      "duration": "PT0.012727S",
      "startedAt": "2025-07-07T13:42:50.745461Z",
      "finishedAt": "2025-07-07T13:42:50.758188Z",
      "batchStrategy": "unspecified"
    },
    {
      "uid": 27,
      "progress": null,
      "details": {
        "receivedDocuments": 1,
        "indexedDocuments": 0
      },
      "stats": {
        "totalNbTasks": 1,
        "status": {
          "failed": 1
        },
        "types": {
          "documentAdditionOrUpdate": 1
        },
        "indexUids": {
          "kefir": 1
        }
      },
      "duration": "PT0.059920S",
      "startedAt": "2025-07-07T13:42:15.625413Z",
      "finishedAt": "2025-07-07T13:42:15.685333Z",
      "batchStrategy": "unspecified"
    },
    {
      "uid": 26,
      "progress": null,
      "details": {
        "receivedDocuments": 1,
        "indexedDocuments": 1
      },
      "stats": {
        "totalNbTasks": 1,
        "status": {
          "succeeded": 1
        },
        "types": {
          "documentAdditionOrUpdate": 1
        },
        "indexUids": {
          "mieli": 1
        }
      },
      "duration": "PT0.088879S",
      "startedAt": "2025-07-07T13:40:01.461741Z",
      "finishedAt": "2025-07-07T13:40:01.55062Z",
      "batchStrategy": "unspecified"
    },
    {
      "uid": 25,
      "progress": null,
      "details": {
        "receivedDocuments": 1,
        "indexedDocuments": 1
      },
      "stats": {
        "totalNbTasks": 1,
        "status": {
          "succeeded": 1
        },
        "types": {
          "documentAdditionOrUpdate": 1
        },
        "indexUids": {
          "kefir": 1
        }
      },
      "duration": "PT0.312911S",
      "startedAt": "2025-07-07T13:32:46.139785Z",
      "finishedAt": "2025-07-07T13:32:46.452696Z",
      "batchStrategy": "unspecified"
    },
    {
      "uid": 24,
      "progress": null,
      "details": {
        "embedders": {
          "doggo_embedder": {
            "source": "huggingFace",
            "model": "sentence-transformers/all-MiniLM-L6-v2",
            "revision": "e4ce9877abf3edfe10b0d82785e83bdcb973e22e",
            "documentTemplate": "{{doc.description}}"
          }
        }
      },
      "stats": {
        "totalNbTasks": 1,
        "status": {
          "succeeded": 1
        },
        "types": {
          "settingsUpdate": 1
        },
        "indexUids": {
          "kefir": 1
        }
      },
      "duration": "PT0.247378S",
      "startedAt": "2025-07-07T13:28:27.391344Z",
      "finishedAt": "2025-07-07T13:28:27.638722Z",
      "batchStrategy": "unspecified"
    },
    {
      "uid": 23,
      "progress": null,

@@ -348,179 +497,10 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
      "startedAt": "2025-01-16T17:01:14.112756687Z",
      "finishedAt": "2025-01-16T17:01:14.120064527Z",
      "batchStrategy": "unspecified"
    },
    {
      "uid": 10,
      "progress": null,
      "details": {
        "faceting": {
          "maxValuesPerFacet": 99
        },
        "pagination": {
          "maxTotalHits": 15
        }
      },
      "stats": {
        "totalNbTasks": 1,
        "status": {
          "succeeded": 1
        },
        "types": {
          "settingsUpdate": 1
        },
        "indexUids": {
          "kefir": 1
        }
      },
      "duration": "PT0.007391353S",
      "startedAt": "2025-01-16T17:00:29.201180268Z",
      "finishedAt": "2025-01-16T17:00:29.208571621Z",
      "batchStrategy": "unspecified"
    },
    {
      "uid": 9,
      "progress": null,
      "details": {
        "faceting": {
          "maxValuesPerFacet": 100
        },
        "pagination": {
          "maxTotalHits": 1000
        }
      },
      "stats": {
        "totalNbTasks": 1,
        "status": {
          "succeeded": 1
        },
        "types": {
          "settingsUpdate": 1
        },
        "indexUids": {
          "kefir": 1
        }
      },
      "duration": "PT0.007445825S",
      "startedAt": "2025-01-16T17:00:15.77629445Z",
      "finishedAt": "2025-01-16T17:00:15.783740275Z",
      "batchStrategy": "unspecified"
    },
    {
      "uid": 8,
      "progress": null,
      "details": {
        "typoTolerance": {
          "minWordSizeForTypos": {
            "oneTypo": 4
          },
          "disableOnWords": [
            "kefir"
          ],
          "disableOnAttributes": [
            "surname"
          ]
        }
      },
      "stats": {
        "totalNbTasks": 1,
        "status": {
          "succeeded": 1
        },
        "types": {
          "settingsUpdate": 1
        },
        "indexUids": {
          "kefir": 1
        }
      },
      "duration": "PT0.012020083S",
      "startedAt": "2025-01-16T16:59:42.744086671Z",
      "finishedAt": "2025-01-16T16:59:42.756106754Z",
      "batchStrategy": "unspecified"
    },
    {
      "uid": 7,
      "progress": null,
      "details": {
        "typoTolerance": {
          "minWordSizeForTypos": {
            "oneTypo": 4
          }
        }
      },
      "stats": {
        "totalNbTasks": 1,
        "status": {
          "succeeded": 1
        },
        "types": {
          "settingsUpdate": 1
        },
        "indexUids": {
          "kefir": 1
        }
      },
      "duration": "PT0.007440092S",
      "startedAt": "2025-01-16T16:58:41.2155771Z",
      "finishedAt": "2025-01-16T16:58:41.223017192Z",
      "batchStrategy": "unspecified"
    },
    {
      "uid": 6,
      "progress": null,
      "details": {
        "synonyms": {
          "boubou": [
            "kefir"
          ]
        }
      },
      "stats": {
        "totalNbTasks": 1,
        "status": {
          "succeeded": 1
        },
        "types": {
          "settingsUpdate": 1
        },
        "indexUids": {
          "kefir": 1
        }
      },
      "duration": "PT0.007565161S",
      "startedAt": "2025-01-16T16:54:51.940332781Z",
      "finishedAt": "2025-01-16T16:54:51.947897942Z",
      "batchStrategy": "unspecified"
    },
    {
      "uid": 5,
      "progress": null,
      "details": {
        "stopWords": [
          "le",
          "un"
        ]
      },
      "stats": {
        "totalNbTasks": 1,
        "status": {
          "succeeded": 1
        },
        "types": {
          "settingsUpdate": 1
        },
        "indexUids": {
          "kefir": 1
        }
      },
      "duration": "PT0.016307263S",
      "startedAt": "2025-01-16T16:53:19.913351957Z",
      "finishedAt": "2025-01-16T16:53:19.92965922Z",
      "batchStrategy": "unspecified"
    }
  ],
  "total": 23,
  "total": 29,
  "limit": 20,
  "from": 24,
  "next": 4
  "from": 30,
  "next": 10
}
@@ -4,8 +4,8 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
{
  "results": [
    {
-      "uid": 25,
-      "batchUid": 24,
+      "uid": 31,
+      "batchUid": 30,
      "indexUid": null,
      "status": "succeeded",
      "type": "upgradeDatabase",
@@ -20,6 +20,118 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
      "startedAt": "[date]",
      "finishedAt": "[date]"
    },
+    {
+      "uid": 30,
+      "batchUid": 29,
+      "indexUid": "kefir",
+      "status": "succeeded",
+      "type": "documentAdditionOrUpdate",
+      "canceledBy": null,
+      "details": {
+        "receivedDocuments": 1,
+        "indexedDocuments": 1
+      },
+      "error": null,
+      "duration": "PT0.067201S",
+      "enqueuedAt": "2025-07-07T13:43:08.772432Z",
+      "startedAt": "2025-07-07T13:43:08.772854Z",
+      "finishedAt": "2025-07-07T13:43:08.840055Z"
+    },
+    {
+      "uid": 29,
+      "batchUid": 28,
+      "indexUid": "mieli",
+      "status": "succeeded",
+      "type": "indexDeletion",
+      "canceledBy": null,
+      "details": {
+        "deletedDocuments": 1
+      },
+      "error": null,
+      "duration": "PT0.012727S",
+      "enqueuedAt": "2025-07-07T13:42:50.744793Z",
+      "startedAt": "2025-07-07T13:42:50.745461Z",
+      "finishedAt": "2025-07-07T13:42:50.758188Z"
+    },
+    {
+      "uid": 28,
+      "batchUid": 27,
+      "indexUid": "kefir",
+      "status": "failed",
+      "type": "documentAdditionOrUpdate",
+      "canceledBy": null,
+      "details": {
+        "receivedDocuments": 1,
+        "indexedDocuments": 0
+      },
+      "error": {
+        "message": "Index `kefir`: Bad embedder configuration in the document with id: `2`. Could not parse `._vectors.doggo_embedder`: trailing characters at line 1 column 13",
+        "code": "invalid_vectors_type",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#invalid_vectors_type"
+      },
+      "duration": "PT0.059920S",
+      "enqueuedAt": "2025-07-07T13:42:15.624598Z",
+      "startedAt": "2025-07-07T13:42:15.625413Z",
+      "finishedAt": "2025-07-07T13:42:15.685333Z"
+    },
+    {
+      "uid": 27,
+      "batchUid": 26,
+      "indexUid": "mieli",
+      "status": "succeeded",
+      "type": "documentAdditionOrUpdate",
+      "canceledBy": null,
+      "details": {
+        "receivedDocuments": 1,
+        "indexedDocuments": 1
+      },
+      "error": null,
+      "duration": "PT0.088879S",
+      "enqueuedAt": "2025-07-07T13:40:01.46081Z",
+      "startedAt": "2025-07-07T13:40:01.461741Z",
+      "finishedAt": "2025-07-07T13:40:01.55062Z"
+    },
+    {
+      "uid": 26,
+      "batchUid": 25,
+      "indexUid": "kefir",
+      "status": "succeeded",
+      "type": "documentAdditionOrUpdate",
+      "canceledBy": null,
+      "details": {
+        "receivedDocuments": 1,
+        "indexedDocuments": 1
+      },
+      "error": null,
+      "duration": "PT0.312911S",
+      "enqueuedAt": "2025-07-07T13:32:46.13871Z",
+      "startedAt": "2025-07-07T13:32:46.139785Z",
+      "finishedAt": "2025-07-07T13:32:46.452696Z"
+    },
+    {
+      "uid": 25,
+      "batchUid": 24,
+      "indexUid": "kefir",
+      "status": "succeeded",
+      "type": "settingsUpdate",
+      "canceledBy": null,
+      "details": {
+        "embedders": {
+          "doggo_embedder": {
+            "source": "huggingFace",
+            "model": "sentence-transformers/all-MiniLM-L6-v2",
+            "revision": "e4ce9877abf3edfe10b0d82785e83bdcb973e22e",
+            "documentTemplate": "{{doc.description}}"
+          }
+        }
+      },
+      "error": null,
+      "duration": "PT0.247378S",
+      "enqueuedAt": "2025-07-07T13:28:27.390054Z",
+      "startedAt": "2025-07-07T13:28:27.391344Z",
+      "finishedAt": "2025-07-07T13:28:27.638722Z"
+    },
    {
      "uid": 24,
      "batchUid": 23,
@@ -264,134 +376,10 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
      "enqueuedAt": "2025-01-16T17:02:52.527382964Z",
      "startedAt": "2025-01-16T17:02:52.539749853Z",
      "finishedAt": "2025-01-16T17:02:52.547390016Z"
-    },
-    {
-      "uid": 11,
-      "batchUid": 11,
-      "indexUid": "kefir",
-      "status": "succeeded",
-      "type": "settingsUpdate",
-      "canceledBy": null,
-      "details": {
-        "searchCutoffMs": 8000
-      },
-      "error": null,
-      "duration": "PT0.007307840S",
-      "enqueuedAt": "2025-01-16T17:01:14.100316617Z",
-      "startedAt": "2025-01-16T17:01:14.112756687Z",
-      "finishedAt": "2025-01-16T17:01:14.120064527Z"
-    },
-    {
-      "uid": 10,
-      "batchUid": 10,
-      "indexUid": "kefir",
-      "status": "succeeded",
-      "type": "settingsUpdate",
-      "canceledBy": null,
-      "details": {
-        "faceting": {
-          "maxValuesPerFacet": 99
-        },
-        "pagination": {
-          "maxTotalHits": 15
-        }
-      },
-      "error": null,
-      "duration": "PT0.007391353S",
-      "enqueuedAt": "2025-01-16T17:00:29.188815062Z",
-      "startedAt": "2025-01-16T17:00:29.201180268Z",
-      "finishedAt": "2025-01-16T17:00:29.208571621Z"
-    },
-    {
-      "uid": 9,
-      "batchUid": 9,
-      "indexUid": "kefir",
-      "status": "succeeded",
-      "type": "settingsUpdate",
-      "canceledBy": null,
-      "details": {
-        "faceting": {
-          "maxValuesPerFacet": 100
-        },
-        "pagination": {
-          "maxTotalHits": 1000
-        }
-      },
-      "error": null,
-      "duration": "PT0.007445825S",
-      "enqueuedAt": "2025-01-16T17:00:15.759501709Z",
-      "startedAt": "2025-01-16T17:00:15.77629445Z",
-      "finishedAt": "2025-01-16T17:00:15.783740275Z"
-    },
-    {
-      "uid": 8,
-      "batchUid": 8,
-      "indexUid": "kefir",
-      "status": "succeeded",
-      "type": "settingsUpdate",
-      "canceledBy": null,
-      "details": {
-        "typoTolerance": {
-          "minWordSizeForTypos": {
-            "oneTypo": 4
-          },
-          "disableOnWords": [
-            "kefir"
-          ],
-          "disableOnAttributes": [
-            "surname"
-          ]
-        }
-      },
-      "error": null,
-      "duration": "PT0.012020083S",
-      "enqueuedAt": "2025-01-16T16:59:42.727292501Z",
-      "startedAt": "2025-01-16T16:59:42.744086671Z",
-      "finishedAt": "2025-01-16T16:59:42.756106754Z"
-    },
-    {
-      "uid": 7,
-      "batchUid": 7,
-      "indexUid": "kefir",
-      "status": "succeeded",
-      "type": "settingsUpdate",
-      "canceledBy": null,
-      "details": {
-        "typoTolerance": {
-          "minWordSizeForTypos": {
-            "oneTypo": 4
-          }
-        }
-      },
-      "error": null,
-      "duration": "PT0.007440092S",
-      "enqueuedAt": "2025-01-16T16:58:41.203145044Z",
-      "startedAt": "2025-01-16T16:58:41.2155771Z",
-      "finishedAt": "2025-01-16T16:58:41.223017192Z"
-    },
-    {
-      "uid": 6,
-      "batchUid": 6,
-      "indexUid": "kefir",
-      "status": "succeeded",
-      "type": "settingsUpdate",
-      "canceledBy": null,
-      "details": {
-        "synonyms": {
-          "boubou": [
-            "kefir"
-          ]
-        }
-      },
-      "error": null,
-      "duration": "PT0.007565161S",
-      "enqueuedAt": "2025-01-16T16:54:51.927866243Z",
-      "startedAt": "2025-01-16T16:54:51.940332781Z",
-      "finishedAt": "2025-01-16T16:54:51.947897942Z"
    }
  ],
-  "total": 24,
+  "total": 30,
  "limit": 20,
-  "from": 25,
-  "next": 5
+  "from": 31,
+  "next": 11
}
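
The `total`/`limit`/`from`/`next` trio asserted at the tail of this snapshot is Meilisearch's keyset pagination over `GET /tasks`. A minimal sketch of how a client would walk it, assuming the documented semantics (tasks come back in descending uid order, `from` is the uid the page starts at, and `next` is the uid to send back as `?from=`); only serde_json is required:

```rust
use serde_json::{json, Value};

// A `null` `next` means there are no older tasks to fetch.
fn next_from(page: &Value) -> Option<u64> {
    page.get("next").and_then(Value::as_u64)
}

fn main() {
    let page = json!({ "results": [], "total": 30, "limit": 20, "from": 31, "next": 11 });
    // The follow-up request would be `GET /tasks?from=11`.
    assert_eq!(next_from(&page), Some(11));
}
```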
@@ -4,7 +4,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
{
  "results": [
    {
-      "uid": 24,
+      "uid": 30,
      "progress": null,
      "details": {
        "upgradeFrom": "v1.12.0",
@@ -26,6 +26,155 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
      "finishedAt": "[date]",
      "batchStrategy": "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type."
    },
+    {
+      "uid": 29,
+      "progress": null,
+      "details": {
+        "receivedDocuments": 1,
+        "indexedDocuments": 1
+      },
+      "stats": {
+        "totalNbTasks": 1,
+        "status": {
+          "succeeded": 1
+        },
+        "types": {
+          "documentAdditionOrUpdate": 1
+        },
+        "indexUids": {
+          "kefir": 1
+        }
+      },
+      "duration": "PT0.067201S",
+      "startedAt": "2025-07-07T13:43:08.772854Z",
+      "finishedAt": "2025-07-07T13:43:08.840055Z",
+      "batchStrategy": "unspecified"
+    },
+    {
+      "uid": 28,
+      "progress": null,
+      "details": {
+        "deletedDocuments": 1
+      },
+      "stats": {
+        "totalNbTasks": 1,
+        "status": {
+          "succeeded": 1
+        },
+        "types": {
+          "indexDeletion": 1
+        },
+        "indexUids": {
+          "mieli": 1
+        }
+      },
+      "duration": "PT0.012727S",
+      "startedAt": "2025-07-07T13:42:50.745461Z",
+      "finishedAt": "2025-07-07T13:42:50.758188Z",
+      "batchStrategy": "unspecified"
+    },
+    {
+      "uid": 27,
+      "progress": null,
+      "details": {
+        "receivedDocuments": 1,
+        "indexedDocuments": 0
+      },
+      "stats": {
+        "totalNbTasks": 1,
+        "status": {
+          "failed": 1
+        },
+        "types": {
+          "documentAdditionOrUpdate": 1
+        },
+        "indexUids": {
+          "kefir": 1
+        }
+      },
+      "duration": "PT0.059920S",
+      "startedAt": "2025-07-07T13:42:15.625413Z",
+      "finishedAt": "2025-07-07T13:42:15.685333Z",
+      "batchStrategy": "unspecified"
+    },
+    {
+      "uid": 26,
+      "progress": null,
+      "details": {
+        "receivedDocuments": 1,
+        "indexedDocuments": 1
+      },
+      "stats": {
+        "totalNbTasks": 1,
+        "status": {
+          "succeeded": 1
+        },
+        "types": {
+          "documentAdditionOrUpdate": 1
+        },
+        "indexUids": {
+          "mieli": 1
+        }
+      },
+      "duration": "PT0.088879S",
+      "startedAt": "2025-07-07T13:40:01.461741Z",
+      "finishedAt": "2025-07-07T13:40:01.55062Z",
+      "batchStrategy": "unspecified"
+    },
+    {
+      "uid": 25,
+      "progress": null,
+      "details": {
+        "receivedDocuments": 1,
+        "indexedDocuments": 1
+      },
+      "stats": {
+        "totalNbTasks": 1,
+        "status": {
+          "succeeded": 1
+        },
+        "types": {
+          "documentAdditionOrUpdate": 1
+        },
+        "indexUids": {
+          "kefir": 1
+        }
+      },
+      "duration": "PT0.312911S",
+      "startedAt": "2025-07-07T13:32:46.139785Z",
+      "finishedAt": "2025-07-07T13:32:46.452696Z",
+      "batchStrategy": "unspecified"
+    },
+    {
+      "uid": 24,
+      "progress": null,
+      "details": {
+        "embedders": {
+          "doggo_embedder": {
+            "source": "huggingFace",
+            "model": "sentence-transformers/all-MiniLM-L6-v2",
+            "revision": "e4ce9877abf3edfe10b0d82785e83bdcb973e22e",
+            "documentTemplate": "{{doc.description}}"
+          }
+        }
+      },
+      "stats": {
+        "totalNbTasks": 1,
+        "status": {
+          "succeeded": 1
+        },
+        "types": {
+          "settingsUpdate": 1
+        },
+        "indexUids": {
+          "kefir": 1
+        }
+      },
+      "duration": "PT0.247378S",
+      "startedAt": "2025-07-07T13:28:27.391344Z",
+      "finishedAt": "2025-07-07T13:28:27.638722Z",
+      "batchStrategy": "unspecified"
+    },
    {
      "uid": 23,
      "progress": null,
@@ -642,8 +791,8 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
      "batchStrategy": "unspecified"
    }
  ],
-  "total": 25,
+  "total": 31,
  "limit": 1000,
-  "from": 24,
+  "from": 30,
  "next": null
}
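
The `batchStrategy` string on the upgrade batch states the rule in prose; it is easy to restate as code. A hypothetical sketch with illustrative names only, not Meilisearch's internal API: `upgradeDatabase` can never share a batch with any other task type, so the scheduler closes the batch as soon as it meets one.

```rust
#[derive(Clone, Copy, PartialEq)]
enum TaskKind {
    UpgradeDatabase,
    DocumentAdditionOrUpdate,
    SettingsUpdate,
}

// An upgrade task is always alone in its batch; otherwise only tasks of the
// same kind are batched together in this simplified model.
fn can_batch(current: TaskKind, candidate: TaskKind) -> bool {
    !matches!(current, TaskKind::UpgradeDatabase)
        && !matches!(candidate, TaskKind::UpgradeDatabase)
        && current == candidate
}

fn main() {
    assert!(can_batch(TaskKind::SettingsUpdate, TaskKind::SettingsUpdate));
    assert!(!can_batch(TaskKind::UpgradeDatabase, TaskKind::SettingsUpdate));
}
```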

@@ -4,8 +4,8 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
{
  "results": [
    {
-      "uid": 25,
-      "batchUid": 24,
+      "uid": 31,
+      "batchUid": 30,
      "indexUid": null,
      "status": "succeeded",
      "type": "upgradeDatabase",
@@ -20,6 +20,118 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
      "startedAt": "[date]",
      "finishedAt": "[date]"
    },
+    {
+      "uid": 30,
+      "batchUid": 29,
+      "indexUid": "kefir",
+      "status": "succeeded",
+      "type": "documentAdditionOrUpdate",
+      "canceledBy": null,
+      "details": {
+        "receivedDocuments": 1,
+        "indexedDocuments": 1
+      },
+      "error": null,
+      "duration": "PT0.067201S",
+      "enqueuedAt": "2025-07-07T13:43:08.772432Z",
+      "startedAt": "2025-07-07T13:43:08.772854Z",
+      "finishedAt": "2025-07-07T13:43:08.840055Z"
+    },
+    {
+      "uid": 29,
+      "batchUid": 28,
+      "indexUid": "mieli",
+      "status": "succeeded",
+      "type": "indexDeletion",
+      "canceledBy": null,
+      "details": {
+        "deletedDocuments": 1
+      },
+      "error": null,
+      "duration": "PT0.012727S",
+      "enqueuedAt": "2025-07-07T13:42:50.744793Z",
+      "startedAt": "2025-07-07T13:42:50.745461Z",
+      "finishedAt": "2025-07-07T13:42:50.758188Z"
+    },
+    {
+      "uid": 28,
+      "batchUid": 27,
+      "indexUid": "kefir",
+      "status": "failed",
+      "type": "documentAdditionOrUpdate",
+      "canceledBy": null,
+      "details": {
+        "receivedDocuments": 1,
+        "indexedDocuments": 0
+      },
+      "error": {
+        "message": "Index `kefir`: Bad embedder configuration in the document with id: `2`. Could not parse `._vectors.doggo_embedder`: trailing characters at line 1 column 13",
+        "code": "invalid_vectors_type",
+        "type": "invalid_request",
+        "link": "https://docs.meilisearch.com/errors#invalid_vectors_type"
+      },
+      "duration": "PT0.059920S",
+      "enqueuedAt": "2025-07-07T13:42:15.624598Z",
+      "startedAt": "2025-07-07T13:42:15.625413Z",
+      "finishedAt": "2025-07-07T13:42:15.685333Z"
+    },
+    {
+      "uid": 27,
+      "batchUid": 26,
+      "indexUid": "mieli",
+      "status": "succeeded",
+      "type": "documentAdditionOrUpdate",
+      "canceledBy": null,
+      "details": {
+        "receivedDocuments": 1,
+        "indexedDocuments": 1
+      },
+      "error": null,
+      "duration": "PT0.088879S",
+      "enqueuedAt": "2025-07-07T13:40:01.46081Z",
+      "startedAt": "2025-07-07T13:40:01.461741Z",
+      "finishedAt": "2025-07-07T13:40:01.55062Z"
+    },
+    {
+      "uid": 26,
+      "batchUid": 25,
+      "indexUid": "kefir",
+      "status": "succeeded",
+      "type": "documentAdditionOrUpdate",
+      "canceledBy": null,
+      "details": {
+        "receivedDocuments": 1,
+        "indexedDocuments": 1
+      },
+      "error": null,
+      "duration": "PT0.312911S",
+      "enqueuedAt": "2025-07-07T13:32:46.13871Z",
+      "startedAt": "2025-07-07T13:32:46.139785Z",
+      "finishedAt": "2025-07-07T13:32:46.452696Z"
+    },
+    {
+      "uid": 25,
+      "batchUid": 24,
+      "indexUid": "kefir",
+      "status": "succeeded",
+      "type": "settingsUpdate",
+      "canceledBy": null,
+      "details": {
+        "embedders": {
+          "doggo_embedder": {
+            "source": "huggingFace",
+            "model": "sentence-transformers/all-MiniLM-L6-v2",
+            "revision": "e4ce9877abf3edfe10b0d82785e83bdcb973e22e",
+            "documentTemplate": "{{doc.description}}"
+          }
+        }
+      },
+      "error": null,
+      "duration": "PT0.247378S",
+      "enqueuedAt": "2025-07-07T13:28:27.390054Z",
+      "startedAt": "2025-07-07T13:28:27.391344Z",
+      "finishedAt": "2025-07-07T13:28:27.638722Z"
+    },
    {
      "uid": 24,
      "batchUid": 23,
@@ -497,8 +609,8 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
      "finishedAt": "2025-01-16T16:45:16.131303739Z"
    }
  ],
-  "total": 26,
+  "total": 32,
  "limit": 1000,
-  "from": 25,
+  "from": 31,
  "next": null
}
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -114,13 +114,13 @@ async fn check_the_index_scheduler(server: &Server) {

    // All the indexes are still present
    let (indexes, _) = server.list_indexes(None, None).await;
-    snapshot!(indexes, @r#"
+    snapshot!(indexes, @r###"
    {
      "results": [
        {
          "uid": "kefir",
          "createdAt": "2025-01-16T16:45:16.020663157Z",
-          "updatedAt": "2025-01-23T11:36:22.634859166Z",
+          "updatedAt": "2025-07-07T13:43:08.835381Z",
          "primaryKey": "id"
        }
      ],
@@ -128,7 +128,7 @@ async fn check_the_index_scheduler(server: &Server) {
      "limit": 20,
      "total": 1
    }
-    "#);
+    "###);
    // And their metadata are still right
    let (stats, _) = server.stats().await;
    assert_json_snapshot!(stats, {
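
Besides the refreshed timestamp, the only change in these two hunks is the inline-snapshot delimiter, `@r#"…"#` becoming `@r###"…"###`. Inline snapshots are Rust raw strings, so the delimiter must use more `#`s than any `"#` run inside the snapshot body. A minimal sketch, assuming the insta crate:

```rust
use insta::assert_snapshot;

fn main() {
    // One `#` is enough while the body contains no `"#` sequence.
    assert_snapshot!("plain body", @r#"plain body"#);
    // A body that contains `"#` forces a longer delimiter, as in the diff above.
    assert_snapshot!(r##"contains "# inside"##, @r##"contains "# inside"##);
}
```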
@@ -141,21 +141,21 @@ async fn check_the_index_scheduler(server: &Server) {
    {
      "databaseSize": "[bytes]",
      "usedDatabaseSize": "[bytes]",
-      "lastUpdate": "2025-01-23T11:36:22.634859166Z",
+      "lastUpdate": "2025-07-07T13:43:08.835381Z",
      "indexes": {
        "kefir": {
-          "numberOfDocuments": 1,
+          "numberOfDocuments": 2,
          "rawDocumentDbSize": "[bytes]",
          "avgDocumentSize": "[bytes]",
          "isIndexing": false,
-          "numberOfEmbeddings": 0,
-          "numberOfEmbeddedDocuments": 0,
+          "numberOfEmbeddings": 2,
+          "numberOfEmbeddedDocuments": 2,
          "fieldDistribution": {
-            "age": 1,
-            "description": 1,
-            "id": 1,
-            "name": 1,
-            "surname": 1
+            "age": 2,
+            "description": 2,
+            "id": 2,
+            "name": 2,
+            "surname": 2
          }
        }
      }
@@ -227,21 +227,21 @@ async fn check_the_index_scheduler(server: &Server) {
    {
      "databaseSize": "[bytes]",
      "usedDatabaseSize": "[bytes]",
-      "lastUpdate": "2025-01-23T11:36:22.634859166Z",
+      "lastUpdate": "2025-07-07T13:43:08.835381Z",
      "indexes": {
        "kefir": {
-          "numberOfDocuments": 1,
+          "numberOfDocuments": 2,
          "rawDocumentDbSize": "[bytes]",
          "avgDocumentSize": "[bytes]",
          "isIndexing": false,
-          "numberOfEmbeddings": 0,
-          "numberOfEmbeddedDocuments": 0,
+          "numberOfEmbeddings": 2,
+          "numberOfEmbeddedDocuments": 2,
          "fieldDistribution": {
-            "age": 1,
-            "description": 1,
-            "id": 1,
-            "name": 1,
-            "surname": 1
+            "age": 2,
+            "description": 2,
+            "id": 2,
+            "name": 2,
+            "surname": 2
          }
        }
      }
@@ -254,18 +254,18 @@ async fn check_the_index_scheduler(server: &Server) {
      ".avgDocumentSize" => "[bytes]",
    }), @r###"
    {
-      "numberOfDocuments": 1,
+      "numberOfDocuments": 2,
      "rawDocumentDbSize": "[bytes]",
      "avgDocumentSize": "[bytes]",
      "isIndexing": false,
-      "numberOfEmbeddings": 0,
-      "numberOfEmbeddedDocuments": 0,
+      "numberOfEmbeddings": 2,
+      "numberOfEmbeddedDocuments": 2,
      "fieldDistribution": {
-        "age": 1,
-        "description": 1,
-        "id": 1,
-        "name": 1,
-        "surname": 1
+        "age": 2,
+        "description": 2,
+        "id": 2,
+        "name": 2,
+        "surname": 2
      }
    }
    "###);
@@ -295,4 +295,8 @@ async fn check_the_index_features(server: &Server) {
    let (results, _status) =
        kefir.search_post(json!({ "sort": ["age:asc"], "filter": "surname = kefirounet" })).await;
    snapshot!(results, name: "search_with_sort_and_filter");
+
+    // ensuring we can get the vectors and their `regenerate` is still good.
+    let (results, _status) = kefir.search_post(json!({"retrieveVectors": true})).await;
+    snapshot!(json_string!(results["hits"], {"[]._vectors.doggo_embedder.embeddings" => "[vector]"}), name: "search_with_retrieve_vectors");
}
crates/meilisearch/tests/vector/fragments.rs (new file, 2120 lines): file diff suppressed because it is too large.
@@ -1,4 +1,5 @@
mod binary_quantized;
+mod fragments;
#[cfg(feature = "test-ollama")]
mod ollama;
mod openai;

@@ -136,7 +136,7 @@ fn long_text() -> &'static str {
    })
}

-async fn create_mock_tokenized() -> (MockServer, Value) {
+async fn create_mock_tokenized() -> (&'static MockServer, Value) {
    create_mock_with_template("{{doc.text}}", ModelDimensions::Large, false, false).await
}

@@ -145,8 +145,8 @@ async fn create_mock_with_template(
    model_dimensions: ModelDimensions,
    fallible: bool,
    slow: bool,
-) -> (MockServer, Value) {
-    let mock_server = MockServer::start().await;
+) -> (&'static MockServer, Value) {
+    let mock_server = Box::leak(Box::new(MockServer::start().await));
    const API_KEY: &str = "my-api-key";
    const API_KEY_BEARER: &str = "Bearer my-api-key";

@@ -299,7 +299,7 @@ async fn create_mock_with_template(
            }
        }))
    })
-    .mount(&mock_server)
+    .mount(mock_server)
    .await;
    let url = mock_server.uri();
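
The recurring change in this file is the return type: every helper now hands back a `&'static MockServer` produced by `Box::leak`. A minimal sketch of the pattern, detached from wiremock; leaking a heap allocation trades a bounded memory leak for a `'static` borrow, which is fine in tests because the process exits and the OS reclaims everything:

```rust
// The leaked value is never dropped, so the returned reference is valid for
// the rest of the program, satisfying APIs that demand `'static`.
fn make_static<T>(value: T) -> &'static T {
    Box::leak(Box::new(value))
}

fn main() {
    let greeting: &'static String = make_static(String::from("lives for the whole test run"));
    println!("{greeting}");
}
```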

@@ -321,27 +321,27 @@ const DOGGO_TEMPLATE: &str = r#"{%- if doc.gender == "F" -%}Une chienne nommée
Un chien nommé {{doc.name}}, né en {{doc.birthyear}}
{%- endif %}, de race {{doc.breed}}."#;

-async fn create_mock() -> (MockServer, Value) {
+async fn create_mock() -> (&'static MockServer, Value) {
    create_mock_with_template(DOGGO_TEMPLATE, ModelDimensions::Large, false, false).await
}

-async fn create_mock_dimensions() -> (MockServer, Value) {
+async fn create_mock_dimensions() -> (&'static MockServer, Value) {
    create_mock_with_template(DOGGO_TEMPLATE, ModelDimensions::Large512, false, false).await
}

-async fn create_mock_small_embedding_model() -> (MockServer, Value) {
+async fn create_mock_small_embedding_model() -> (&'static MockServer, Value) {
    create_mock_with_template(DOGGO_TEMPLATE, ModelDimensions::Small, false, false).await
}

-async fn create_mock_legacy_embedding_model() -> (MockServer, Value) {
+async fn create_mock_legacy_embedding_model() -> (&'static MockServer, Value) {
    create_mock_with_template(DOGGO_TEMPLATE, ModelDimensions::Ada, false, false).await
}

-async fn create_fallible_mock() -> (MockServer, Value) {
+async fn create_fallible_mock() -> (&'static MockServer, Value) {
    create_mock_with_template(DOGGO_TEMPLATE, ModelDimensions::Large, true, false).await
}

-async fn create_slow_mock() -> (MockServer, Value) {
+async fn create_slow_mock() -> (&'static MockServer, Value) {
    create_mock_with_template(DOGGO_TEMPLATE, ModelDimensions::Large, true, true).await
}

@@ -12,8 +12,8 @@ use crate::common::Value;
use crate::json;
use crate::vector::{get_server_vector, GetAllDocumentsOptions};

-async fn create_mock() -> (MockServer, Value) {
-    let mock_server = MockServer::start().await;
+async fn create_mock() -> (&'static MockServer, Value) {
+    let mock_server = Box::leak(Box::new(MockServer::start().await));

    let text_to_embedding: BTreeMap<_, _> = vec![
        // text -> embedding
@@ -32,7 +32,7 @@ async fn create_mock() -> (MockServer, Value) {
            json!({ "data": text_to_embedding.get(text.as_str()).unwrap_or(&[99., 99., 99.]) }),
        )
    })
-    .mount(&mock_server)
+    .mount(mock_server)
    .await;
    let url = mock_server.uri();

@@ -50,8 +50,8 @@ async fn create_mock() -> (MockServer, Value) {
    (mock_server, embedder_settings)
}
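
For context, this is the shape of the helpers being edited, reduced to a self-contained example. Assumed dev-dependencies are wiremock, serde_json, and tokio (with the `macros` and `rt` features); the fixed vector is illustrative:

```rust
use serde_json::json;
use wiremock::matchers::method;
use wiremock::{Mock, MockServer, ResponseTemplate};

#[tokio::main]
async fn main() {
    // Leaked so the reference is `'static`, matching the diff above.
    let mock_server: &'static MockServer = Box::leak(Box::new(MockServer::start().await));

    // Answer every POST with a fixed three-dimensional "embedding".
    Mock::given(method("POST"))
        .respond_with(ResponseTemplate::new(200).set_body_json(json!({ "data": [0.0, 0.1, 0.2] })))
        .mount(mock_server)
        .await;

    println!("mock embedder listening on {}", mock_server.uri());
}
```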

-async fn create_mock_default_template() -> (MockServer, Value) {
-    let mock_server = MockServer::start().await;
+async fn create_mock_default_template() -> (&'static MockServer, Value) {
+    let mock_server = Box::leak(Box::new(MockServer::start().await));

    let text_to_embedding: BTreeMap<_, _> = vec![
        // text -> embedding
@@ -73,7 +73,7 @@ async fn create_mock_default_template() -> (MockServer, Value) {
                .set_body_json(json!({"error": "text not found", "text": text})),
        }
    })
-    .mount(&mock_server)
+    .mount(mock_server)
    .await;
    let url = mock_server.uri();

@@ -106,8 +106,8 @@ struct SingleResponse {
    embedding: Vec<f32>,
}

-async fn create_mock_multiple() -> (MockServer, Value) {
-    let mock_server = MockServer::start().await;
+async fn create_mock_multiple() -> (&'static MockServer, Value) {
+    let mock_server = Box::leak(Box::new(MockServer::start().await));

    let text_to_embedding: BTreeMap<_, _> = vec![
        // text -> embedding
@@ -146,7 +146,7 @@ async fn create_mock_multiple() -> (MockServer, Value) {

        ResponseTemplate::new(200).set_body_json(response)
    })
-    .mount(&mock_server)
+    .mount(mock_server)
    .await;
    let url = mock_server.uri();

@@ -176,8 +176,8 @@ struct SingleRequest {
    input: String,
}

-async fn create_mock_single_response_in_array() -> (MockServer, Value) {
-    let mock_server = MockServer::start().await;
+async fn create_mock_single_response_in_array() -> (&'static MockServer, Value) {
+    let mock_server = Box::leak(Box::new(MockServer::start().await));

    let text_to_embedding: BTreeMap<_, _> = vec![
        // text -> embedding
@@ -212,7 +212,7 @@ async fn create_mock_single_response_in_array() -> (MockServer, Value) {

        ResponseTemplate::new(200).set_body_json(response)
    })
-    .mount(&mock_server)
+    .mount(mock_server)
    .await;
    let url = mock_server.uri();

@@ -236,8 +236,8 @@ async fn create_mock_single_response_in_array() -> (MockServer, Value) {
    (mock_server, embedder_settings)
}

-async fn create_mock_raw_with_custom_header() -> (MockServer, Value) {
-    let mock_server = MockServer::start().await;
+async fn create_mock_raw_with_custom_header() -> (&'static MockServer, Value) {
+    let mock_server = Box::leak(Box::new(MockServer::start().await));

    let text_to_embedding: BTreeMap<_, _> = vec![
        // text -> embedding
@@ -277,7 +277,7 @@ async fn create_mock_raw_with_custom_header() -> (MockServer, Value) {

        ResponseTemplate::new(200).set_body_json(output)
    })
-    .mount(&mock_server)
+    .mount(mock_server)
    .await;
    let url = mock_server.uri();

@@ -293,8 +293,8 @@ async fn create_mock_raw_with_custom_header() -> (MockServer, Value) {
    (mock_server, embedder_settings)
}

-async fn create_mock_raw() -> (MockServer, Value) {
-    let mock_server = MockServer::start().await;
+async fn create_mock_raw() -> (&'static MockServer, Value) {
+    let mock_server = Box::leak(Box::new(MockServer::start().await));

    let text_to_embedding: BTreeMap<_, _> = vec![
        // text -> embedding
@@ -321,7 +321,7 @@ async fn create_mock_raw() -> (MockServer, Value) {

        ResponseTemplate::new(200).set_body_json(output)
    })
-    .mount(&mock_server)
+    .mount(mock_server)
    .await;
    let url = mock_server.uri();

@@ -337,8 +337,8 @@ async fn create_mock_raw() -> (MockServer, Value) {
    (mock_server, embedder_settings)
}

-async fn create_faulty_mock_raw(sender: mpsc::Sender<()>) -> (MockServer, Value) {
-    let mock_server = MockServer::start().await;
+async fn create_faulty_mock_raw(sender: mpsc::Sender<()>) -> (&'static MockServer, Value) {
+    let mock_server = Box::leak(Box::new(MockServer::start().await));
    let count = AtomicUsize::new(0);

    Mock::given(method("POST"))
@@ -355,7 +355,7 @@ async fn create_faulty_mock_raw(sender: mpsc::Sender<()>) -> (MockServer, Value)
            ResponseTemplate::new(500).set_body_string("Service Unavailable")
        }
    })
-    .mount(&mock_server)
+    .mount(mock_server)
    .await;

    let url = mock_server.uri();

@@ -101,14 +101,7 @@ async fn reset_embedder_documents() {
    server.wait_task(response.uid()).await;

    // Make sure the documents are still present
-    let (documents, _code) = index
-        .get_all_documents(GetAllDocumentsOptions {
-            limit: None,
-            offset: None,
-            retrieve_vectors: false,
-            fields: None,
-        })
-        .await;
+    let (documents, _code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
    snapshot!(json_string!(documents), @r###"
    {
      "results": [

@@ -15,6 +15,7 @@ use meilisearch_types::heed::{
};
use meilisearch_types::milli::constants::RESERVED_VECTORS_FIELD_NAME;
use meilisearch_types::milli::documents::{obkv_to_object, DocumentsBatchReader};
+use meilisearch_types::milli::index::EmbeddingsWithMetadata;
use meilisearch_types::milli::vector::parsed_vectors::{ExplicitVectors, VectorOrArrayOfVectors};
use meilisearch_types::milli::{obkv_to_json, BEU32};
use meilisearch_types::tasks::{Status, Task};
@@ -591,12 +592,21 @@ fn export_documents(
            .into());
        };

-        for (embedder_name, (embeddings, regenerate)) in embeddings {
+        for (
+            embedder_name,
+            EmbeddingsWithMetadata { embeddings, regenerate, has_fragments },
+        ) in embeddings
+        {
            let embeddings = ExplicitVectors {
                embeddings: Some(VectorOrArrayOfVectors::from_array_of_vectors(
                    embeddings,
                )),
-                regenerate,
+                regenerate: regenerate &&
+                    // Meilisearch does not handle dumps with fragments well: because the
+                    // fragments are marked as user-provided, all embeddings would be
+                    // regenerated on any settings change or document update.
+                    // To prevent this, we mark the embeddings as non-regenerate in this case.
+                    !has_fragments,
            };
            vectors
                .insert(embedder_name, serde_json::to_value(embeddings).unwrap());
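
The behavioral core of this hunk is one boolean. A minimal restatement, with illustrative names:

```rust
// When an embedder uses fragments, the dump marks its embeddings as
// user-provided, so `regenerate` is forced off: otherwise a later import
// would re-embed every document on the first settings change.
fn exported_regenerate(regenerate: bool, has_fragments: bool) -> bool {
    regenerate && !has_fragments
}

fn main() {
    assert!(exported_regenerate(true, false)); // plain embedder: keep regenerating
    assert!(!exported_regenerate(true, true)); // fragments: freeze the embeddings
}
```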

@@ -40,7 +40,7 @@ indexmap = { version = "2.9.0", features = ["serde"] }
json-depth-checker = { path = "../json-depth-checker" }
levenshtein_automata = { version = "0.2.1", features = ["fst_automaton"] }
memchr = "2.7.5"
-memmap2 = "0.9.5"
+memmap2 = "0.9.7"
obkv = "0.3.0"
once_cell = "1.21.3"
ordered-float = "5.0.0"
@@ -109,6 +109,7 @@ utoipa = { version = "5.4.0", features = [
    "openapi_extensions",
] }
lru = "0.14.0"
+twox-hash = { version = "2.1.1", default-features = false, features = ["std", "xxhash3_64", "xxhash64"] }

[dev-dependencies]
mimalloc = { version = "0.1.47", default-features = false }

Some files were not shown because too many files have changed in this diff.