Mirror of https://github.com/meilisearch/meilisearch.git, synced 2025-12-12 07:35:43 +00:00
Compare commits
574 Commits
The range spans 574 commits, from `2dd7f29edf` down to `d52c7dcc94`; the mirrored table captured only the abbreviated SHA1 of each commit, without authors, dates, or messages.
.github/ISSUE_TEMPLATE/new_feature_issue.md (5 changes, vendored)

```diff
@@ -24,6 +24,11 @@ TBD
 - [ ] If not, add the `no db change` label to your PR, and you're good to merge.
 - [ ] If yes, add the `db change` label to your PR. You'll receive a message explaining you what to do.
 
+### Reminders when adding features
+
+- [ ] Write unit tests using insta
+- [ ] Write declarative integration tests in [workloads/tests](https://github.com/meilisearch/meilisearch/tree/main/workloads/test). Specify the routes to call and then call `cargo xtask test workloads/tests/YOUR_TEST.json --update-responses` so that responses are automatically filled.
+
 ### Reminders when modifying the API
 
 - [ ] Update the openAPI file with utoipa:
```
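The new checklist item documents the declarative-test recording flow. A minimal sketch, using the `YOUR_TEST.json` placeholder name from the template itself (the plain re-run at the end is an assumption about the runner, not shown in this diff):

```sh
# Record expected responses into the workload file, then replay it to check they still match.
cargo xtask test workloads/tests/YOUR_TEST.json --update-responses
cargo xtask test workloads/tests/YOUR_TEST.json
```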
.github/dependabot.yml (1 change, vendored)

```diff
@@ -7,6 +7,5 @@ updates:
     schedule:
       interval: "monthly"
     labels:
-      - 'skip changelog'
       - 'dependencies'
     rebase-strategy: disabled
```
.github/release-draft-template.yml (33 changes, vendored; file deleted)

```diff
@@ -1,33 +0,0 @@
-name-template: 'v$RESOLVED_VERSION'
-tag-template: 'v$RESOLVED_VERSION'
-exclude-labels:
-  - 'skip changelog'
-version-resolver:
-  minor:
-    labels:
-      - 'enhancement'
-  default: patch
-categories:
-  - title: '⚠️ Breaking changes'
-    label: 'breaking-change'
-  - title: '🚀 Enhancements'
-    label: 'enhancement'
-  - title: '🐛 Bug Fixes'
-    label: 'bug'
-  - title: '🔒 Security'
-    label: 'security'
-  - title: '⚙️ Maintenance/misc'
-    label:
-      - 'maintenance'
-      - 'documentation'
-template: |
-  $CHANGES
-
-  ❤️ Huge thanks to our contributors: $CONTRIBUTORS.
-no-changes-template: 'Changes are coming soon 😎'
-sort-direction: 'ascending'
-replacers:
-  - search: '/(?:and )?@dependabot-preview(?:\[bot\])?,?/g'
-    replace: ''
-  - search: '/(?:and )?@dependabot(?:\[bot\])?,?/g'
-    replace: ''
```
.github/workflows/bench-manual.yml (2 changes, vendored)

```diff
@@ -18,7 +18,7 @@ jobs:
     timeout-minutes: 180 # 3h
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
         with:
           profile: minimal
 
```
.github/workflows/bench-pr.yml (4 changes, vendored)

```diff
@@ -66,9 +66,7 @@ jobs:
           fetch-depth: 0 # fetch full history to be able to get main commit sha
           ref: ${{ steps.comment-branch.outputs.head_ref }}
 
-      - uses: dtolnay/rust-toolchain@1.85
-        with:
-          profile: minimal
+      - uses: dtolnay/rust-toolchain@1.89
 
       - name: Run benchmarks on PR ${{ github.event.issue.id }}
         run: |
```
.github/workflows/bench-push-indexing.yml (4 changes, vendored)

```diff
@@ -12,9 +12,7 @@ jobs:
     timeout-minutes: 180 # 3h
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
-        with:
-          profile: minimal
+      - uses: dtolnay/rust-toolchain@1.89
 
       # Run benchmarks
       - name: Run benchmarks - Dataset ${BENCH_NAME} - Branch main - Commit ${{ github.sha }}
```
.github/workflows/benchmarks-manual.yml (2 changes, vendored)

```diff
@@ -18,7 +18,7 @@ jobs:
     timeout-minutes: 4320 # 72h
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
         with:
           profile: minimal
 
```
.github/workflows/benchmarks-pr.yml (2 changes, vendored)

```diff
@@ -44,7 +44,7 @@ jobs:
           exit 1
         fi
 
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
         with:
           profile: minimal
 
```

The same toolchain bump lands in four more benchmark workflows; their file headers were not captured in this view:

```diff
@@ -16,7 +16,7 @@ jobs:
     timeout-minutes: 4320 # 72h
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
         with:
           profile: minimal
 
```

```diff
@@ -15,7 +15,7 @@ jobs:
     runs-on: benchmarks
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
         with:
           profile: minimal
 
```

```diff
@@ -15,7 +15,7 @@ jobs:
     runs-on: benchmarks
     steps:
      - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
         with:
           profile: minimal
 
```

```diff
@@ -15,7 +15,7 @@ jobs:
     runs-on: benchmarks
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
         with:
           profile: minimal
 
```
.github/workflows/db-change-comments.yml (4 changes, vendored)

```diff
@@ -19,6 +19,7 @@ env:
 
   - [ ] Detail the change to the DB format and why they are forward compatible
   - [ ] Forward-compatibility: A database created before this PR and using the features touched by this PR was able to be opened by a Meilisearch produced by the code of this PR.
+  - [ ] Declarative test: add a [declarative test containing a dumpless upgrade](https://github.com/meilisearch/meilisearch/blob/main/TESTING.md#typical-usage)
 
 
 ## This PR makes breaking changes
@@ -35,8 +36,7 @@ env:
   - [ ] Write the code to go from the old database to the new one
     - If the change happened in milli, the upgrade function should be written and called [here](https://github.com/meilisearch/meilisearch/blob/3fd86e8d76d7d468b0095d679adb09211ca3b6c0/crates/milli/src/update/upgrade/mod.rs#L24-L47)
     - If the change happened in the index-scheduler, we've never done it yet, but the right place to do it should be [here](https://github.com/meilisearch/meilisearch/blob/3fd86e8d76d7d468b0095d679adb09211ca3b6c0/crates/index-scheduler/src/scheduler/process_upgrade/mod.rs#L13)
-  - [ ] Write an integration test [here](https://github.com/meilisearch/meilisearch/blob/main/crates/meilisearch/tests/upgrade/mod.rs) ensuring you can read the old database, upgrade to the new database, and read the new database as expected
-
+  - [ ] Declarative test: add a [declarative test containing a dumpless upgrade](https://github.com/meilisearch/meilisearch/blob/main/TESTING.md#typical-usage)
 
 jobs:
   add-comment:
```
.github/workflows/flaky-tests.yml (8 changes, vendored)

```diff
@@ -13,11 +13,17 @@ jobs:
       image: ubuntu:22.04
     steps:
       - uses: actions/checkout@v5
+      - name: Clean space as per https://github.com/actions/virtual-environments/issues/709
+        run: |
+          sudo rm -rf "/opt/ghc" || true
+          sudo rm -rf "/usr/share/dotnet" || true
+          sudo rm -rf "/usr/local/lib/android" || true
+          sudo rm -rf "/usr/local/share/boost" || true
       - name: Install needed dependencies
         run: |
           apt-get update && apt-get install -y curl
           apt-get install build-essential -y
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
       - name: Install cargo-flaky
         run: cargo install cargo-flaky
       - name: Run cargo flaky in the dumps
```
.github/workflows/fuzzer-indexing.yml (4 changes, vendored)

```diff
@@ -12,9 +12,7 @@ jobs:
     timeout-minutes: 4320 # 72h
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
-        with:
-          profile: minimal
+      - uses: dtolnay/rust-toolchain@1.89
 
       # Run benchmarks
       - name: Run the fuzzer
```
.github/workflows/publish-apt-brew-pkg.yml (8 changes, vendored)

```diff
@@ -25,7 +25,13 @@ jobs:
         run: |
           apt-get update && apt-get install -y curl
           apt-get install build-essential -y
-      - uses: dtolnay/rust-toolchain@1.85
+      - name: Clean space as per https://github.com/actions/virtual-environments/issues/709
+        run: |
+          sudo rm -rf "/opt/ghc" || true
+          sudo rm -rf "/usr/share/dotnet" || true
+          sudo rm -rf "/usr/local/lib/android" || true
+          sudo rm -rf "/usr/local/share/boost" || true
+      - uses: dtolnay/rust-toolchain@1.89
       - name: Install cargo-deb
         run: cargo install cargo-deb
       - uses: actions/checkout@v5
```
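For context, `cargo-deb` packages the release build into a `.deb`. A hedged local sketch (the workflow's exact `cargo deb` invocation is outside this hunk, so the flags and the `meilisearch` package name are assumptions):

```sh
# Install the packager, then build a .deb for the meilisearch crate.
cargo install cargo-deb
cargo deb -p meilisearch   # output lands under target/debian/
```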
.github/workflows/publish-docker-images.yml (177 changes, vendored)

```diff
@@ -14,10 +14,105 @@ on:
   workflow_dispatch:
 
 jobs:
-  docker:
-    runs-on: docker
+  build:
+    runs-on: ${{ matrix.runner }}
 
+    strategy:
+      matrix:
+        platform: [amd64, arm64]
+        edition: [community, enterprise]
+        include:
+          - platform: amd64
+            runner: ubuntu-24.04
+          - platform: arm64
+            runner: ubuntu-24.04-arm
+          - edition: community
+            registry: getmeili/meilisearch
+            feature-flag: ""
+          - edition: enterprise
+            registry: getmeili/meilisearch-enterprise
+            feature-flag: "--features enterprise"
+
+    permissions: {}
+    steps:
+      - uses: actions/checkout@v5
+
+      - name: Prepare
+        run: |
+          platform=linux/${{ matrix.platform }}
+          echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+        with:
+          platforms: linux/${{ matrix.platform }}
+          install: true
+
+      - name: Login to Docker Hub
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Docker meta
+        id: meta
+        uses: docker/metadata-action@v5
+        with:
+          images: ${{ matrix.registry }}
+          # Prevent `latest` to be updated for each new tag pushed.
+          # We need latest and `vX.Y` tags to only be pushed for the stable Meilisearch releases.
+          flavor: latest=false
+          tags: |
+            type=ref,event=tag
+            type=raw,value=nightly,enable=${{ github.event_name != 'push' }}
+            type=semver,pattern=v{{major}}.{{minor}},enable=${{ steps.check-tag-format.outputs.stable == 'true' }}
+            type=semver,pattern=v{{major}},enable=${{ steps.check-tag-format.outputs.stable == 'true' }}
+            type=raw,value=latest,enable=${{ steps.check-tag-format.outputs.stable == 'true' && steps.check-tag-format.outputs.latest == 'true' }}
+
+      - name: Build and push by digest
+        uses: docker/build-push-action@v6
+        id: build-and-push
+        with:
+          platforms: linux/${{ matrix.platform }}
+          labels: ${{ steps.meta.outputs.labels }}
+          tags: ${{ matrix.registry }}
+          outputs: type=image,push-by-digest=true,name-canonical=true,push=true
+          build-args: |
+            COMMIT_SHA=${{ github.sha }}
+            COMMIT_DATE=${{ steps.build-metadata.outputs.date }}
+            GIT_TAG=${{ github.ref_name }}
+            EXTRA_ARGS=${{ matrix.feature-flag }}
+
+      - name: Export digest
+        run: |
+          mkdir -p ${{ runner.temp }}/digests
+          digest="${{ steps.build-and-push.outputs.digest }}"
+          touch "${{ runner.temp }}/digests/${digest#sha256:}"
+
+      - name: Upload digest
+        uses: actions/upload-artifact@v4
+        with:
+          name: digests-${{ matrix.edition }}-${{ env.PLATFORM_PAIR }}
+          path: ${{ runner.temp }}/digests/*
+          if-no-files-found: error
+          retention-days: 1
+
+  merge:
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        edition: [community, enterprise]
+        include:
+          - edition: community
+            registry: getmeili/meilisearch
+          - edition: enterprise
+            registry: getmeili/meilisearch-enterprise
+    needs:
+      - build
+
     permissions:
       id-token: write # This is needed to use Cosign in keyless mode
 
     steps:
       - uses: actions/checkout@v5
 
@@ -58,14 +153,15 @@ jobs:
 
           echo "date=$commit_date" >> $GITHUB_OUTPUT
 
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-
       - name: Install cosign
-        uses: sigstore/cosign-installer@d58896d6a1865668819e1d91763c7751a165e159 # tag=v3.9.2
+        uses: sigstore/cosign-installer@d7543c93d881b35a8faa02e8e3605f69b7a1ce62 # tag=v3.10.0
 
+      - name: Download digests
+        uses: actions/download-artifact@v4
+        with:
+          path: ${{ runner.temp }}/digests
+          pattern: digests-${{ matrix.edition }}-*
+          merge-multiple: true
+
       - name: Login to Docker Hub
         uses: docker/login-action@v3
@@ -73,11 +169,14 @@ jobs:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
 
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
       - name: Docker meta
         id: meta
         uses: docker/metadata-action@v5
         with:
-          images: getmeili/meilisearch
+          images: ${{ matrix.registry }}
           # Prevent `latest` to be updated for each new tag pushed.
           # We need latest and `vX.Y` tags to only be pushed for the stable Meilisearch releases.
           flavor: latest=false
@@ -88,33 +187,31 @@ jobs:
             type=semver,pattern=v{{major}},enable=${{ steps.check-tag-format.outputs.stable == 'true' }}
             type=raw,value=latest,enable=${{ steps.check-tag-format.outputs.stable == 'true' && steps.check-tag-format.outputs.latest == 'true' }}
 
-      - name: Build and push
-        uses: docker/build-push-action@v6
-        id: build-and-push
-        with:
-          push: true
-          platforms: linux/amd64,linux/arm64
-          tags: ${{ steps.meta.outputs.tags }}
-          build-args: |
-            COMMIT_SHA=${{ github.sha }}
-            COMMIT_DATE=${{ steps.build-metadata.outputs.date }}
-            GIT_TAG=${{ github.ref_name }}
+      - name: Create manifest list and push
+        working-directory: ${{ runner.temp }}/digests
+        run: |
+          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
+            $(printf '${{ matrix.registry }}@sha256:%s ' *)
+
+      - name: Inspect image to fetch digest to sign
+        run: |
+          digest=$(docker buildx imagetools inspect --format='{{ json .Manifest }}' ${{ matrix.registry }}:${{ steps.meta.outputs.version }} | jq -r '.digest')
+          echo "DIGEST=${digest}" >> $GITHUB_ENV
 
       - name: Sign the images with GitHub OIDC Token
         env:
-          DIGEST: ${{ steps.build-and-push.outputs.digest }}
           TAGS: ${{ steps.meta.outputs.tags }}
         run: |
           images=""
           for tag in ${TAGS}; do
-            images+="${tag}@${DIGEST} "
+            images+="${tag}@${{ env.DIGEST }} "
           done
           cosign sign --yes ${images}
 
-      # /!\ Don't touch this without checking with Cloud team
-      - name: Send CI information to Cloud team
+      # /!\ Don't touch this without checking with engineers working on the Cloud code base on #discussion-engineering Slack channel
+      - name: Notify meilisearch-cloud
         # Do not send if nightly build (i.e. 'schedule' or 'workflow_dispatch' event)
-        if: github.event_name == 'push'
+        if: ${{ (github.event_name == 'push') && (matrix.edition == 'enterprise') }}
         uses: peter-evans/repository-dispatch@v3
         with:
           token: ${{ secrets.MEILI_BOT_GH_PAT }}
@@ -122,21 +219,13 @@ jobs:
           event-type: cloud-docker-build
           client-payload: '{ "meilisearch_version": "${{ github.ref_name }}", "stable": "${{ steps.check-tag-format.outputs.stable }}" }'
 
-      # Send notification to Swarmia to notify of a deployment: https://app.swarmia.com
-      # - name: 'Setup jq'
-      #   uses: dcarbone/install-jq-action
-      # - name: Send deployment to Swarmia
-      #   if: github.event_name == 'push' && success()
-      #   run: |
-      #     JSON_STRING=$( jq --null-input --compact-output \
-      #     --arg version "${{ github.ref_name }}" \
-      #     --arg appName "meilisearch" \
-      #     --arg environment "production" \
-      #     --arg commitSha "${{ github.sha }}" \
-      #     --arg repositoryFullName "${{ github.repository }}" \
-      #     '{"version": $version, "appName": $appName, "environment": $environment, "commitSha": $commitSha, "repositoryFullName": $repositoryFullName}' )
-
-      #     curl -H "Authorization: ${{ secrets.SWARMIA_DEPLOYMENTS_AUTHORIZATION }}" \
-      #     -H "Content-Type: application/json" \
-      #     -d "$JSON_STRING" \
-      #     https://hook.swarmia.com/deployments
+      # /!\ Don't touch this without checking with integration team members on #discussion-integrations Slack channel
+      - name: Notify meilisearch-kubernetes
+        # Do not send if nightly build (i.e. 'schedule' or 'workflow_dispatch' event), or if not stable
+        if: ${{ github.event_name == 'push' && matrix.edition == 'community' && steps.check-tag-format.outputs.stable == 'true' }}
+        uses: peter-evans/repository-dispatch@v3
+        with:
+          token: ${{ secrets.MEILI_BOT_GH_PAT }}
+          repository: meilisearch/meilisearch-kubernetes
+          event-type: meilisearch-release
+          client-payload: '{ "version": "${{ github.ref_name }}" }'
```
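Since the images are signed keyless with the GitHub OIDC token, a consumer can check a published tag with `cosign verify`. A sketch, assuming the certificate identity matches this repository's workflow path (the diff does not spell out the expected identity):

```sh
cosign verify \
  --certificate-oidc-issuer https://token.actions.githubusercontent.com \
  --certificate-identity-regexp 'https://github\.com/meilisearch/meilisearch/\.github/workflows/.*' \
  getmeili/meilisearch:latest
```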
.github/workflows/publish-release-assets.yml (185 changes, vendored)

```diff
@@ -11,7 +11,7 @@ jobs:
   check-version:
     name: Check the version validity
     runs-on: ubuntu-latest
-    # No need to check the version for dry run (cron)
+    # No need to check the version for dry run (cron or workflow_dispatch)
     steps:
       - uses: actions/checkout@v5
       # Check if the tag has the v<nmumber>.<number>.<number> format.
@@ -32,161 +32,66 @@ jobs:
         if: github.event_name == 'release' && steps.check-tag-format.outputs.stable == 'true'
         run: bash .github/scripts/check-release.sh
 
-  publish-linux:
-    name: Publish binary for Linux
-    runs-on: ubuntu-latest
-    needs: check-version
-    container:
-      # Use ubuntu-22.04 to compile with glibc 2.35
-      image: ubuntu:22.04
-    steps:
-      - uses: actions/checkout@v5
-      - name: Install needed dependencies
-        run: |
-          apt-get update && apt-get install -y curl
-          apt-get install build-essential -y
-      - uses: dtolnay/rust-toolchain@1.85
-      - name: Build
-        run: cargo build --release --locked
-      # No need to upload binaries for dry run (cron)
-      - name: Upload binaries to release
-        if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.11.2
-        with:
-          repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
-          file: target/release/meilisearch
-          asset_name: meilisearch-linux-amd64
-          tag: ${{ github.ref }}
-
-  publish-macos-windows:
-    name: Publish binary for ${{ matrix.os }}
+  publish-binaries:
+    name: Publish binary for ${{ matrix.release }} ${{ matrix.edition }} edition
     runs-on: ${{ matrix.os }}
-    needs: check-version
     strategy:
       fail-fast: false
       matrix:
-        os: [macos-13, windows-2022]
+        edition: [community, enterprise]
+        release:
+          [macos-amd64, macos-aarch64, windows, linux-amd64, linux-aarch64]
         include:
-          - os: macos-13
-            artifact_name: meilisearch
-            asset_name: meilisearch-macos-amd64
-          - os: windows-2022
-            artifact_name: meilisearch.exe
-            asset_name: meilisearch-windows-amd64.exe
+          - edition: "community"
+            feature-flag: ""
+            edition-suffix: ""
+          - edition: "enterprise"
+            feature-flag: "--features enterprise"
+            edition-suffix: "enterprise-"
+          - release: macos-amd64
+            os: macos-15-intel
+            binary_path: release/meilisearch
+            asset_name: macos-amd64
+            extra-args: ""
+          - release: macos-aarch64
+            os: macos-14
+            binary_path: aarch64-apple-darwin/release/meilisearch
+            asset_name: macos-apple-silicon
+            extra-args: "--target aarch64-apple-darwin"
+          - release: windows
+            os: windows-2022
+            binary_path: release/meilisearch.exe
+            asset_name: windows-amd64.exe
+            extra-args: ""
+          - release: linux-amd64
+            os: ubuntu-22.04
+            binary_path: x86_64-unknown-linux-gnu/release/meilisearch
+            asset_name: linux-amd64
+            extra-args: "--target x86_64-unknown-linux-gnu"
+          - release: linux-aarch64
+            os: ubuntu-22.04-arm
+            binary_path: aarch64-unknown-linux-gnu/release/meilisearch
+            asset_name: linux-aarch64
+            extra-args: "--target aarch64-unknown-linux-gnu"
+    needs: check-version
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
       - name: Build
-        run: cargo build --release --locked
-      # No need to upload binaries for dry run (cron)
+        run: cargo build --release --locked ${{ matrix.feature-flag }} ${{ matrix.extra-args }}
+      # No need to upload binaries for dry run (cron or workflow_dispatch)
       - name: Upload binaries to release
         if: github.event_name == 'release'
         uses: svenstaro/upload-release-action@2.11.2
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
-          file: target/release/${{ matrix.artifact_name }}
-          asset_name: ${{ matrix.asset_name }}
-          tag: ${{ github.ref }}
-
-  publish-macos-apple-silicon:
-    name: Publish binary for macOS silicon
-    runs-on: macos-13
-    needs: check-version
-    strategy:
-      matrix:
-        include:
-          - target: aarch64-apple-darwin
-            asset_name: meilisearch-macos-apple-silicon
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v5
-      - name: Installing Rust toolchain
-        uses: dtolnay/rust-toolchain@1.85
-        with:
-          profile: minimal
-          target: ${{ matrix.target }}
-      - name: Cargo build
-        uses: actions-rs/cargo@v1
-        with:
-          command: build
-          args: --release --target ${{ matrix.target }}
-      - name: Upload the binary to release
-        # No need to upload binaries for dry run (cron)
-        if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.11.2
-        with:
-          repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
-          file: target/${{ matrix.target }}/release/meilisearch
-          asset_name: ${{ matrix.asset_name }}
-          tag: ${{ github.ref }}
-
-  publish-aarch64:
-    name: Publish binary for aarch64
-    runs-on: ubuntu-latest
-    needs: check-version
-    env:
-      DEBIAN_FRONTEND: noninteractive
-    container:
-      # Use ubuntu-22.04 to compile with glibc 2.35
-      image: ubuntu:22.04
-    strategy:
-      matrix:
-        include:
-          - target: aarch64-unknown-linux-gnu
-            asset_name: meilisearch-linux-aarch64
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v5
-      - name: Install needed dependencies
-        run: |
-          apt-get update -y && apt upgrade -y
-          apt-get install -y curl build-essential gcc-aarch64-linux-gnu
-      - name: Set up Docker for cross compilation
-        run: |
-          apt-get install -y curl apt-transport-https ca-certificates software-properties-common
-          curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
-          add-apt-repository "deb [arch=$(dpkg --print-architecture)] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
-          apt-get update -y && apt-get install -y docker-ce
-      - name: Installing Rust toolchain
-        uses: dtolnay/rust-toolchain@1.85
-        with:
-          profile: minimal
-          target: ${{ matrix.target }}
-      - name: Configure target aarch64 GNU
-        ## Environment variable is not passed using env:
-        ## LD gold won't work with MUSL
-        # env:
-        #   JEMALLOC_SYS_WITH_LG_PAGE: 16
-        #   RUSTFLAGS: '-Clink-arg=-fuse-ld=gold'
-        run: |
-          echo '[target.aarch64-unknown-linux-gnu]' >> ~/.cargo/config
-          echo 'linker = "aarch64-linux-gnu-gcc"' >> ~/.cargo/config
-          echo 'JEMALLOC_SYS_WITH_LG_PAGE=16' >> $GITHUB_ENV
-      - name: Install a default toolchain that will be used to build cargo cross
-        run: |
-          rustup default stable
-      - name: Cargo build
-        uses: actions-rs/cargo@v1
-        with:
-          command: build
-          use-cross: true
-          args: --release --target ${{ matrix.target }}
-        env:
-          CROSS_DOCKER_IN_DOCKER: true
-      - name: List target output files
-        run: ls -lR ./target
-      - name: Upload the binary to release
-        # No need to upload binaries for dry run (cron)
-        if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.11.2
-        with:
-          repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
-          file: target/${{ matrix.target }}/release/meilisearch
-          asset_name: ${{ matrix.asset_name }}
+          file: target/${{ matrix.binary_path }}
+          asset_name: meilisearch-${{ matrix.edition-suffix }}${{ matrix.asset_name }}
           tag: ${{ github.ref }}
 
   publish-openapi-file:
     name: Publish OpenAPI file
+    needs: check-version
     runs-on: ubuntu-latest
     steps:
       - name: Checkout code
@@ -201,7 +106,7 @@ jobs:
           cd crates/openapi-generator
           cargo run --release -- --pretty --output ../../meilisearch.json
       - name: Upload OpenAPI to Release
-        # No need to upload for dry run (cron)
+        # No need to upload for dry run (cron or workflow_dispatch)
         if: github.event_name == 'release'
         uses: svenstaro/upload-release-action@2.11.2
         with:
```
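Expanding one cell of the new `publish-binaries` matrix by hand shows how `feature-flag`, `extra-args`, and `binary_path` combine (enterprise edition on Linux aarch64; the `rustup target add` is only needed outside CI):

```sh
rustup target add aarch64-unknown-linux-gnu
cargo build --release --locked --features enterprise --target aarch64-unknown-linux-gnu
# The workflow then uploads target/aarch64-unknown-linux-gnu/release/meilisearch
# as the release asset meilisearch-enterprise-linux-aarch64.
```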
.github/workflows/release-drafter.yml (20 changes, vendored; file deleted)

```diff
@@ -1,20 +0,0 @@
-name: Release Drafter
-
-permissions:
-  contents: read
-  pull-requests: write
-
-on:
-  push:
-    branches:
-      - main
-
-jobs:
-  update_release_draft:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: release-drafter/release-drafter@v6
-        with:
-          config-name: release-draft-template.yml
-        env:
-          GITHUB_TOKEN: ${{ secrets.RELEASE_DRAFTER_TOKEN }}
```
.github/workflows/sdks-tests.yml (40 changes, vendored)

```diff
@@ -50,7 +50,7 @@ jobs:
         with:
           repository: meilisearch/meilisearch-dotnet
       - name: Setup .NET Core
-        uses: actions/setup-dotnet@v4
+        uses: actions/setup-dotnet@v5
         with:
           dotnet-version: "8.0.x"
       - name: Install dependencies
@@ -68,7 +68,7 @@ jobs:
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        image: getmeili/meilisearch-enterprise:${{ needs.define-docker-image.outputs.docker-image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -92,7 +92,7 @@ jobs:
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        image: getmeili/meilisearch-enterprise:${{ needs.define-docker-image.outputs.docker-image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -100,7 +100,7 @@ jobs:
           - '7700:7700'
     steps:
       - name: Set up Go
-        uses: actions/setup-go@v5
+        uses: actions/setup-go@v6
        with:
          go-version: stable
       - uses: actions/checkout@v5
@@ -122,7 +122,7 @@ jobs:
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        image: getmeili/meilisearch-enterprise:${{ needs.define-docker-image.outputs.docker-image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -135,13 +135,13 @@ jobs:
       - name: Set up Java
         uses: actions/setup-java@v5
         with:
-          java-version: 8
-          distribution: 'zulu'
+          java-version: 17
+          distribution: 'temurin'
           cache: gradle
       - name: Grant execute permission for gradlew
         run: chmod +x gradlew
       - name: Build and run unit and integration tests
-        run: ./gradlew build integrationTest
+        run: ./gradlew build integrationTest --info
 
   meilisearch-js-tests:
     needs: define-docker-image
@@ -149,7 +149,7 @@ jobs:
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        image: getmeili/meilisearch-enterprise:${{ needs.define-docker-image.outputs.docker-image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -160,7 +160,7 @@ jobs:
         with:
           repository: meilisearch/meilisearch-js
       - name: Setup node
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v5
         with:
           cache: 'yarn'
       - name: Install dependencies
@@ -184,7 +184,7 @@ jobs:
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        image: getmeili/meilisearch-enterprise:${{ needs.define-docker-image.outputs.docker-image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -213,7 +213,7 @@ jobs:
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        image: getmeili/meilisearch-enterprise:${{ needs.define-docker-image.outputs.docker-image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -224,7 +224,7 @@ jobs:
         with:
           repository: meilisearch/meilisearch-python
       - name: Set up Python
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v6
       - name: Install pipenv
         uses: dschep/install-pipenv-action@v1
       - name: Install dependencies
@@ -238,7 +238,7 @@ jobs:
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        image: getmeili/meilisearch-enterprise:${{ needs.define-docker-image.outputs.docker-image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -263,7 +263,7 @@ jobs:
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        image: getmeili/meilisearch-enterprise:${{ needs.define-docker-image.outputs.docker-image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -284,7 +284,7 @@ jobs:
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        image: getmeili/meilisearch-enterprise:${{ needs.define-docker-image.outputs.docker-image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -307,7 +307,7 @@ jobs:
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        image: getmeili/meilisearch-enterprise:${{ needs.define-docker-image.outputs.docker-image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -318,7 +318,7 @@ jobs:
         with:
           repository: meilisearch/meilisearch-js-plugins
       - name: Setup node
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v5
         with:
           cache: yarn
       - name: Install dependencies
@@ -338,7 +338,7 @@ jobs:
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        image: getmeili/meilisearch-enterprise:${{ needs.define-docker-image.outputs.docker-image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -370,7 +370,7 @@ jobs:
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        image: getmeili/meilisearch-enterprise:${{ needs.define-docker-image.outputs.docker-image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
```
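Every SDK job now tests against the enterprise image. A rough local stand-in for the service container they declare (the `nightly` tag and master key are placeholders; the workflow resolves the actual tag via `define-docker-image`):

```sh
docker run --rm -d -p 7700:7700 \
  -e MEILI_MASTER_KEY=masterKey \
  -e MEILI_NO_ANALYTICS=true \
  getmeili/meilisearch-enterprise:nightly
```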
.github/workflows/test-suite.yml (vendored) — 164 lines changed

```diff
@@ -15,31 +15,40 @@ env:
 
 jobs:
   test-linux:
-    name: Tests on ubuntu-22.04
-    runs-on: ubuntu-latest
-    container:
-      # Use ubuntu-22.04 to compile with glibc 2.35
-      image: ubuntu:22.04
+    name: Tests on Ubuntu
+    runs-on: ${{ matrix.runner }}
+    strategy:
+      matrix:
+        runner: [ubuntu-22.04, ubuntu-22.04-arm]
+        features: ["", "--features enterprise"]
     steps:
       - uses: actions/checkout@v5
-      - name: Install needed dependencies
+      - name: check free space before
+        run: df -h
+      - name: Clean space as per https://github.com/actions/virtual-environments/issues/709
         run: |
-          apt-get update && apt-get install -y curl
-          apt-get install build-essential -y
+          sudo rm -rf "/opt/ghc" || true
+          sudo rm -rf "/usr/share/dotnet" || true
+          sudo rm -rf "/usr/local/lib/android" || true
+          sudo rm -rf "/usr/local/share/boost" || true
+      - name: check free space after
+        run: df -h
       - name: Setup test with Rust stable
-        uses: dtolnay/rust-toolchain@1.85
+        uses: dtolnay/rust-toolchain@1.89
       - name: Cache dependencies
         uses: Swatinem/rust-cache@v2.8.0
-      - name: Run cargo check without any default features
+        with:
+          key: ${{ matrix.features }}
+      - name: Run cargo build without any default features
         uses: actions-rs/cargo@v1
         with:
           command: build
-          args: --locked --release --no-default-features --all
+          args: --locked --no-default-features --all
       - name: Run cargo test
         uses: actions-rs/cargo@v1
         with:
           command: test
-          args: --locked --release --all
+          args: --locked --all ${{ matrix.features }}
 
   test-others:
     name: Tests on ${{ matrix.os }}
@@ -47,51 +56,58 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        os: [macos-13, windows-2022]
+        os: [macos-14, windows-2022]
+        features: ["", "--features enterprise"]
+    if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
     steps:
       - uses: actions/checkout@v5
       - name: Cache dependencies
         uses: Swatinem/rust-cache@v2.8.0
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
-      - name: Run cargo check without any default features
+      - name: Run cargo build without any default features
         uses: actions-rs/cargo@v1
         with:
           command: build
-          args: --locked --release --no-default-features --all
+          args: --locked --no-default-features --all
       - name: Run cargo test
         uses: actions-rs/cargo@v1
         with:
           command: test
-          args: --locked --release --all
+          args: --locked --all ${{ matrix.features }}
 
   test-all-features:
     name: Tests almost all features
-    runs-on: ubuntu-latest
-    container:
-      # Use ubuntu-22.04 to compile with glibc 2.35
-      image: ubuntu:22.04
+    runs-on: ubuntu-22.04
     if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
     steps:
       - uses: actions/checkout@v5
-      - name: Install needed dependencies
+      - name: Clean space as per https://github.com/actions/virtual-environments/issues/709
         run: |
-          apt-get update
-          apt-get install --assume-yes build-essential curl
-      - uses: dtolnay/rust-toolchain@1.85
+          sudo rm -rf "/opt/ghc" || true
+          sudo rm -rf "/usr/share/dotnet" || true
+          sudo rm -rf "/usr/local/lib/android" || true
+          sudo rm -rf "/usr/local/share/boost" || true
+      - uses: dtolnay/rust-toolchain@1.89
       - name: Run cargo build with almost all features
         run: |
-          cargo build --workspace --locked --release --features "$(cargo xtask list-features --exclude-feature cuda,test-ollama)"
+          cargo build --workspace --locked --features "$(cargo xtask list-features --exclude-feature cuda,test-ollama)"
       - name: Run cargo test with almost all features
         run: |
-          cargo test --workspace --locked --release --features "$(cargo xtask list-features --exclude-feature cuda,test-ollama)"
+          cargo test --workspace --locked --features "$(cargo xtask list-features --exclude-feature cuda,test-ollama)"
 
   ollama-ubuntu:
     name: Test with Ollama
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     env:
       MEILI_TEST_OLLAMA_SERVER: "http://localhost:11434"
     steps:
       - uses: actions/checkout@v5
+      - name: Clean space as per https://github.com/actions/virtual-environments/issues/709
+        run: |
+          sudo rm -rf "/opt/ghc" || true
+          sudo rm -rf "/usr/share/dotnet" || true
+          sudo rm -rf "/usr/local/lib/android" || true
+          sudo rm -rf "/usr/local/share/boost" || true
       - name: Install Ollama
         run: |
           curl -fsSL https://ollama.com/install.sh | sudo -E sh
@@ -115,21 +131,21 @@ jobs:
         uses: actions-rs/cargo@v1
         with:
           command: test
-          args: --locked --release --all --features test-ollama ollama
+          args: --locked -p meilisearch --features test-ollama ollama
 
   test-disabled-tokenization:
     name: Test disabled tokenization
-    runs-on: ubuntu-latest
-    container:
-      image: ubuntu:22.04
+    runs-on: ubuntu-22.04
     if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
     steps:
       - uses: actions/checkout@v5
-      - name: Install needed dependencies
+      - name: Clean space as per https://github.com/actions/virtual-environments/issues/709
         run: |
-          apt-get update
-          apt-get install --assume-yes build-essential curl
-      - uses: dtolnay/rust-toolchain@1.85
+          sudo rm -rf "/opt/ghc" || true
+          sudo rm -rf "/usr/share/dotnet" || true
+          sudo rm -rf "/usr/local/lib/android" || true
+          sudo rm -rf "/usr/local/share/boost" || true
+      - uses: dtolnay/rust-toolchain@1.89
       - name: Run cargo tree without default features and check lindera is not present
         run: |
           if cargo tree -f '{p} {f}' -e normal --no-default-features | grep -qz lindera; then
@@ -140,36 +156,39 @@ jobs:
         run: |
           cargo tree -f '{p} {f}' -e normal | grep lindera -qz
 
-  # We run tests in debug also, to make sure that the debug_assertions are hit
-  test-debug:
-    name: Run tests in debug
-    runs-on: ubuntu-latest
-    container:
-      # Use ubuntu-22.04 to compile with glibc 2.35
-      image: ubuntu:22.04
+  build:
+    name: Build in release
+    runs-on: ubuntu-22.04
     steps:
       - uses: actions/checkout@v5
-      - name: Install needed dependencies
+      - name: Clean space as per https://github.com/actions/virtual-environments/issues/709
         run: |
-          apt-get update && apt-get install -y curl
-          apt-get install build-essential -y
-      - uses: dtolnay/rust-toolchain@1.85
+          sudo rm -rf "/opt/ghc" || true
+          sudo rm -rf "/usr/share/dotnet" || true
+          sudo rm -rf "/usr/local/lib/android" || true
+          sudo rm -rf "/usr/local/share/boost" || true
+      - uses: dtolnay/rust-toolchain@1.89
       - name: Cache dependencies
         uses: Swatinem/rust-cache@v2.8.0
-      - name: Run tests in debug
-        uses: actions-rs/cargo@v1
-        with:
-          command: test
-          args: --locked --all
+      - name: Build
+        run: cargo build --release --locked --target x86_64-unknown-linux-gnu
 
   clippy:
     name: Run Clippy
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
+    strategy:
+      matrix:
+        features: ["", "--features enterprise"]
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - name: Clean space as per https://github.com/actions/virtual-environments/issues/709
+        run: |
+          sudo rm -rf "/opt/ghc" || true
+          sudo rm -rf "/usr/share/dotnet" || true
+          sudo rm -rf "/usr/local/lib/android" || true
+          sudo rm -rf "/usr/local/share/boost" || true
+      - uses: dtolnay/rust-toolchain@1.89
         with:
-          profile: minimal
           components: clippy
       - name: Cache dependencies
         uses: Swatinem/rust-cache@v2.8.0
@@ -177,18 +196,21 @@ jobs:
         uses: actions-rs/cargo@v1
         with:
           command: clippy
-          args: --all-targets -- --deny warnings
+          args: --all-targets ${{ matrix.features }} -- --deny warnings
 
   fmt:
     name: Run Rustfmt
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
    steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - name: Clean space as per https://github.com/actions/virtual-environments/issues/709
+        run: |
+          sudo rm -rf "/opt/ghc" || true
+          sudo rm -rf "/usr/share/dotnet" || true
+          sudo rm -rf "/usr/local/lib/android" || true
+          sudo rm -rf "/usr/local/share/boost" || true
+      - uses: dtolnay/rust-toolchain@1.89
         with:
-          profile: minimal
-          toolchain: nightly-2024-07-09
-          override: true
           components: rustfmt
       - name: Cache dependencies
         uses: Swatinem/rust-cache@v2.8.0
@@ -199,3 +221,23 @@ jobs:
         run: |
           echo -ne "\n" > crates/benchmarks/benches/datasets_paths.rs
           cargo fmt --all -- --check
+
+  declarative-tests:
+    name: Run declarative tests
+    runs-on: ubuntu-22.04-arm
+    permissions:
+      contents: read
+    steps:
+      - uses: actions/checkout@v5
+      - name: Clean space as per https://github.com/actions/virtual-environments/issues/709
+        run: |
+          sudo rm -rf "/opt/ghc" || true
+          sudo rm -rf "/usr/share/dotnet" || true
+          sudo rm -rf "/usr/local/lib/android" || true
+          sudo rm -rf "/usr/local/share/boost" || true
+      - uses: dtolnay/rust-toolchain@1.89
+      - name: Cache dependencies
+        uses: Swatinem/rust-cache@v2.8.0
+      - name: Run declarative tests
+        run: |
+          cargo xtask test workloads/tests/*.json
```
.github/workflows/update-cargo-toml-version.yml (vendored) — 10 lines changed

```diff
@@ -18,9 +18,13 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
-        with:
-          profile: minimal
+      - name: Clean space as per https://github.com/actions/virtual-environments/issues/709
+        run: |
+          sudo rm -rf "/opt/ghc" || true
+          sudo rm -rf "/usr/share/dotnet" || true
+          sudo rm -rf "/usr/local/lib/android" || true
+          sudo rm -rf "/usr/local/share/boost" || true
+      - uses: dtolnay/rust-toolchain@1.89
       - name: Install sd
         run: cargo install sd
       - name: Update Cargo.toml file
```
```diff
@@ -124,6 +124,7 @@ They are JSON files with the following structure (comments are not actually supp
 {
     // Name of the workload. Must be unique to the workload, as it will be used to group results on the dashboard.
     "name": "hackernews.ndjson_1M,no-threads",
+    "type": "bench",
     // Number of consecutive runs of the commands that should be performed.
     // Each run uses a fresh instance of Meilisearch and a fresh database.
     // Each run produces its own report file.
```
Cargo.lock (generated) — 2507 lines changed; diff suppressed because it is too large.
```diff
@@ -23,7 +23,7 @@ members = [
 ]
 
 [workspace.package]
-version = "1.20.0"
+version = "1.29.0"
 authors = [
     "Quentin de Quelen <quentin@dequelen.me>",
     "Clément Renault <clement@meilisearch.com>",
@@ -50,3 +50,5 @@ opt-level = 3
 opt-level = 3
 [profile.dev.package.roaring]
 opt-level = 3
+[profile.dev.package.gemm-f16]
+opt-level = 3
```
(deleted file)

```diff
@@ -1,7 +0,0 @@
-[build.env]
-passthrough = [
-    "RUST_BACKTRACE",
-    "CARGO_TERM_COLOR",
-    "RUSTFLAGS",
-    "JEMALLOC_SYS_WITH_LG_PAGE"
-]
```
Dockerfile — 10 lines changed

```diff
@@ -1,5 +1,5 @@
 # Compile
-FROM rust:1.85-alpine3.20 AS compiler
+FROM rust:1.89-alpine3.22 AS compiler
 
 RUN apk add -q --no-cache build-base openssl-dev
 
@@ -8,19 +8,17 @@ WORKDIR /
 ARG COMMIT_SHA
 ARG COMMIT_DATE
 ARG GIT_TAG
+ARG EXTRA_ARGS
 ENV VERGEN_GIT_SHA=${COMMIT_SHA} VERGEN_GIT_COMMIT_TIMESTAMP=${COMMIT_DATE} VERGEN_GIT_DESCRIBE=${GIT_TAG}
 ENV RUSTFLAGS="-C target-feature=-crt-static"
 
 COPY . .
 RUN set -eux; \
     apkArch="$(apk --print-arch)"; \
-    if [ "$apkArch" = "aarch64" ]; then \
-      export JEMALLOC_SYS_WITH_LG_PAGE=16; \
-    fi && \
-    cargo build --release -p meilisearch -p meilitool
+    cargo build --release -p meilisearch -p meilitool ${EXTRA_ARGS}
 
 # Run
-FROM alpine:3.20
+FROM alpine:3.22
 LABEL org.opencontainers.image.source="https://github.com/meilisearch/meilisearch"
 
 ENV MEILI_HTTP_ADDR 0.0.0.0:7700
```
LICENSE — 28 lines changed

```diff
@@ -1,29 +1,9 @@
-MIT License
+# License
 
 Copyright (c) 2019-2025 Meili SAS
 
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
+Part of this work fall under the Meilisearch Enterprise Edition (EE) and are licensed under the Business Source License 1.1, please refer to [LICENSE-EE](./LICENSE-EE) for details.
 
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
+The other parts of this work are licensed under the [MIT license](./LICENSE-MIT).
 
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+`SPDX-License-Identifier: MIT AND BUSL-1.1`
-
----
-
-🔒 Meilisearch Enterprise Edition (EE)
-
-Certain parts of this codebase are not licensed under the MIT license and governed by the Business Source License 1.1.
-
-See the LICENSE-EE file for details.
```
LICENSE-MIT (new file) — 21 lines

```diff
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2019-2025 Meili SAS
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
```
```diff
@@ -39,6 +39,7 @@
 ## 🖥 Examples
 
 - [**Movies**](https://where2watch.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=organization) — An application to help you find streaming platforms to watch movies using [hybrid search](https://www.meilisearch.com/solutions/hybrid-search?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demos).
+- [**Flickr**](https://flickr.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=organization) — Search and explore one hundred million Flickr images with semantic search.
 - [**Ecommerce**](https://ecommerce.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demos) — Ecommerce website using disjunctive [facets](https://www.meilisearch.com/docs/learn/fine_tuning_results/faceted_search?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demos), range and rating filtering, and pagination.
 - [**Songs**](https://music.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demos) — Search through 47 million of songs.
 - [**SaaS**](https://saas.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demos) — Search for contacts, deals, and companies in this [multi-tenant](https://www.meilisearch.com/docs/learn/security/multitenancy_tenant_tokens?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demos) CRM application.
@@ -121,7 +122,7 @@ If you want to know more about the kind of data we collect and what we use it fo
 
 Meilisearch is a search engine created by [Meili](https://www.meilisearch.com/careers), a software development company headquartered in France and with team members all over the world. Want to know more about us? [Check out our blog!](https://blog.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=contact)
 
-🗞 [Subscribe to our newsletter](https://meilisearch.us2.list-manage.com/subscribe?u=27870f7b71c908a8b359599fb&id=79582d828e) if you don't want to miss any updates! We promise we won't clutter your mailbox: we only send one edition every two months.
+🗞 [Subscribe to our newsletter](https://share-eu1.hsforms.com/1LN5N0x_GQgq7ss7tXmSykwfg3aq) if you don't want to miss any updates! We promise we won't clutter your mailbox: we only send one edition every two months.
 
 💌 Want to make a suggestion or give feedback? Here are some of the channels where you can reach us:
```
TESTING.md (new file) — 326 lines added

# Declarative tests

Declarative tests ensure that Meilisearch features remain stable across versions.

While we already have unit tests, those are run against **temporary databases** that are created fresh each time and therefore never risk corruption.

Declarative tests instead **simulate the lifetime of a database**: they chain together commands and requests to change the binary, verifying that database state and API responses remain consistent.

## Basic example

```jsonc
{
  "type": "test",
  "name": "api-keys",
  "binary": { // the first command will run on the binary following this specification.
    "source": "release", // get the binary as a release from GitHub
    "version": "1.19.0", // version to fetch
    "edition": "community" // edition to fetch
  },
  "commands": []
}
```

This example defines a no-op test (it does nothing).

If the file is saved at `workloads/tests/example.json`, you can run it with:

```bash
cargo xtask test workloads/tests/example.json
```

## Commands

Commands represent API requests sent to Meilisearch endpoints during a test.

They are executed sequentially, and their responses can be validated to ensure consistent behavior across upgrades.

```jsonc
{
  "route": "keys",
  "method": "POST",
  "body": {
    "inline": {
      "actions": [
        "search",
        "documents.add"
      ],
      "description": "Test API Key",
      "expiresAt": null,
      "indexes": [ "movies" ]
    }
  }
}
```

This command issues a `POST /keys` request, creating an API key with permissions to search and add documents in the `movies` index.

### Using assets in commands

To keep tests concise and reusable, you can define **assets** at the root of the workload file.

Assets are external data sources (such as datasets) that are cached between runs, making tests faster and easier to read.

```jsonc
{
  "type": "test",
  "name": "movies",
  "binary": {
    "source": "release",
    "version": "1.19.0",
    "edition": "community"
  },
  "assets": {
    "movies.json": {
      "local_location": null,
      "remote_location": "https://milli-benchmarks.fra1.digitaloceanspaces.com/bench/datasets/movies.json",
      "sha256": "5b6e4cb660bc20327776e8a33ea197b43d9ec84856710ead1cc87ab24df77de1"
    }
  },
  "commands": [
    {
      "route": "indexes/movies/documents",
      "method": "POST",
      "body": {
        "asset": "movies.json"
      }
    }
  ]
}
```

In this example:
- The `movies.json` dataset is defined as an asset, pointing to a remote URL.
- The SHA-256 checksum ensures integrity.
- The `POST /indexes/movies/documents` command uses this asset as the request body.

This makes the test much cleaner than inlining a large dataset directly into the command.

For asset handling, please refer to the [declarative benchmarks documentation](/BENCHMARKS.md#adding-new-assets).

### Asserting responses

Commands can specify both the **expected status code** and the **expected response body**.

```jsonc
{
  "route": "indexes/movies/documents",
  "method": "POST",
  "body": {
    "asset": "movies.json"
  },
  "expectedStatus": 202,
  "expectedResponse": {
    "enqueuedAt": "[timestamp]", // Set to a bracketed string to ignore the value
    "indexUid": "movies",
    "status": "enqueued",
    "taskUid": 1,
    "type": "documentAdditionOrUpdate"
  },
  "synchronous": "WaitForTask"
}
```

Manually writing `expectedResponse` fields can be tedious.

Instead, you can let the test runner populate them automatically:

```bash
# Run the workload to populate expected fields. Only adds the missing ones, doesn't change existing data
cargo xtask test workloads/tests/example.json --add-missing-responses

# OR

# Run the workload to populate expected fields. Updates all fields including existing ones
cargo xtask test workloads/tests/example.json --update-responses
```

This workflow is recommended:

1. Write the test without expected fields.
2. Run it with `--add-missing-responses` to capture the actual responses.
3. Review and commit the generated expectations.

## Changing binary

It is possible to insert an instruction to change the current Meilisearch instance from one binary specification to another during a test.

When executed, such an instruction will:
1. Stop the current Meilisearch instance.
2. Fetch the binary specified by the instruction.
3. Restart the server with the specified binary on the same database.

```jsonc
{
  "type": "test",
  "name": "movies",
  "binary": {
    "source": "release",
    "version": "1.19.0", // start with version v1.19.0
    "edition": "community"
  },
  "assets": {
    "movies.json": {
      "local_location": null,
      "remote_location": "https://milli-benchmarks.fra1.digitaloceanspaces.com/bench/datasets/movies.json",
      "sha256": "5b6e4cb660bc20327776e8a33ea197b43d9ec84856710ead1cc87ab24df77de1"
    }
  },
  "commands": [
    // setup some data
    {
      "route": "indexes/movies/documents",
      "method": "POST",
      "body": {
        "asset": "movies.json"
      }
    },
    // switch binary to v1.24.0
    {
      "binary": {
        "source": "release",
        "version": "1.24.0",
        "edition": "community"
      }
    }
  ]
}
```

### Typical Usage

In most cases, the change binary instruction will be used to update a database.

- **Set up** some data using commands on an older version.
- **Upgrade** to the latest version.
- **Assert** that the data and API behavior remain correct after the upgrade.

To properly test the dumpless upgrade, one should typically:

1. Open the database without processing the update task: Use a `binary` instruction to switch to the desired version, passing `--experimental-dumpless-upgrade` and `--experimental-max-number-of-batched-tasks=0` as extra CLI arguments
2. Check that the search, stats and task queue still work.
3. Open the database and process the update task: Use a `binary` instruction to switch to the desired version, passing `--experimental-dumpless-upgrade` as the extra CLI argument. Use a `health` command to wait for the upgrade task to finish.
4. Check that the indexing, search, stats, and task queue still work.

```jsonc
{
  "type": "test",
  "name": "movies",
  "binary": {
    "source": "release",
    "version": "1.12.0",
    "edition": "community"
  },
  "commands": [
    // 0. Run commands to populate the database
    {
      // ..
    },
    // 1. Open the database with new MS without processing the update task
    {
      "binary": {
        "source": "build", // build the binary from the sources in the current git repository
        "edition": "community",
        "extraCliArgs": [
          "--experimental-dumpless-upgrade", // allows to open with a newer MS
          "--experimental-max-number-of-batched-tasks=0" // prevent processing of the update task
        ]
      }
    },
    // 2. Check the search etc.
    {
      // ..
    },
    // 3. Open the database with new MS and processing the update task
    {
      "binary": {
        "source": "build", // build the binary from the sources in the current git repository
        "edition": "community",
        "extraCliArgs": [
          "--experimental-dumpless-upgrade" // allows to open with a newer MS
          // no `--experimental-max-number-of-batched-tasks=0`
        ]
      }
    },
    // 4. Check the indexing, search, etc.
    {
      // ..
    }
  ]
}
```

This ensures backward compatibility: databases created with older Meilisearch versions should remain functional and consistent after an upgrade.
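Step 3 above relies on a `health` command to wait until the upgrade task has been processed. As a minimal sketch following the command format shown earlier — the exact response body (`"status": "available"`) is an assumption here, not taken from this document, and should be checked against the actual `GET /health` output — such a command might look like:

```jsonc
{
  "route": "health",
  "method": "GET",
  "expectedStatus": 200,
  "expectedResponse": {
    "status": "available" // assumed value, verify against your Meilisearch version
  },
  "synchronous": "WaitForResponse"
}
```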

## Variables

Sometimes a command needs to use a value returned by a **previous response**.
These values can be captured and reused using the register field.

```jsonc
{
  "route": "keys",
  "method": "POST",
  "body": {
    "inline": {
      "actions": [
        "search",
        "documents.add"
      ],
      "description": "Test API Key",
      "expiresAt": null,
      "indexes": [ "movies" ]
    }
  },
  "expectedResponse": {
    "key": "c6f64630bad2996b1f675007c8800168e14adf5d6a7bb1a400a6d2b158050eaf",
    // ...
  },
  "register": {
    "key": "/key"
  },
  "synchronous": "WaitForResponse"
}
```

The `register` field captures the value at the JSON path `/key` from the response.
Paths follow the **JavaScript Object Notation Pointer (RFC 6901)** format.
Registered variables are available for all subsequent commands.
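Because paths are RFC 6901 pointers, they can also reach into nested objects and arrays. As an illustrative sketch — the `GET /tasks` response shape assumed here is for illustration only, not taken from this document — a register could capture the uid of the first returned task:

```jsonc
{
  "route": "tasks",
  "method": "GET",
  "register": {
    // "/results/0/uid" is an RFC 6901 pointer: field "results", array index 0, field "uid".
    "task_id": "/results/0/uid"
  }
}
```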

Registered variables can be referenced by wrapping their name in double curly braces:

In the route/path:

```jsonc
{
  "route": "tasks/{{ task_id }}",
  "method": "GET"
}
```

In the request body:

```jsonc
{
  "route": "indexes/movies/documents",
  "method": "PATCH",
  "body": {
    "inline": {
      "id": "{{ document_id }}",
      "overview": "Shazam turns evil and the world is in danger.",
    }
  }
}
```

Or they can be referenced by their name (**without curly braces**) as an API key:

```jsonc
{
  "route": "indexes/movies/documents",
  "method": "POST",
  "body": { /* ... */ },
  "apiKeyVariable": "key" // The **content** of the key variable will be used as an API key
}
```
```diff
@@ -11,27 +11,27 @@ edition.workspace = true
 license.workspace = true
 
 [dependencies]
-anyhow = "1.0.98"
+anyhow = "1.0.100"
-bumpalo = "3.18.1"
+bumpalo = "3.19.0"
-csv = "1.3.1"
+csv = "1.4.0"
-memmap2 = "0.9.7"
+memmap2 = "0.9.9"
 milli = { path = "../milli" }
-mimalloc = { version = "0.1.47", default-features = false }
+mimalloc = { version = "0.1.48", default-features = false }
-serde_json = { version = "1.0.140", features = ["preserve_order"] }
+serde_json = { version = "1.0.145", features = ["preserve_order"] }
-tempfile = "3.20.0"
+tempfile = "3.23.0"
 
 [dev-dependencies]
-criterion = { version = "0.6.0", features = ["html_reports"] }
+criterion = { version = "0.7.0", features = ["html_reports"] }
 rand = "0.8.5"
 rand_chacha = "0.3.1"
 roaring = "0.10.12"
 
 [build-dependencies]
-anyhow = "1.0.98"
+anyhow = "1.0.100"
-bytes = "1.10.1"
+bytes = "1.11.0"
-convert_case = "0.8.0"
+convert_case = "0.9.0"
-flate2 = "1.1.2"
+flate2 = "1.1.5"
-reqwest = { version = "0.12.20", features = ["blocking", "rustls-tls"], default-features = false }
+reqwest = { version = "0.12.24", features = ["blocking", "rustls-tls"], default-features = false }
 
 [features]
 default = ["milli/all-tokenizations"]
```
@@ -21,6 +21,10 @@ use roaring::RoaringBitmap;
|
|||||||
#[global_allocator]
|
#[global_allocator]
|
||||||
static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
|
static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
|
||||||
|
|
||||||
|
fn no_cancel() -> bool {
|
||||||
|
false
|
||||||
|
}
|
||||||
|
|
||||||
const BENCHMARK_ITERATION: usize = 10;
|
const BENCHMARK_ITERATION: usize = 10;
|
||||||
|
|
||||||
fn setup_dir(path: impl AsRef<Path>) {
|
fn setup_dir(path: impl AsRef<Path>) {
|
||||||
@@ -65,7 +69,7 @@ fn setup_settings<'t>(
|
|||||||
let sortable_fields = sortable_fields.iter().map(|s| s.to_string()).collect();
|
let sortable_fields = sortable_fields.iter().map(|s| s.to_string()).collect();
|
||||||
builder.set_sortable_fields(sortable_fields);
|
builder.set_sortable_fields(sortable_fields);
|
||||||
|
|
||||||
builder.execute(&|| false, &Progress::default(), Default::default()).unwrap();
|
builder.execute(&no_cancel, &Progress::default(), Default::default()).unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
fn setup_index_with_settings(
|
fn setup_index_with_settings(
|
||||||
@@ -152,7 +156,7 @@ fn indexing_songs_default(c: &mut Criterion) {
|
|||||||
&rtxn,
|
&rtxn,
|
||||||
None,
|
None,
|
||||||
&mut new_fields_ids_map,
|
&mut new_fields_ids_map,
|
||||||
&|| false,
|
&no_cancel,
|
||||||
Progress::default(),
|
Progress::default(),
|
||||||
None,
|
None,
|
||||||
)
|
)
|
||||||
@@ -168,7 +172,7 @@ fn indexing_songs_default(c: &mut Criterion) {
|
|||||||
primary_key,
|
primary_key,
|
||||||
&document_changes,
|
&document_changes,
|
||||||
RuntimeEmbedders::default(),
|
RuntimeEmbedders::default(),
|
||||||
&|| false,
|
&no_cancel,
|
||||||
&Progress::default(),
|
&Progress::default(),
|
||||||
&Default::default(),
|
&Default::default(),
|
||||||
)
|
)
|
||||||
@@ -220,7 +224,7 @@ fn reindexing_songs_default(c: &mut Criterion) {
|
|||||||
&rtxn,
|
&rtxn,
|
||||||
None,
|
None,
|
||||||
&mut new_fields_ids_map,
|
&mut new_fields_ids_map,
|
||||||
&|| false,
|
&no_cancel,
|
||||||
Progress::default(),
|
Progress::default(),
|
||||||
None,
|
None,
|
||||||
)
|
)
|
||||||
@@ -236,7 +240,7 @@ fn reindexing_songs_default(c: &mut Criterion) {
|
|||||||
primary_key,
|
primary_key,
|
||||||
&document_changes,
|
&document_changes,
|
||||||
RuntimeEmbedders::default(),
|
RuntimeEmbedders::default(),
|
||||||
&|| false,
|
&no_cancel,
|
||||||
&Progress::default(),
|
&Progress::default(),
|
||||||
&Default::default(),
|
&Default::default(),
|
||||||
)
|
)
|
||||||
@@ -266,7 +270,7 @@ fn reindexing_songs_default(c: &mut Criterion) {
|
|||||||
&rtxn,
|
&rtxn,
|
||||||
None,
|
None,
|
||||||
&mut new_fields_ids_map,
|
&mut new_fields_ids_map,
|
||||||
&|| false,
|
&no_cancel,
|
||||||
Progress::default(),
|
Progress::default(),
|
||||||
None,
|
None,
|
||||||
)
|
)
|
||||||
@@ -282,7 +286,7 @@ fn reindexing_songs_default(c: &mut Criterion) {
|
|||||||
primary_key,
|
primary_key,
|
||||||
&document_changes,
|
&document_changes,
|
||||||
RuntimeEmbedders::default(),
|
RuntimeEmbedders::default(),
|
||||||
&|| false,
|
&no_cancel,
|
||||||
&Progress::default(),
|
&Progress::default(),
|
||||||
&Default::default(),
|
&Default::default(),
|
||||||
)
|
)
|
||||||
@@ -336,7 +340,7 @@ fn deleting_songs_in_batches_default(c: &mut Criterion) {
|
|||||||
&rtxn,
|
&rtxn,
|
||||||
None,
|
None,
|
||||||
&mut new_fields_ids_map,
|
&mut new_fields_ids_map,
|
||||||
&|| false,
|
&no_cancel,
|
||||||
Progress::default(),
|
Progress::default(),
|
||||||
None,
|
None,
|
||||||
)
|
)
|
||||||
@@ -352,7 +356,7 @@ fn deleting_songs_in_batches_default(c: &mut Criterion) {
|
|||||||
primary_key,
|
primary_key,
|
||||||
&document_changes,
|
&document_changes,
|
||||||
RuntimeEmbedders::default(),
|
RuntimeEmbedders::default(),
|
||||||
&|| false,
|
&no_cancel,
|
||||||
&Progress::default(),
|
&Progress::default(),
|
||||||
&Default::default(),
|
&Default::default(),
|
||||||
)
|
)
|
||||||
@@ -414,7 +418,7 @@ fn indexing_songs_in_three_batches_default(c: &mut Criterion) {
|
|||||||
&rtxn,
|
&rtxn,
|
||||||
None,
|
None,
|
||||||
&mut new_fields_ids_map,
|
&mut new_fields_ids_map,
|
||||||
&|| false,
|
&no_cancel,
|
||||||
Progress::default(),
|
Progress::default(),
|
||||||
None,
|
None,
|
||||||
)
|
)
|
||||||
@@ -430,7 +434,7 @@ fn indexing_songs_in_three_batches_default(c: &mut Criterion) {
|
|||||||
primary_key,
|
primary_key,
|
||||||
&document_changes,
|
&document_changes,
|
||||||
RuntimeEmbedders::default(),
|
RuntimeEmbedders::default(),
|
||||||
&|| false,
|
&no_cancel,
|
||||||
&Progress::default(),
|
&Progress::default(),
|
||||||
&Default::default(),
|
&Default::default(),
|
||||||
)
|
)
|
||||||
@@ -460,7 +464,7 @@ fn indexing_songs_in_three_batches_default(c: &mut Criterion) {
|
|||||||
&rtxn,
|
&rtxn,
|
||||||
None,
|
None,
|
||||||
&mut new_fields_ids_map,
|
&mut new_fields_ids_map,
|
||||||
&|| false,
|
&no_cancel,
|
||||||
Progress::default(),
|
Progress::default(),
|
||||||
None,
|
None,
|
||||||
)
|
)
|
||||||
@@ -476,7 +480,7 @@ fn indexing_songs_in_three_batches_default(c: &mut Criterion) {
|
|||||||
primary_key,
|
primary_key,
|
||||||
&document_changes,
|
&document_changes,
|
||||||
RuntimeEmbedders::default(),
|
RuntimeEmbedders::default(),
|
||||||
&|| false,
|
&no_cancel,
|
||||||
&Progress::default(),
|
&Progress::default(),
|
||||||
&Default::default(),
|
&Default::default(),
|
||||||
)
|
)
|
||||||
@@ -502,7 +506,7 @@ fn indexing_songs_in_three_batches_default(c: &mut Criterion) {
|
|||||||
&rtxn,
|
&rtxn,
|
||||||
None,
|
None,
|
||||||
&mut new_fields_ids_map,
|
&mut new_fields_ids_map,
|
||||||
&|| false,
|
&no_cancel,
|
||||||
Progress::default(),
|
Progress::default(),
|
||||||
None,
|
None,
|
||||||
)
|
)
|
||||||
@@ -518,7 +522,7 @@ fn indexing_songs_in_three_batches_default(c: &mut Criterion) {
|
|||||||
primary_key,
|
primary_key,
|
||||||
&document_changes,
|
&document_changes,
|
||||||
RuntimeEmbedders::default(),
|
RuntimeEmbedders::default(),
|
||||||
&|| false,
|
&no_cancel,
|
||||||
&Progress::default(),
|
&Progress::default(),
|
||||||
&Default::default(),
|
&Default::default(),
|
||||||
)
|
)
|
||||||
@@ -571,7 +575,7 @@ fn indexing_songs_without_faceted_numbers(c: &mut Criterion) {
|
|||||||
&rtxn,
|
&rtxn,
|
||||||
None,
|
None,
|
||||||
&mut new_fields_ids_map,
|
&mut new_fields_ids_map,
|
||||||
&|| false,
|
&no_cancel,
|
||||||
Progress::default(),
|
Progress::default(),
|
||||||
None,
|
None,
|
||||||
)
|
)
|
||||||
@@ -587,7 +591,7 @@ fn indexing_songs_without_faceted_numbers(c: &mut Criterion) {
|
|||||||
primary_key,
|
primary_key,
|
||||||
&document_changes,
|
&document_changes,
|
||||||
RuntimeEmbedders::default(),
|
RuntimeEmbedders::default(),
|
||||||
&|| false,
|
&no_cancel,
|
||||||
&Progress::default(),
|
&Progress::default(),
|
||||||
&Default::default(),
|
&Default::default(),
|
||||||
)
|
)
|
||||||
@@ -639,7 +643,7 @@ fn indexing_songs_without_faceted_fields(c: &mut Criterion) {
|
|||||||
&rtxn,
|
&rtxn,
|
||||||
None,
|
None,
|
||||||
&mut new_fields_ids_map,
|
&mut new_fields_ids_map,
|
||||||
&|| false,
|
&no_cancel,
|
||||||
Progress::default(),
|
Progress::default(),
|
||||||
None,
|
None,
|
||||||
)
|
)
|
||||||
@@ -655,7 +659,7 @@ fn indexing_songs_without_faceted_fields(c: &mut Criterion) {
|
|||||||
primary_key,
|
primary_key,
|
||||||
&document_changes,
|
&document_changes,
|
||||||
RuntimeEmbedders::default(),
|
RuntimeEmbedders::default(),
|
||||||
&|| false,
|
&no_cancel,
|
||||||
&Progress::default(),
|
&Progress::default(),
|
||||||
&Default::default(),
|
&Default::default(),
|
||||||
)
|
)
|
||||||
@@ -707,7 +711,7 @@ fn indexing_wiki(c: &mut Criterion) {
|
|||||||
&rtxn,
|
&rtxn,
|
||||||
None,
|
None,
|
||||||
&mut new_fields_ids_map,
|
&mut new_fields_ids_map,
|
||||||
&|| false,
|
&no_cancel,
|
||||||
Progress::default(),
|
Progress::default(),
|
||||||
None,
|
None,
|
||||||
)
|
)
|
||||||
@@ -723,7 +727,7 @@ fn indexing_wiki(c: &mut Criterion) {
|
|||||||
primary_key,
|
primary_key,
|
||||||
&document_changes,
|
&document_changes,
|
||||||
RuntimeEmbedders::default(),
|
RuntimeEmbedders::default(),
|
||||||
&|| false,
|
&no_cancel,
|
||||||
&Progress::default(),
|
&Progress::default(),
|
||||||
&Default::default(),
|
&Default::default(),
|
||||||
)
|
)
|
||||||
@@ -774,7 +778,7 @@ fn reindexing_wiki(c: &mut Criterion) {
|
|||||||
&rtxn,
|
&rtxn,
|
||||||
None,
|
None,
|
||||||
&mut new_fields_ids_map,
|
&mut new_fields_ids_map,
|
||||||
&|| false,
|
&no_cancel,
|
||||||
Progress::default(),
|
Progress::default(),
|
||||||
None,
|
None,
|
||||||
)
|
)
|
||||||
@@ -790,7 +794,7 @@ fn reindexing_wiki(c: &mut Criterion) {
|
|||||||
primary_key,
|
primary_key,
|
||||||
&document_changes,
|
&document_changes,
|
||||||
RuntimeEmbedders::default(),
|
RuntimeEmbedders::default(),
|
||||||
&|| false,
|
&no_cancel,
|
||||||
&Progress::default(),
|
&Progress::default(),
|
||||||
&Default::default(),
|
&Default::default(),
|
||||||
)
|
)
|
||||||
@@ -820,7 +824,7 @@ fn reindexing_wiki(c: &mut Criterion) {
|
|||||||
&rtxn,
|
&rtxn,
|
||||||
None,
|
None,
|
||||||
&mut new_fields_ids_map,
|
&mut new_fields_ids_map,
|
||||||
&|| false,
|
&no_cancel,
|
||||||
Progress::default(),
|
Progress::default(),
|
||||||
None,
|
None,
|
||||||
)
|
)
|
||||||
@@ -836,7 +840,7 @@ fn reindexing_wiki(c: &mut Criterion) {
|
|||||||
primary_key,
|
primary_key,
|
||||||
&document_changes,
|
&document_changes,
|
||||||
RuntimeEmbedders::default(),
|
RuntimeEmbedders::default(),
|
||||||
&|| false,
|
&no_cancel,
|
||||||
&Progress::default(),
|
&Progress::default(),
|
||||||
&Default::default(),
|
&Default::default(),
|
||||||
)
|
)
|
||||||
@@ -889,7 +893,7 @@ fn deleting_wiki_in_batches_default(c: &mut Criterion) {
|
|||||||
&rtxn,
|
&rtxn,
|
||||||
None,
|
None,
|
||||||
&mut new_fields_ids_map,
|
&mut new_fields_ids_map,
|
||||||
&|| false,
|
&no_cancel,
|
||||||
Progress::default(),
|
Progress::default(),
|
||||||
None,
|
None,
|
||||||
)
|
)
|
||||||
@@ -905,7 +909,7 @@ fn deleting_wiki_in_batches_default(c: &mut Criterion) {
|
|||||||
primary_key,
|
primary_key,
|
||||||
&document_changes,
|
&document_changes,
|
||||||
RuntimeEmbedders::default(),
|
RuntimeEmbedders::default(),
|
||||||
&|| false,
|
&no_cancel,
|
||||||
&Progress::default(),
|
&Progress::default(),
|
||||||
&Default::default(),
|
&Default::default(),
|
||||||
)
|
)
|
||||||
@@ -967,7 +971,7 @@ fn indexing_wiki_in_three_batches(c: &mut Criterion) {
|
|||||||
&rtxn,
|
&rtxn,
|
||||||
None,
|
None,
|
||||||
&mut new_fields_ids_map,
|
&mut new_fields_ids_map,
|
||||||
&|| false,
|
&no_cancel,
|
||||||
Progress::default(),
|
Progress::default(),
|
||||||
None,
|
None,
|
||||||
)
|
)
|
||||||
@@ -983,7 +987,7 @@ fn indexing_wiki_in_three_batches(c: &mut Criterion) {
|
|||||||
primary_key,
|
primary_key,
|
||||||
&document_changes,
|
&document_changes,
|
||||||
RuntimeEmbedders::default(),
|
RuntimeEmbedders::default(),
|
||||||
&|| false,
|
&no_cancel,
|
||||||
&Progress::default(),
|
&Progress::default(),
|
||||||
&Default::default(),
|
&Default::default(),
|
||||||
)
|
)
|
||||||
@@ -1014,7 +1018,7 @@ fn indexing_wiki_in_three_batches(c: &mut Criterion) {
|
|||||||
&rtxn,
|
&rtxn,
|
||||||
None,
|
None,
|
||||||
&mut new_fields_ids_map,
|
&mut new_fields_ids_map,
|
||||||
&|| false,
|
&no_cancel,
|
||||||
Progress::default(),
|
Progress::default(),
|
||||||
None,
|
None,
|
||||||
)
|
)
|
||||||
@@ -1030,7 +1034,7 @@ fn indexing_wiki_in_three_batches(c: &mut Criterion) {
|
|||||||
primary_key,
|
primary_key,
|
||||||
&document_changes,
|
&document_changes,
|
||||||
RuntimeEmbedders::default(),
|
RuntimeEmbedders::default(),
|
||||||
&|| false,
|
&no_cancel,
|
||||||
&Progress::default(),
|
&Progress::default(),
|
||||||
&Default::default(),
|
&Default::default(),
|
||||||
)
|
)
|
||||||
@@ -1057,7 +1061,7 @@ fn indexing_wiki_in_three_batches(c: &mut Criterion) {
|
|||||||
&rtxn,
|
&rtxn,
|
||||||
None,
|
None,
|
||||||
&mut new_fields_ids_map,
|
&mut new_fields_ids_map,
|
||||||
&|| false,
|
&no_cancel,
|
||||||
Progress::default(),
|
Progress::default(),
|
||||||
None,
|
None,
|
||||||
)
|
)
|
||||||
@@ -1073,7 +1077,7 @@ fn indexing_wiki_in_three_batches(c: &mut Criterion) {
|
|||||||
primary_key,
|
primary_key,
|
||||||
&document_changes,
|
&document_changes,
|
||||||
RuntimeEmbedders::default(),
|
RuntimeEmbedders::default(),
|
||||||
&|| false,
|
&no_cancel,
|
||||||
&Progress::default(),
|
&Progress::default(),
|
||||||
&Default::default(),
|
&Default::default(),
|
||||||
)
|
)
|
||||||
@@ -1125,7 +1129,7 @@ fn indexing_movies_default(c: &mut Criterion) {
|
|||||||
&rtxn,
|
&rtxn,
|
||||||
None,
|
None,
|
||||||
&mut new_fields_ids_map,
|
&mut new_fields_ids_map,
|
||||||
&|| false,
|
&no_cancel,
|
||||||
Progress::default(),
|
Progress::default(),
|
||||||
None,
|
None,
|
||||||
)
|
)
|
||||||
@@ -1141,7 +1145,7 @@ fn indexing_movies_default(c: &mut Criterion) {
|
|||||||
primary_key,
|
primary_key,
|
||||||
&document_changes,
|
&document_changes,
|
||||||
RuntimeEmbedders::default(),
|
RuntimeEmbedders::default(),
|
||||||
&|| false,
|
&no_cancel,
|
||||||
&Progress::default(),
|
&Progress::default(),
|
||||||
&Default::default(),
|
&Default::default(),
|
||||||
)
|
)
|
||||||
@@ -1192,7 +1196,7 @@ fn reindexing_movies_default(c: &mut Criterion) {
|
|||||||
&rtxn,
|
&rtxn,
|
||||||
None,
|
None,
|
||||||
&mut new_fields_ids_map,
|
&mut new_fields_ids_map,
|
||||||
&|| false,
|
&no_cancel,
|
||||||
Progress::default(),
|
Progress::default(),
|
||||||
None,
|
None,
|
||||||
)
|
)
|
||||||
@@ -1208,7 +1212,7 @@ fn reindexing_movies_default(c: &mut Criterion) {
|
|||||||
primary_key,
|
primary_key,
|
||||||
&document_changes,
|
&document_changes,
|
||||||
RuntimeEmbedders::default(),
|
RuntimeEmbedders::default(),
|
||||||
&|| false,
|
&no_cancel,
|
||||||
&Progress::default(),
|
&Progress::default(),
|
||||||
&Default::default(),
|
&Default::default(),
|
||||||
)
|
)
|
||||||
@@ -1238,7 +1242,7 @@ fn reindexing_movies_default(c: &mut Criterion) {
|
|||||||
&rtxn,
|
&rtxn,
|
||||||
None,
|
None,
|
||||||
&mut new_fields_ids_map,
|
&mut new_fields_ids_map,
|
||||||
&|| false,
|
&no_cancel,
|
||||||
Progress::default(),
|
Progress::default(),
|
||||||
None,
|
None,
|
||||||
)
|
)
|
||||||
@@ -1254,7 +1258,7 @@ fn reindexing_movies_default(c: &mut Criterion) {
|
|||||||
primary_key,
|
primary_key,
|
||||||
&document_changes,
|
&document_changes,
|
||||||
RuntimeEmbedders::default(),
|
RuntimeEmbedders::default(),
|
||||||
&|| false,
|
&no_cancel,
|
||||||
&Progress::default(),
|
&Progress::default(),
|
||||||
&Default::default(),
|
&Default::default(),
|
||||||
)
|
)
|
||||||
@@ -1307,7 +1311,7 @@ fn deleting_movies_in_batches_default(c: &mut Criterion) {
|
|||||||
&rtxn,
|
&rtxn,
|
||||||
None,
|
None,
|
||||||
&mut new_fields_ids_map,
|
&mut new_fields_ids_map,
|
||||||
&|| false,
|
&no_cancel,
|
||||||
Progress::default(),
|
Progress::default(),
|
||||||
None,
|
None,
|
||||||
)
|
)
|
||||||
@@ -1323,7 +1327,7 @@ fn deleting_movies_in_batches_default(c: &mut Criterion) {
|
|||||||
primary_key,
|
primary_key,
|
||||||
&document_changes,
|
&document_changes,
|
||||||
RuntimeEmbedders::default(),
|
RuntimeEmbedders::default(),
|
||||||
&|| false,
|
&no_cancel,
|
||||||
&Progress::default(),
|
&Progress::default(),
|
||||||
&Default::default(),
|
&Default::default(),
|
||||||
)
|
)
|
||||||
@@ -1372,7 +1376,7 @@ fn delete_documents_from_ids(index: Index, document_ids_to_delete: Vec<RoaringBi
|
|||||||
Some(primary_key),
|
Some(primary_key),
|
||||||
&document_changes,
|
&document_changes,
|
||||||
RuntimeEmbedders::default(),
|
RuntimeEmbedders::default(),
|
||||||
&|| false,
|
&no_cancel,
|
||||||
&Progress::default(),
|
&Progress::default(),
|
||||||
&Default::default(),
|
&Default::default(),
|
||||||
)
|
)
|
||||||
@@ -1422,7 +1426,7 @@ fn indexing_movies_in_three_batches(c: &mut Criterion) {
|
|||||||
&rtxn,
|
&rtxn,
|
||||||
None,
|
None,
|
||||||
&mut new_fields_ids_map,
|
&mut new_fields_ids_map,
|
||||||
&|| false,
|
&no_cancel,
|
||||||
Progress::default(),
|
Progress::default(),
|
||||||
None,
|
None,
|
||||||
)
|
)
|
||||||
@@ -1438,7 +1442,7 @@ fn indexing_movies_in_three_batches(c: &mut Criterion) {
|
|||||||
primary_key,
|
primary_key,
|
||||||
&document_changes,
|
&document_changes,
|
||||||
RuntimeEmbedders::default(),
|
RuntimeEmbedders::default(),
|
||||||
&|| false,
|
&no_cancel,
|
||||||
&Progress::default(),
|
&Progress::default(),
|
||||||
&Default::default(),
|
&Default::default(),
|
||||||
)
|
)
|
||||||
@@ -1468,7 +1472,7 @@ fn indexing_movies_in_three_batches(c: &mut Criterion) {
     &rtxn,
     None,
     &mut new_fields_ids_map,
-    &|| false,
+    &no_cancel,
     Progress::default(),
     None,
 )
@@ -1484,7 +1488,7 @@ fn indexing_movies_in_three_batches(c: &mut Criterion) {
     primary_key,
     &document_changes,
     RuntimeEmbedders::default(),
-    &|| false,
+    &no_cancel,
     &Progress::default(),
     &Default::default(),
 )
@@ -1510,7 +1514,7 @@ fn indexing_movies_in_three_batches(c: &mut Criterion) {
     &rtxn,
     None,
     &mut new_fields_ids_map,
-    &|| false,
+    &no_cancel,
     Progress::default(),
     None,
 )
@@ -1526,7 +1530,7 @@ fn indexing_movies_in_three_batches(c: &mut Criterion) {
     primary_key,
     &document_changes,
     RuntimeEmbedders::default(),
-    &|| false,
+    &no_cancel,
     &Progress::default(),
     &Default::default(),
 )
@@ -1601,7 +1605,7 @@ fn indexing_nested_movies_default(c: &mut Criterion) {
     &rtxn,
     None,
     &mut new_fields_ids_map,
-    &|| false,
+    &no_cancel,
     Progress::default(),
     None,
 )
@@ -1617,7 +1621,7 @@ fn indexing_nested_movies_default(c: &mut Criterion) {
     primary_key,
     &document_changes,
     RuntimeEmbedders::default(),
-    &|| false,
+    &no_cancel,
     &Progress::default(),
     &Default::default(),
 )
@@ -1693,7 +1697,7 @@ fn deleting_nested_movies_in_batches_default(c: &mut Criterion) {
     &rtxn,
     None,
     &mut new_fields_ids_map,
-    &|| false,
+    &no_cancel,
     Progress::default(),
     None,
 )
@@ -1709,7 +1713,7 @@ fn deleting_nested_movies_in_batches_default(c: &mut Criterion) {
     primary_key,
     &document_changes,
     RuntimeEmbedders::default(),
-    &|| false,
+    &no_cancel,
     &Progress::default(),
     &Default::default(),
 )
@@ -1777,7 +1781,7 @@ fn indexing_nested_movies_without_faceted_fields(c: &mut Criterion) {
     &rtxn,
     None,
     &mut new_fields_ids_map,
-    &|| false,
+    &no_cancel,
     Progress::default(),
     None,
 )
@@ -1793,7 +1797,7 @@ fn indexing_nested_movies_without_faceted_fields(c: &mut Criterion) {
     primary_key,
     &document_changes,
     RuntimeEmbedders::default(),
-    &|| false,
+    &no_cancel,
     &Progress::default(),
     &Default::default(),
 )
@@ -1845,7 +1849,7 @@ fn indexing_geo(c: &mut Criterion) {
     &rtxn,
     None,
     &mut new_fields_ids_map,
-    &|| false,
+    &no_cancel,
     Progress::default(),
     None,
 )
@@ -1861,7 +1865,7 @@ fn indexing_geo(c: &mut Criterion) {
     primary_key,
     &document_changes,
     RuntimeEmbedders::default(),
-    &|| false,
+    &no_cancel,
     &Progress::default(),
     &Default::default(),
 )
@@ -1912,7 +1916,7 @@ fn reindexing_geo(c: &mut Criterion) {
     &rtxn,
     None,
     &mut new_fields_ids_map,
-    &|| false,
+    &no_cancel,
     Progress::default(),
     None,
 )
@@ -1928,7 +1932,7 @@ fn reindexing_geo(c: &mut Criterion) {
     primary_key,
     &document_changes,
     RuntimeEmbedders::default(),
-    &|| false,
+    &no_cancel,
     &Progress::default(),
     &Default::default(),
 )
@@ -1958,7 +1962,7 @@ fn reindexing_geo(c: &mut Criterion) {
     &rtxn,
     None,
     &mut new_fields_ids_map,
-    &|| false,
+    &no_cancel,
     Progress::default(),
     None,
 )
@@ -1974,7 +1978,7 @@ fn reindexing_geo(c: &mut Criterion) {
     primary_key,
     &document_changes,
     RuntimeEmbedders::default(),
-    &|| false,
+    &no_cancel,
     &Progress::default(),
     &Default::default(),
 )
@@ -2027,7 +2031,7 @@ fn deleting_geo_in_batches_default(c: &mut Criterion) {
     &rtxn,
     None,
     &mut new_fields_ids_map,
-    &|| false,
+    &no_cancel,
     Progress::default(),
     None,
 )
@@ -2043,7 +2047,7 @@ fn deleting_geo_in_batches_default(c: &mut Criterion) {
     primary_key,
     &document_changes,
     RuntimeEmbedders::default(),
-    &|| false,
+    &no_cancel,
     &Progress::default(),
     &Default::default(),
 )
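Every benchmark hunk above makes the same one-line substitution: the inline `&|| false` cancellation closure is replaced by a reference to a shared `no_cancel` binding. The binding itself sits outside these hunks, so the sketch below is an assumption; only the `no_cancel` name is confirmed by the diff.

    // Sketch only: the diff shows call sites moving from `&|| false` to
    // `&no_cancel`; a single hoisted "never cancel" closure like this is
    // the assumed source of that name.
    let no_cancel = || false;

    // ...each extract/index call then passes the same reference:
    // indexer::index(..., &no_cancel, Progress::default(), ...)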
@@ -11,8 +11,8 @@ license.workspace = true
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
-time = { version = "0.3.41", features = ["parsing"] }
+time = { version = "0.3.44", features = ["parsing"] }

 [build-dependencies]
-anyhow = "1.0.98"
-vergen-git2 = "1.0.7"
+anyhow = "1.0.100"
+vergen-gitcl = "1.0.8"
@@ -15,7 +15,7 @@ fn emit_git_variables() -> anyhow::Result<()> {
     // Note: any code that needs VERGEN_ environment variables should take care to define them manually in the Dockerfile and pass them
     // in the corresponding GitHub workflow (publish_docker.yml).
     // This is due to the Dockerfile building the binary outside of the git directory.
-    let mut builder = vergen_git2::Git2Builder::default();
+    let mut builder = vergen_gitcl::GitclBuilder::default();

     builder.branch(true);
     builder.commit_timestamp(true);
@@ -25,5 +25,5 @@ fn emit_git_variables() -> anyhow::Result<()> {

     let git2 = builder.build()?;

-    vergen_git2::Emitter::default().fail_on_error().add_instructions(&git2)?.emit()
+    vergen_gitcl::Emitter::default().fail_on_error().add_instructions(&git2)?.emit()
 }
crates/build-info/src/main.rs (new file, 6 lines)
@@ -0,0 +1,6 @@
+use build_info::BuildInfo;
+
+fn main() {
+    let info = BuildInfo::from_build();
+    dbg!(info);
+}
@@ -11,24 +11,27 @@ readme.workspace = true
 license.workspace = true

 [dependencies]
-anyhow = "1.0.98"
-flate2 = "1.1.2"
+anyhow = "1.0.100"
+flate2 = "1.1.5"
 http = "1.3.1"
 meilisearch-types = { path = "../meilisearch-types" }
 once_cell = "1.21.3"
-regex = "1.11.1"
+regex = "1.12.2"
 roaring = { version = "0.10.12", features = ["serde"] }
-serde = { version = "1.0.219", features = ["derive"] }
-serde_json = { version = "1.0.140", features = ["preserve_order"] }
+serde = { version = "1.0.228", features = ["derive"] }
+serde_json = { version = "1.0.145", features = ["preserve_order"] }
 tar = "0.4.44"
-tempfile = "3.20.0"
-thiserror = "2.0.12"
-time = { version = "0.3.41", features = ["serde-well-known", "formatting", "parsing", "macros"] }
+tempfile = "3.23.0"
+thiserror = "2.0.17"
+time = { version = "0.3.44", features = ["serde-well-known", "formatting", "parsing", "macros"] }
 tracing = "0.1.41"
-uuid = { version = "1.17.0", features = ["serde", "v4"] }
+uuid = { version = "1.18.1", features = ["serde", "v4"] }

 [dev-dependencies]
 big_s = "1.0.2"
 maplit = "1.0.2"
 meili-snap = { path = "../meili-snap" }
 meilisearch-types = { path = "../meilisearch-types" }
+
+[features]
+enterprise = ["meilisearch-types/enterprise"]
@@ -96,6 +96,8 @@ pub struct TaskDump {
     pub finished_at: Option<OffsetDateTime>,
     #[serde(default, skip_serializing_if = "Option::is_none")]
     pub network: Option<TaskNetwork>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub custom_metadata: Option<String>,
 }

 // A `Kind` specific version made for the dump. If modified you may break the dump.
@@ -158,6 +160,9 @@ pub enum KindDump {
     UpgradeDatabase {
         from: (u32, u32, u32),
     },
+    IndexCompaction {
+        index_uid: String,
+    },
 }

 impl From<Task> for TaskDump {
@@ -175,6 +180,7 @@ impl From<Task> for TaskDump {
             started_at: task.started_at,
             finished_at: task.finished_at,
             network: task.network,
+            custom_metadata: task.custom_metadata,
         }
     }
 }
@@ -240,6 +246,9 @@ impl From<KindWithContent> for KindDump {
             KindWithContent::UpgradeDatabase { from: version } => {
                 KindDump::UpgradeDatabase { from: version }
             }
+            KindWithContent::IndexCompaction { index_uid } => {
+                KindDump::IndexCompaction { index_uid }
+            }
         }
     }
 }
@@ -253,13 +262,13 @@ pub(crate) mod test {
     use big_s::S;
     use maplit::{btreemap, btreeset};
     use meilisearch_types::batches::{Batch, BatchEnqueuedAt, BatchStats};
-    use meilisearch_types::enterprise_edition::network::{Network, Remote};
     use meilisearch_types::facet_values_sort::FacetValuesSort;
     use meilisearch_types::features::RuntimeTogglableFeatures;
     use meilisearch_types::index_uid_pattern::IndexUidPattern;
     use meilisearch_types::keys::{Action, Key};
     use meilisearch_types::milli::update::Setting;
     use meilisearch_types::milli::{self, FilterableAttributesRule};
+    use meilisearch_types::network::{Network, Remote};
     use meilisearch_types::settings::{Checked, FacetingSettings, Settings};
     use meilisearch_types::task_view::DetailsView;
     use meilisearch_types::tasks::{BatchStopReason, Details, Kind, Status};
@@ -331,6 +340,7 @@ pub(crate) mod test {
         facet_search: Setting::NotSet,
         prefix_search: Setting::NotSet,
         chat: Setting::NotSet,
+        vector_store: Setting::NotSet,
         _kind: std::marker::PhantomData,
     };
     settings.check()
@@ -389,6 +399,7 @@ pub(crate) mod test {
         started_at: Some(datetime!(2022-11-20 0:00 UTC)),
         finished_at: Some(datetime!(2022-11-21 0:00 UTC)),
         network: None,
+        custom_metadata: None,
     },
     None,
 ),
@@ -414,6 +425,7 @@ pub(crate) mod test {
         started_at: None,
         finished_at: None,
         network: None,
+        custom_metadata: None,
     },
     Some(vec![
         json!({ "id": 4, "race": "leonberg" }).as_object().unwrap().clone(),
@@ -434,6 +446,7 @@ pub(crate) mod test {
         started_at: None,
         finished_at: None,
         network: None,
+        custom_metadata: None,
     },
     None,
 ),
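The new `custom_metadata` field mirrors `network` exactly: `#[serde(default, skip_serializing_if = "Option::is_none")]` keeps dumps written before this change deserializable (a missing key becomes `None`) and omits the key when the value is `None`. A self-contained sketch of that round-trip, using a hypothetical stand-in struct rather than the real `TaskDump`:

    use serde::{Deserialize, Serialize};

    // Stand-in for the relevant corner of TaskDump; the field name and serde
    // attributes match the diff, everything else is elided.
    #[derive(Serialize, Deserialize, Debug)]
    struct TaskDumpSketch {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        custom_metadata: Option<String>,
    }

    fn main() {
        // `None` is skipped entirely on serialization...
        let json = serde_json::to_string(&TaskDumpSketch { custom_metadata: None }).unwrap();
        assert_eq!(json, "{}");
        // ...and a missing key defaults to `None` on deserialization,
        // which is what keeps pre-upgrade dumps readable.
        let old: TaskDumpSketch = serde_json::from_str("{}").unwrap();
        assert!(old.custom_metadata.is_none());
    }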
@@ -97,6 +97,7 @@ impl CompatV2ToV3 {
     }
 }

+#[allow(clippy::large_enum_variant)]
 pub enum CompatIndexV2ToV3 {
     V2(v2::V2IndexReader),
     Compat(Box<CompatIndexV1ToV2>),
@@ -164,6 +164,7 @@ impl CompatV5ToV6 {
     started_at: task_view.started_at,
     finished_at: task_view.finished_at,
     network: None,
+    custom_metadata: None,
 };

 (task, content_file)
@@ -421,6 +422,7 @@ impl<T> From<v5::Settings<T>> for v6::Settings<v6::Unchecked> {
     facet_search: v6::Setting::NotSet,
     prefix_search: v6::Setting::NotSet,
     chat: v6::Setting::NotSet,
+    vector_store: v6::Setting::NotSet,
     _kind: std::marker::PhantomData,
 }
 }
@@ -4,7 +4,7 @@ use std::io::{BufRead, BufReader, ErrorKind};
 use std::path::Path;

 pub use meilisearch_types::milli;
-use meilisearch_types::milli::vector::hf::OverridePooling;
+use meilisearch_types::milli::vector::embedder::hf::OverridePooling;
 use tempfile::TempDir;
 use time::OffsetDateTime;
 use tracing::debug;
@@ -24,7 +24,7 @@ pub type Batch = meilisearch_types::batches::Batch;
 pub type Key = meilisearch_types::keys::Key;
 pub type ChatCompletionSettings = meilisearch_types::features::ChatCompletionSettings;
 pub type RuntimeTogglableFeatures = meilisearch_types::features::RuntimeTogglableFeatures;
-pub type Network = meilisearch_types::enterprise_edition::network::Network;
+pub type Network = meilisearch_types::network::Network;
 pub type Webhooks = meilisearch_types::webhooks::WebhooksDumpView;

 // ===== Other types to clarify the code of the compat module
@@ -5,9 +5,9 @@ use std::path::PathBuf;
 use flate2::write::GzEncoder;
 use flate2::Compression;
 use meilisearch_types::batches::Batch;
-use meilisearch_types::enterprise_edition::network::Network;
 use meilisearch_types::features::{ChatCompletionSettings, RuntimeTogglableFeatures};
 use meilisearch_types::keys::Key;
+use meilisearch_types::network::Network;
 use meilisearch_types::settings::{Checked, Settings};
 use meilisearch_types::webhooks::WebhooksDumpView;
 use serde_json::{Map, Value};
@@ -11,7 +11,7 @@ edition.workspace = true
 license.workspace = true

 [dependencies]
-tempfile = "3.20.0"
-thiserror = "2.0.12"
+tempfile = "3.23.0"
+thiserror = "2.0.17"
 tracing = "0.1.41"
-uuid = { version = "1.17.0", features = ["serde", "v4"] }
+uuid = { version = "1.18.1", features = ["serde", "v4"] }
@@ -60,7 +60,7 @@ impl FileStore {

     /// Returns the file corresponding to the requested uuid.
     pub fn get_update(&self, uuid: Uuid) -> Result<StdFile> {
-        let path = self.get_update_path(uuid);
+        let path = self.update_path(uuid);
         let file = match StdFile::open(path) {
             Ok(file) => file,
             Err(e) => {
@@ -72,7 +72,7 @@ impl FileStore {
     }

     /// Returns the path that correspond to this uuid, the path could not exists.
-    pub fn get_update_path(&self, uuid: Uuid) -> PathBuf {
+    pub fn update_path(&self, uuid: Uuid) -> PathBuf {
         self.path.join(uuid.to_string())
     }

@@ -7,23 +7,14 @@

 use nom::branch::alt;
 use nom::bytes::complete::tag;
-use nom::character::complete::char;
-use nom::character::complete::multispace0;
-use nom::character::complete::multispace1;
-use nom::combinator::cut;
-use nom::combinator::map;
-use nom::combinator::value;
-use nom::sequence::preceded;
-use nom::sequence::{terminated, tuple};
+use nom::character::complete::{char, multispace0, multispace1};
+use nom::combinator::{cut, map, value};
+use nom::sequence::{preceded, terminated, tuple};
 use Condition::*;

 use crate::error::IResultExt;
-use crate::value::parse_vector_value;
-use crate::value::parse_vector_value_cut;
-use crate::Error;
-use crate::ErrorKind;
-use crate::VectorFilter;
-use crate::{parse_value, FilterCondition, IResult, Span, Token};
+use crate::value::{parse_vector_value, parse_vector_value_cut};
+use crate::{parse_value, Error, ErrorKind, FilterCondition, IResult, Span, Token, VectorFilter};

 #[derive(Debug, Clone, PartialEq, Eq)]
 pub enum Condition<'a> {
@@ -124,7 +115,7 @@ pub fn parse_not_exists(input: Span) -> IResult<FilterCondition> {
     Ok((input, FilterCondition::Not(Box::new(FilterCondition::Condition { fid: key, op: Exists }))))
 }

-fn parse_vectors(input: Span) -> IResult<(Token, Option<Token>, VectorFilter<'_>)> {
+fn parse_vectors(input: Span) -> IResult<(Token, Option<Token>, VectorFilter)> {
     let (input, _) = multispace0(input)?;
     let (input, fid) = tag("_vectors")(input)?;

@@ -75,7 +75,11 @@ pub enum ExpectedValueKind {
 pub enum ErrorKind<'a> {
     ReservedGeo(&'a str),
     GeoRadius,
+    GeoRadiusArgumentCount(usize),
     GeoBoundingBox,
+    GeoPolygon,
+    GeoPolygonNotEnoughPoints(usize),
+    GeoCoordinatesNotPair(usize),
     MisusedGeoRadius,
     MisusedGeoBoundingBox,
     VectorFilterLeftover,
@@ -189,7 +193,7 @@ impl Display for Error<'_> {
     }
     ErrorKind::InvalidPrimary => {
         let text = if input.trim().is_empty() { "but instead got nothing.".to_string() } else { format!("at `{}`.", escaped_input) };
-        writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` {}", text)?
+        writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` {text}")?
     }
     ErrorKind::InvalidEscapedNumber => {
         writeln!(f, "Found an invalid escaped sequence number: `{}`.", escaped_input)?
@@ -198,11 +202,23 @@ impl Display for Error<'_> {
         writeln!(f, "Found unexpected characters at the end of the filter: `{}`. You probably forgot an `OR` or an `AND` rule.", escaped_input)?
     }
     ErrorKind::GeoRadius => {
-        writeln!(f, "The `_geoRadius` filter expects three arguments: `_geoRadius(latitude, longitude, radius)`.")?
+        writeln!(f, "The `_geoRadius` filter must be in the form: `_geoRadius(latitude, longitude, radius, optionalResolution)`.")?
+    }
+    ErrorKind::GeoRadiusArgumentCount(count) => {
+        writeln!(f, "Was expecting 3 or 4 arguments for `_geoRadius`, but instead found {count}.")?
     }
     ErrorKind::GeoBoundingBox => {
         writeln!(f, "The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.")?
     }
+    ErrorKind::GeoPolygon => {
+        writeln!(f, "The `_geoPolygon` filter doesn't match the expected format: `_geoPolygon([latitude, longitude], [latitude, longitude])`.")?
+    }
+    ErrorKind::GeoPolygonNotEnoughPoints(n) => {
+        writeln!(f, "The `_geoPolygon` filter expects at least 3 points but only {n} were specified")?;
+    }
+    ErrorKind::GeoCoordinatesNotPair(number) => {
+        writeln!(f, "Was expecting 2 coordinates but instead found {number}.")?
+    }
     ErrorKind::ReservedGeo(name) => {
         writeln!(f, "`{}` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.", name.escape_debug())?
     }
@@ -19,6 +19,7 @@
 //! word = (alphanumeric | _ | - | .)+
 //! geoRadius = "_geoRadius(" WS* float WS* "," WS* float WS* "," float WS* ")"
 //! geoBoundingBox = "_geoBoundingBox([" WS * float WS* "," WS* float WS* "], [" WS* float WS* "," WS* float WS* "]")
+//! geoPolygon = "_geoPolygon([[" WS* float WS* "," WS* float WS* "],+])"
 //! ```
 //!
 //! Other BNF grammar used to handle some specific errors:
@@ -116,7 +117,7 @@ impl<'a> Token<'a> {
         self.span
     }

-    pub fn parse_finite_float(&self) -> Result<f64, Error> {
+    pub fn parse_finite_float(&self) -> Result<f64, Error<'a>> {
         let value: f64 = self.value().parse().map_err(|e| self.as_external_error(e))?;
         if value.is_finite() {
             Ok(value)
@@ -156,8 +157,9 @@ pub enum FilterCondition<'a> {
     Or(Vec<Self>),
     And(Vec<Self>),
     VectorExists { fid: Token<'a>, embedder: Option<Token<'a>>, filter: VectorFilter<'a> },
-    GeoLowerThan { point: [Token<'a>; 2], radius: Token<'a> },
+    GeoLowerThan { point: [Token<'a>; 2], radius: Token<'a>, resolution: Option<Token<'a>> },
     GeoBoundingBox { top_right_point: [Token<'a>; 2], bottom_left_point: [Token<'a>; 2] },
+    GeoPolygon { points: Vec<[Token<'a>; 2]> },
 }

 pub enum TraversedElement<'a> {
@@ -166,7 +168,7 @@ pub enum TraversedElement<'a> {
 }

 impl<'a> FilterCondition<'a> {
-    pub fn use_contains_operator(&self) -> Option<&Token> {
+    pub fn use_contains_operator(&self) -> Option<&Token<'a>> {
         match self {
             FilterCondition::Condition { fid: _, op } => match op {
                 Condition::GreaterThan(_)
@@ -189,11 +191,12 @@ impl<'a> FilterCondition<'a> {
             FilterCondition::VectorExists { .. }
             | FilterCondition::GeoLowerThan { .. }
             | FilterCondition::GeoBoundingBox { .. }
+            | FilterCondition::GeoPolygon { .. }
             | FilterCondition::In { .. } => None,
         }
     }

-    pub fn use_vector_filter(&self) -> Option<&Token> {
+    pub fn use_vector_filter(&self) -> Option<&Token<'a>> {
         match self {
             FilterCondition::Condition { .. } => None,
             FilterCondition::Not(this) => this.use_vector_filter(),
@@ -202,12 +205,13 @@ impl<'a> FilterCondition<'a> {
             }
             FilterCondition::GeoLowerThan { .. }
             | FilterCondition::GeoBoundingBox { .. }
+            | FilterCondition::GeoPolygon { .. }
             | FilterCondition::In { .. } => None,
             FilterCondition::VectorExists { fid, .. } => Some(fid),
         }
     }

-    pub fn fids(&self, depth: usize) -> Box<dyn Iterator<Item = &Token> + '_> {
+    pub fn fids(&self, depth: usize) -> Box<dyn Iterator<Item = &Token<'a>> + '_> {
         if depth == 0 {
             return Box::new(std::iter::empty());
         }
@@ -228,7 +232,7 @@ impl<'a> FilterCondition<'a> {
     }

     /// Returns the first token found at the specified depth, `None` if no token at this depth.
-    pub fn token_at_depth(&self, depth: usize) -> Option<&Token> {
+    pub fn token_at_depth(&self, depth: usize) -> Option<&Token<'a>> {
         match self {
             FilterCondition::Condition { fid, .. } if depth == 0 => Some(fid),
             FilterCondition::Or(subfilters) => {
@@ -396,23 +400,27 @@ fn parse_not(input: Span, depth: usize) -> IResult<FilterCondition> {
 /// If we parse `_geoRadius` we MUST parse the rest of the expression.
 fn parse_geo_radius(input: Span) -> IResult<FilterCondition> {
     // we want to allow space BEFORE the _geoRadius but not after
-    let parsed = preceded(
-        tuple((multispace0, word_exact("_geoRadius"))),
-        // if we were able to parse `_geoRadius` and can't parse the rest of the input we return a failure
-        cut(delimited(char('('), separated_list1(tag(","), ws(recognize_float)), char(')'))),
-    )(input)
-    .map_err(|e| e.map(|_| Error::new_from_kind(input, ErrorKind::GeoRadius)));
+    let (input, _) = tuple((multispace0, word_exact("_geoRadius")))(input)?;
+
+    // if we were able to parse `_geoRadius` and can't parse the rest of the input we return a failure
+    let parsed =
+        delimited(char('('), separated_list1(tag(","), ws(recognize_float)), char(')'))(input)
+            .map_cut(ErrorKind::GeoRadius);

     let (input, args) = parsed?;

-    if args.len() != 3 {
-        return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::GeoRadius)));
+    if !(3..=4).contains(&args.len()) {
+        return Err(Error::failure_from_kind(input, ErrorKind::GeoRadiusArgumentCount(args.len())));
     }

     let res = FilterCondition::GeoLowerThan {
         point: [args[0].into(), args[1].into()],
         radius: args[2].into(),
+        resolution: args.get(3).cloned().map(Token::from),
     };

     Ok((input, res))
 }

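With this rewrite `_geoRadius` accepts an optional fourth argument (a resolution) and reports a dedicated `GeoRadiusArgumentCount` error for any other arity. A sketch of the accepted shapes, assuming the crate's usual `FilterCondition::parse` entry point (not shown in these hunks):

    // Sketch only: exercises the `(3..=4).contains(&args.len())` rule above.
    use filter_parser::FilterCondition;

    fn main() {
        // Three arguments keep working as before.
        assert!(FilterCondition::parse("_geoRadius(12, 13, 14)").is_ok());
        // A fourth argument now parses as the optional resolution.
        assert!(FilterCondition::parse("_geoRadius(12, 13, 14, 1000)").is_ok());
        // Any other count is rejected with GeoRadiusArgumentCount(n).
        assert!(FilterCondition::parse("_geoRadius(1, 2)").is_err());
    }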
@@ -420,26 +428,33 @@ fn parse_geo_radius(input: Span) -> IResult<FilterCondition> {
 /// If we parse `_geoBoundingBox` we MUST parse the rest of the expression.
 fn parse_geo_bounding_box(input: Span) -> IResult<FilterCondition> {
     // we want to allow space BEFORE the _geoBoundingBox but not after
-    let parsed = preceded(
-        tuple((multispace0, word_exact("_geoBoundingBox"))),
-        // if we were able to parse `_geoBoundingBox` and can't parse the rest of the input we return a failure
-        cut(delimited(
-            char('('),
-            separated_list1(
-                tag(","),
-                ws(delimited(char('['), separated_list1(tag(","), ws(recognize_float)), char(']'))),
-            ),
-            char(')'),
-        )),
-    )(input)
-    .map_err(|e| e.map(|_| Error::new_from_kind(input, ErrorKind::GeoBoundingBox)));
+    let (input, _) = tuple((multispace0, word_exact("_geoBoundingBox")))(input)?;

-    let (input, args) = parsed?;
+    // if we were able to parse `_geoBoundingBox` and can't parse the rest of the input we return a failure
+    let (input, args) = delimited(
+        char('('),
+        separated_list1(
+            tag(","),
+            ws(delimited(char('['), separated_list1(tag(","), ws(recognize_float)), char(']'))),
+        ),
+        char(')'),
+    )(input)
+    .map_cut(ErrorKind::GeoBoundingBox)?;

-    if args.len() != 2 || args[0].len() != 2 || args[1].len() != 2 {
+    if args.len() != 2 {
         return Err(Error::failure_from_kind(input, ErrorKind::GeoBoundingBox));
     }

+    if let Some(offending) = args.iter().find(|a| a.len() != 2) {
+        let context = offending.first().unwrap_or(&input);
+        return Err(Error::failure_from_kind(
+            *context,
+            ErrorKind::GeoCoordinatesNotPair(offending.len()),
+        ));
+    }
+
     let res = FilterCondition::GeoBoundingBox {
         top_right_point: [args[0][0].into(), args[0][1].into()],
         bottom_left_point: [args[1][0].into(), args[1][1].into()],
@@ -447,6 +462,47 @@ fn parse_geo_bounding_box(input: Span) -> IResult<FilterCondition> {
     Ok((input, res))
 }

+/// geoPolygon = "_geoPolygon([[" WS* float WS* "," WS* float WS* "],+])"
+/// If we parse `_geoPolygon` we MUST parse the rest of the expression.
+fn parse_geo_polygon(input: Span) -> IResult<FilterCondition> {
+    // we want to allow space BEFORE the _geoPolygon but not after
+
+    let (input, _) = tuple((multispace0, word_exact("_geoPolygon")))(input)?;
+
+    // if we were able to parse `_geoPolygon` and can't parse the rest of the input we return a failure
+
+    let (input, args): (_, Vec<Vec<LocatedSpan<_, _>>>) = delimited(
+        char('('),
+        separated_list1(
+            tag(","),
+            ws(delimited(char('['), separated_list1(tag(","), ws(recognize_float)), char(']'))),
+        ),
+        preceded(opt(ws(char(','))), char(')')), // Tolerate trailing comma
+    )(input)
+    .map_cut(ErrorKind::GeoPolygon)?;
+
+    if args.len() < 3 {
+        let context = args.last().and_then(|a| a.last()).unwrap_or(&input);
+        return Err(Error::failure_from_kind(
+            *context,
+            ErrorKind::GeoPolygonNotEnoughPoints(args.len()),
+        ));
+    }
+
+    if let Some(offending) = args.iter().find(|a| a.len() != 2) {
+        let context = offending.first().unwrap_or(&input);
+        return Err(Error::failure_from_kind(
+            *context,
+            ErrorKind::GeoCoordinatesNotPair(offending.len()),
+        ));
+    }
+
+    let res = FilterCondition::GeoPolygon {
+        points: args.into_iter().map(|a| [a[0].into(), a[1].into()]).collect(),
+    };
+    Ok((input, res))
+}
+
 /// geoPoint = WS* "_geoPoint(float WS* "," WS* float WS* "," WS* float)
 fn parse_geo_point(input: Span) -> IResult<FilterCondition> {
     // we want to forbid space BEFORE the _geoPoint but not after
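The new `parse_geo_polygon` above requires at least three `[latitude, longitude]` pairs, tolerates a trailing comma, and distinguishes "not enough points" from "a point that is not a pair". A sketch of the resulting behaviour, again assuming the `FilterCondition::parse` entry point:

    // Sketch only: mirrors the two failure branches of parse_geo_polygon.
    use filter_parser::FilterCondition;

    fn main() {
        // Three pairs (with a tolerated trailing comma) parse fine.
        assert!(FilterCondition::parse("_geoPolygon([12, 13], [14, 15], [16, 17],)").is_ok());
        // Fewer than three points -> GeoPolygonNotEnoughPoints(2).
        assert!(FilterCondition::parse("_geoPolygon([12, 13], [14, 15])").is_err());
        // A triple inside a point -> GeoCoordinatesNotPair(3).
        assert!(FilterCondition::parse("_geoPolygon([1,2], [1,2], [1,2,3])").is_err());
    }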
@@ -516,8 +572,8 @@ fn parse_primary(input: Span, depth: usize) -> IResult<FilterCondition> {
             Error::new_from_kind(input, ErrorKind::MissingClosingDelimiter(c.char()))
         }),
     ),
-    parse_geo_radius,
-    parse_geo_bounding_box,
+    // Made a random block of functions because we reached the maximum number of elements per alt
+    alt((parse_geo_radius, parse_geo_bounding_box, parse_geo_polygon)),
     parse_in,
     parse_not_in,
     parse_condition,
@@ -597,9 +653,12 @@ impl std::fmt::Display for FilterCondition<'_> {
         }
         write!(f, " EXISTS")
     }
-    FilterCondition::GeoLowerThan { point, radius } => {
+    FilterCondition::GeoLowerThan { point, radius, resolution: None } => {
         write!(f, "_geoRadius({}, {}, {})", point[0], point[1], radius)
     }
+    FilterCondition::GeoLowerThan { point, radius, resolution: Some(resolution) } => {
+        write!(f, "_geoRadius({}, {}, {}, {})", point[0], point[1], radius, resolution)
+    }
     FilterCondition::GeoBoundingBox {
         top_right_point: top_left_point,
         bottom_left_point: bottom_right_point,
@@ -613,6 +672,13 @@ impl std::fmt::Display for FilterCondition<'_> {
             bottom_right_point[1]
         )
     }
+    FilterCondition::GeoPolygon { points } => {
+        write!(f, "_geoPolygon([")?;
+        for point in points {
+            write!(f, "[{}, {}], ", point[0], point[1])?;
+        }
+        write!(f, "])")
+    }
 }
 }
 }
@@ -651,7 +717,7 @@ pub mod tests {
     /// Create a raw [Token]. You must specify the string that appear BEFORE your element followed by your element
     pub fn rtok<'a>(before: &'a str, value: &'a str) -> Token<'a> {
         // if the string is empty we still need to return 1 for the line number
-        let lines = before.is_empty().then_some(1).unwrap_or_else(|| before.lines().count());
+        let lines = if before.is_empty() { 1 } else { before.lines().count() };
         let offset = before.chars().count();
         // the extra field is not checked in the tests so we can set it to nothing
         unsafe { Span::new_from_raw_offset(offset, lines as u32, value, "") }.into()
@@ -776,12 +842,17 @@ pub mod tests {
     insta::assert_snapshot!(p("_geoRadius(12, 13, 14)"), @"_geoRadius({12}, {13}, {14})");
     insta::assert_snapshot!(p("NOT _geoRadius(12, 13, 14)"), @"NOT (_geoRadius({12}, {13}, {14}))");
     insta::assert_snapshot!(p("_geoRadius(12,13,14)"), @"_geoRadius({12}, {13}, {14})");
+    insta::assert_snapshot!(p("_geoRadius(12,13,14,1000)"), @"_geoRadius({12}, {13}, {14}, {1000})");

     // Test geo bounding box
     insta::assert_snapshot!(p("_geoBoundingBox([12, 13], [14, 15])"), @"_geoBoundingBox([{12}, {13}], [{14}, {15}])");
     insta::assert_snapshot!(p("NOT _geoBoundingBox([12, 13], [14, 15])"), @"NOT (_geoBoundingBox([{12}, {13}], [{14}, {15}]))");
     insta::assert_snapshot!(p("_geoBoundingBox([12,13],[14,15])"), @"_geoBoundingBox([{12}, {13}], [{14}, {15}])");
+
+    // Test geo polygon
+    insta::assert_snapshot!(p("_geoPolygon([12, 13], [14, 15], [16, 17])"), @"_geoPolygon([[{12}, {13}], [{14}, {15}], [{16}, {17}], ])");
+    insta::assert_snapshot!(p("_geoPolygon([12, 13], [14, 15], [-1.2,2939.2], [1,1])"), @"_geoPolygon([[{12}, {13}], [{14}, {15}], [{-1.2}, {2939.2}], [{1}, {1}], ])");

     // Test OR + AND
     insta::assert_snapshot!(p("channel = ponce AND 'dog race' != 'bernese mountain'"), @"AND[{channel} = {ponce}, {dog race} != {bernese mountain}, ]");
     insta::assert_snapshot!(p("channel = ponce OR 'dog race' != 'bernese mountain'"), @"OR[{channel} = {ponce}, {dog race} != {bernese mountain}, ]");
@@ -838,50 +909,80 @@ pub mod tests {
     11:12 channel = 🐻 AND followers < 100
     "###);

-    insta::assert_snapshot!(p("'OR'"), @r###"
-    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `\'OR\'`.
+    insta::assert_snapshot!(p("'OR'"), @r"
+    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `\'OR\'`.
     1:5 'OR'
-    "###);
+    ");

     insta::assert_snapshot!(p("OR"), @r###"
     Was expecting a value but instead got `OR`, which is a reserved keyword. To use `OR` as a field name or a value, surround it by quotes.
     1:3 OR
     "###);

-    insta::assert_snapshot!(p("channel Ponce"), @r###"
-    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `channel Ponce`.
+    insta::assert_snapshot!(p("channel Ponce"), @r"
+    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `channel Ponce`.
     1:14 channel Ponce
-    "###);
+    ");

-    insta::assert_snapshot!(p("channel = Ponce OR"), @r###"
-    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` but instead got nothing.
+    insta::assert_snapshot!(p("channel = Ponce OR"), @r"
+    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` but instead got nothing.
     19:19 channel = Ponce OR
-    "###);
+    ");

-    insta::assert_snapshot!(p("_geoRadius"), @r###"
-    The `_geoRadius` filter expects three arguments: `_geoRadius(latitude, longitude, radius)`.
-    1:11 _geoRadius
-    "###);
+    insta::assert_snapshot!(p("_geoRadius"), @r"
+    The `_geoRadius` filter must be in the form: `_geoRadius(latitude, longitude, radius, optionalResolution)`.
+    11:11 _geoRadius
+    ");

-    insta::assert_snapshot!(p("_geoRadius = 12"), @r###"
-    The `_geoRadius` filter expects three arguments: `_geoRadius(latitude, longitude, radius)`.
-    1:16 _geoRadius = 12
-    "###);
+    insta::assert_snapshot!(p("_geoRadius = 12"), @r"
+    The `_geoRadius` filter must be in the form: `_geoRadius(latitude, longitude, radius, optionalResolution)`.
+    11:16 _geoRadius = 12
+    ");

-    insta::assert_snapshot!(p("_geoBoundingBox"), @r###"
+    insta::assert_snapshot!(p("_geoBoundingBox"), @r"
     The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.
-    1:16 _geoBoundingBox
-    "###);
+    16:16 _geoBoundingBox
+    ");

-    insta::assert_snapshot!(p("_geoBoundingBox = 12"), @r###"
+    insta::assert_snapshot!(p("_geoBoundingBox = 12"), @r"
     The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.
-    1:21 _geoBoundingBox = 12
-    "###);
+    16:21 _geoBoundingBox = 12
+    ");

-    insta::assert_snapshot!(p("_geoBoundingBox(1.0, 1.0)"), @r###"
+    insta::assert_snapshot!(p("_geoBoundingBox(1.0, 1.0)"), @r"
     The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.
-    1:26 _geoBoundingBox(1.0, 1.0)
-    "###);
+    17:26 _geoBoundingBox(1.0, 1.0)
+    ");

+    insta::assert_snapshot!(p("_geoPolygon([1,2,3])"), @r"
+    The `_geoPolygon` filter expects at least 3 points but only 1 were specified
+    18:19 _geoPolygon([1,2,3])
+    ");
+
+    insta::assert_snapshot!(p("_geoPolygon(1,2,3)"), @r"
+    The `_geoPolygon` filter doesn't match the expected format: `_geoPolygon([latitude, longitude], [latitude, longitude])`.
+    13:19 _geoPolygon(1,2,3)
+    ");
+
+    insta::assert_snapshot!(p("_geoPolygon([1,2],[1,2],[1,2,3])"), @r"
+    Was expecting 2 coordinates but instead found 3.
+    26:27 _geoPolygon([1,2],[1,2],[1,2,3])
+    ");
+
+    insta::assert_snapshot!(p("_geoPolygon([1,2],[1,2,3])"), @r"
+    The `_geoPolygon` filter expects at least 3 points but only 2 were specified
+    24:25 _geoPolygon([1,2],[1,2,3])
+    ");
+
+    insta::assert_snapshot!(p("_geoPolygon(1)"), @r"
+    The `_geoPolygon` filter doesn't match the expected format: `_geoPolygon([latitude, longitude], [latitude, longitude])`.
+    13:15 _geoPolygon(1)
+    ");
+
+    insta::assert_snapshot!(p("_geoPolygon([1,2)"), @r"
+    The `_geoPolygon` filter doesn't match the expected format: `_geoPolygon([latitude, longitude], [latitude, longitude])`.
+    17:18 _geoPolygon([1,2)
+    ");
+
     insta::assert_snapshot!(p("_geoPoint(12, 13, 14)"), @r###"
     `_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.
@@ -938,15 +1039,15 @@ pub mod tests {
     34:35 channel = mv OR followers >= 1000)
     "###);

-    insta::assert_snapshot!(p("colour NOT EXIST"), @r###"
-    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `colour NOT EXIST`.
+    insta::assert_snapshot!(p("colour NOT EXIST"), @r"
+    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `colour NOT EXIST`.
     1:17 colour NOT EXIST
-    "###);
+    ");

-    insta::assert_snapshot!(p("subscribers 100 TO1000"), @r###"
-    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `subscribers 100 TO1000`.
+    insta::assert_snapshot!(p("subscribers 100 TO1000"), @r"
+    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `subscribers 100 TO1000`.
     1:23 subscribers 100 TO1000
-    "###);
+    ");

     insta::assert_snapshot!(p("channel = ponce ORdog != 'bernese mountain'"), @r###"
     Found unexpected characters at the end of the filter: `ORdog != \'bernese mountain\'`. You probably forgot an `OR` or an `AND` rule.
@@ -1071,38 +1172,38 @@ pub mod tests {
     5:7 NOT OR EXISTS AND EXISTS NOT EXISTS
     "###);

-    insta::assert_snapshot!(p(r#"value NULL"#), @r###"
-    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value NULL`.
+    insta::assert_snapshot!(p(r#"value NULL"#), @r"
+    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value NULL`.
     1:11 value NULL
-    "###);
-    insta::assert_snapshot!(p(r#"value NOT NULL"#), @r###"
-    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value NOT NULL`.
+    ");
+    insta::assert_snapshot!(p(r#"value NOT NULL"#), @r"
+    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value NOT NULL`.
     1:15 value NOT NULL
-    "###);
-    insta::assert_snapshot!(p(r#"value EMPTY"#), @r###"
-    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value EMPTY`.
+    ");
+    insta::assert_snapshot!(p(r#"value EMPTY"#), @r"
+    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value EMPTY`.
     1:12 value EMPTY
-    "###);
-    insta::assert_snapshot!(p(r#"value NOT EMPTY"#), @r###"
-    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value NOT EMPTY`.
+    ");
+    insta::assert_snapshot!(p(r#"value NOT EMPTY"#), @r"
+    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value NOT EMPTY`.
     1:16 value NOT EMPTY
-    "###);
-    insta::assert_snapshot!(p(r#"value IS"#), @r###"
-    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value IS`.
+    ");
+    insta::assert_snapshot!(p(r#"value IS"#), @r"
+    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value IS`.
     1:9 value IS
-    "###);
-    insta::assert_snapshot!(p(r#"value IS NOT"#), @r###"
-    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value IS NOT`.
+    ");
+    insta::assert_snapshot!(p(r#"value IS NOT"#), @r"
+    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value IS NOT`.
     1:13 value IS NOT
-    "###);
-    insta::assert_snapshot!(p(r#"value IS EXISTS"#), @r###"
-    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value IS EXISTS`.
+    ");
+    insta::assert_snapshot!(p(r#"value IS EXISTS"#), @r"
+    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value IS EXISTS`.
     1:16 value IS EXISTS
-    "###);
-    insta::assert_snapshot!(p(r#"value IS NOT EXISTS"#), @r###"
-    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value IS NOT EXISTS`.
+    ");
+    insta::assert_snapshot!(p(r#"value IS NOT EXISTS"#), @r"
+    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value IS NOT EXISTS`.
     1:20 value IS NOT EXISTS
-    "###);
+    ");
 }

 #[test]
@@ -16,7 +16,7 @@ license.workspace = true
 serde_json = "1.0"

 [dev-dependencies]
-criterion = { version = "0.6.0", features = ["html_reports"] }
+criterion = { version = "0.7.0", features = ["html_reports"] }

 [[bench]]
 name = "benchmarks"
@@ -11,12 +11,12 @@ edition.workspace = true
 license.workspace = true

 [dependencies]
-arbitrary = { version = "1.4.1", features = ["derive"] }
-bumpalo = "3.18.1"
-clap = { version = "4.5.40", features = ["derive"] }
+arbitrary = { version = "1.4.2", features = ["derive"] }
+bumpalo = "3.19.0"
+clap = { version = "4.5.52", features = ["derive"] }
 either = "1.15.0"
 fastrand = "2.3.0"
 milli = { path = "../milli" }
-serde = { version = "1.0.219", features = ["derive"] }
-serde_json = { version = "1.0.140", features = ["preserve_order"] }
-tempfile = "3.20.0"
+serde = { version = "1.0.228", features = ["derive"] }
+serde_json = { version = "1.0.145", features = ["preserve_order"] }
+tempfile = "3.23.0"
@@ -11,31 +11,33 @@ edition.workspace = true
 license.workspace = true

 [dependencies]
-anyhow = "1.0.98"
+anyhow = "1.0.100"
 bincode = "1.3.3"
 byte-unit = "5.1.6"
-bumpalo = "3.18.1"
+bytes = "1.11.0"
+bumpalo = "3.19.0"
 bumparaw-collections = "0.1.4"
-convert_case = "0.8.0"
-csv = "1.3.1"
+convert_case = "0.9.0"
+csv = "1.4.0"
 derive_builder = "0.20.2"
 dump = { path = "../dump" }
-enum-iterator = "2.1.0"
+enum-iterator = "2.3.0"
 file-store = { path = "../file-store" }
-flate2 = "1.1.2"
-indexmap = "2.9.0"
+flate2 = "1.1.5"
+indexmap = "2.12.0"
 meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-types = { path = "../meilisearch-types" }
-memmap2 = "0.9.7"
+memmap2 = "0.9.9"
 page_size = "0.6.0"
-rayon = "1.10.0"
+rayon = "1.11.0"
 roaring = { version = "0.10.12", features = ["serde"] }
-serde = { version = "1.0.219", features = ["derive"] }
-serde_json = { version = "1.0.140", features = ["preserve_order"] }
+serde = { version = "1.0.228", features = ["derive"] }
+serde_json = { version = "1.0.145", features = ["preserve_order"] }
+tar = "0.4.44"
 synchronoise = "1.0.1"
-tempfile = "3.20.0"
-thiserror = "2.0.12"
-time = { version = "0.3.41", features = [
+tempfile = "3.23.0"
+thiserror = "2.0.17"
+time = { version = "0.3.44", features = [
     "serde-well-known",
     "formatting",
     "parsing",
@@ -43,8 +45,11 @@ time = { version = "0.3.41", features = [
 ] }
 tracing = "0.1.41"
 ureq = "2.12.1"
-uuid = { version = "1.17.0", features = ["serde", "v4"] }
+uuid = { version = "1.18.1", features = ["serde", "v4"] }
 backoff = "0.4.0"
+reqwest = { version = "0.12.24", features = ["rustls-tls", "http2"], default-features = false }
+rusty-s3 = "0.8.1"
+tokio = { version = "1.48.0", features = ["full"] }

 [dev-dependencies]
 big_s = "1.0.2"
@@ -1,3 +1,5 @@
+#![allow(clippy::result_large_err)]
+
 use std::collections::HashMap;
 use std::io;

@@ -148,6 +150,7 @@ impl<'a> Dump<'a> {
             details: task.details,
             status: task.status,
             network: task.network,
+            custom_metadata: task.custom_metadata,
             kind: match task.kind {
                 KindDump::DocumentImport {
                     primary_key,
@@ -232,6 +235,9 @@ impl<'a> Dump<'a> {
                     }
                 }
                 KindDump::UpgradeDatabase { from } => KindWithContent::UpgradeDatabase { from },
+                KindDump::IndexCompaction { index_uid } => {
+                    KindWithContent::IndexCompaction { index_uid }
+                }
             },
         };

@@ -5,6 +5,7 @@ use meilisearch_types::error::{Code, ErrorCode};
 use meilisearch_types::milli::index::RollbackOutcome;
 use meilisearch_types::tasks::{Kind, Status};
 use meilisearch_types::{heed, milli};
+use reqwest::StatusCode;
 use thiserror::Error;

 use crate::TaskId;
@@ -127,6 +128,14 @@ pub enum Error {
     #[error("Aborted task")]
     AbortedTask,

+    #[error("S3 error: status: {status}, body: {body}")]
+    S3Error { status: StatusCode, body: String },
+    #[error("S3 HTTP error: {0}")]
+    S3HttpError(reqwest::Error),
+    #[error("S3 XML error: {0}")]
+    S3XmlError(Box<dyn std::error::Error + Send + Sync>),
+    #[error("S3 bucket error: {0}")]
+    S3BucketError(rusty_s3::BucketError),
     #[error(transparent)]
     Dump(#[from] dump::Error),
     #[error(transparent)]
@@ -226,6 +235,10 @@ impl Error {
             | Error::TaskCancelationWithEmptyQuery
             | Error::FromRemoteWhenExporting { .. }
             | Error::AbortedTask
+            | Error::S3Error { .. }
+            | Error::S3HttpError(_)
+            | Error::S3XmlError(_)
+            | Error::S3BucketError(_)
             | Error::Dump(_)
             | Error::Heed(_)
             | Error::Milli { .. }
@@ -293,8 +306,14 @@ impl ErrorCode for Error {
             Error::BatchNotFound(_) => Code::BatchNotFound,
             Error::TaskDeletionWithEmptyQuery => Code::MissingTaskFilters,
             Error::TaskCancelationWithEmptyQuery => Code::MissingTaskFilters,
-            // TODO: not sure of the Code to use
             Error::NoSpaceLeftInTaskQueue => Code::NoSpaceLeftOnDevice,
+            Error::S3Error { status, .. } if status.is_client_error() => {
+                Code::InvalidS3SnapshotRequest
+            }
+            Error::S3Error { .. } => Code::S3SnapshotServerError,
+            Error::S3HttpError(_) => Code::S3SnapshotServerError,
+            Error::S3XmlError(_) => Code::S3SnapshotServerError,
+            Error::S3BucketError(_) => Code::InvalidS3SnapshotParameters,
             Error::Dump(e) => e.error_code(),
             Error::Milli { error, .. } => error.error_code(),
             Error::ProcessBatchPanicked(_) => Code::Internal,
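Note: the error-code mapping introduced above follows the usual HTTP convention: 4xx responses mean the request we built was wrong, everything else is server-side trouble worth surfacing as a server error. A minimal standalone sketch of that split, with a hypothetical SnapshotCode enum standing in for the crate's Code and a plain u16 standing in for reqwest::StatusCode:

use std::ops::Range;

// Sketch only: classify an S3 HTTP status the same way the match arms
// above do. `status.is_client_error()` on a real StatusCode performs
// exactly this 400..500 check.
#[derive(Debug, PartialEq)]
enum SnapshotCode {
    InvalidRequest, // 4xx: the request itself was malformed or rejected
    ServerError,    // 5xx and anything else: the remote side failed
}

fn classify_s3_status(status: u16) -> SnapshotCode {
    let client_errors: Range<u16> = 400..500;
    if client_errors.contains(&status) {
        SnapshotCode::InvalidRequest
    } else {
        SnapshotCode::ServerError
    }
}

fn main() {
    assert_eq!(classify_s3_status(403), SnapshotCode::InvalidRequest);
    assert_eq!(classify_s3_status(503), SnapshotCode::ServerError);
}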
@@ -1,9 +1,9 @@
 use std::sync::{Arc, RwLock};

-use meilisearch_types::enterprise_edition::network::Network;
 use meilisearch_types::features::{InstanceTogglableFeatures, RuntimeTogglableFeatures};
 use meilisearch_types::heed::types::{SerdeJson, Str};
 use meilisearch_types::heed::{Database, Env, RwTxn, WithoutTls};
+use meilisearch_types::network::Network;

 use crate::error::FeatureNotEnabledError;
 use crate::Result;
@@ -158,6 +158,19 @@ impl RoFeatures {
             .into())
         }
     }
+
+    pub fn check_vector_store_setting(&self, disabled_action: &'static str) -> Result<()> {
+        if self.runtime.vector_store_setting {
+            Ok(())
+        } else {
+            Err(FeatureNotEnabledError {
+                disabled_action,
+                feature: "vector_store_setting",
+                issue_link: "https://github.com/orgs/meilisearch/discussions/860",
+            }
+            .into())
+        }
+    }
 }

 impl FeatureData {
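Note: the new `check_vector_store_setting` follows the same runtime feature-gate pattern as the other checks in this file: return Ok(()) when the flag is on, otherwise a typed "feature not enabled" error that names the blocked action. A self-contained sketch of the pattern, with illustrative names rather than the crate's actual types:

// Sketch only: a runtime flag plus a typed gate error. `Features` and
// `FeatureNotEnabled` are hypothetical stand-ins for RoFeatures and
// FeatureNotEnabledError.
#[derive(Debug)]
struct FeatureNotEnabled {
    disabled_action: &'static str,
    feature: &'static str,
}

struct Features {
    vector_store_setting: bool,
}

impl Features {
    fn check_vector_store_setting(
        &self,
        disabled_action: &'static str,
    ) -> Result<(), FeatureNotEnabled> {
        if self.vector_store_setting {
            Ok(())
        } else {
            Err(FeatureNotEnabled { disabled_action, feature: "vector_store_setting" })
        }
    }
}

fn main() {
    let features = Features { vector_store_setting: false };
    let err = features.check_vector_store_setting("editing the vector store setting").unwrap_err();
    println!("{} requires the {} feature", err.disabled_action, err.feature);
}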
@@ -143,10 +143,10 @@ impl IndexStats {
     ///
     /// - rtxn: a RO transaction for the index, obtained from `Index::read_txn()`.
     pub fn new(index: &Index, rtxn: &RoTxn) -> milli::Result<Self> {
-        let arroy_stats = index.arroy_stats(rtxn)?;
+        let vector_store_stats = index.vector_store_stats(rtxn)?;
         Ok(IndexStats {
-            number_of_embeddings: Some(arroy_stats.number_of_embeddings),
-            number_of_embedded_documents: Some(arroy_stats.documents.len()),
+            number_of_embeddings: Some(vector_store_stats.number_of_embeddings),
+            number_of_embedded_documents: Some(vector_store_stats.documents.len()),
             documents_database_stats: index.documents_stats(rtxn)?.unwrap_or_default(),
             number_of_documents: None,
             database_size: index.on_disk_size()?,
@@ -199,7 +199,7 @@ impl IndexMapper {
         let uuid = Uuid::new_v4();
         self.index_mapping.put(&mut wtxn, name, &uuid)?;

-        let index_path = self.base_path.join(uuid.to_string());
+        let index_path = self.index_path(uuid);
         fs::create_dir_all(&index_path)?;

         // Error if the UUIDv4 somehow already exists in the map, since it should be fresh.
@@ -286,7 +286,7 @@ impl IndexMapper {
         };

         let index_map = self.index_map.clone();
-        let index_path = self.base_path.join(uuid.to_string());
+        let index_path = self.index_path(uuid);
         let index_name = name.to_string();
         thread::Builder::new()
             .name(String::from("index_deleter"))
@@ -341,6 +341,26 @@ impl IndexMapper {
         Ok(())
     }

+    /// Closes the specified index.
+    ///
+    /// This operation involves closing the underlying environment and so can take a long time to complete.
+    ///
+    /// # Panics
+    ///
+    /// - If the Index corresponding to the passed name is concurrently being deleted/resized or cannot be found in the
+    ///   in-memory hash map.
+    pub fn close_index(&self, rtxn: &RoTxn, name: &str) -> Result<()> {
+        let uuid = self
+            .index_mapping
+            .get(rtxn, name)?
+            .ok_or_else(|| Error::IndexNotFound(name.to_string()))?;
+
+        // We remove the index from the in-memory index map.
+        self.index_map.write().unwrap().close_for_resize(&uuid, self.enable_mdb_writemap, 0);
+
+        Ok(())
+    }
+
     /// Return an index, may open it if it wasn't already opened.
     pub fn index(&self, rtxn: &RoTxn, name: &str) -> Result<Index> {
         if let Some((current_name, current_index)) =
@@ -388,7 +408,7 @@ impl IndexMapper {
             } else {
                 continue;
             };
-            let index_path = self.base_path.join(uuid.to_string());
+            let index_path = self.index_path(uuid);
             // take the lock to reopen the environment.
             reopen
                 .reopen(&mut self.index_map.write().unwrap(), &index_path)
@@ -405,7 +425,7 @@ impl IndexMapper {
             // if it's not already there.
             match index_map.get(&uuid) {
                 Missing => {
-                    let index_path = self.base_path.join(uuid.to_string());
+                    let index_path = self.index_path(uuid);

                     break index_map
                         .create(
@@ -432,6 +452,14 @@ impl IndexMapper {
         Ok(index)
     }

+    /// Returns the path of the index.
+    ///
+    /// The folder located at this path contains the data.mdb,
+    /// the lock.mdb, and an optional data.mdb.cpy file.
+    pub fn index_path(&self, uuid: Uuid) -> PathBuf {
+        self.base_path.join(uuid.to_string())
+    }
+
     pub fn rollback_index(
         &self,
         rtxn: &RoTxn,
@@ -472,7 +500,7 @@ impl IndexMapper {
         };
     }

-        let index_path = self.base_path.join(uuid.to_string());
+        let index_path = self.index_path(uuid);
         Index::rollback(milli::heed::EnvOpenOptions::new().read_txn_without_tls(), index_path, to)
             .map_err(|err| crate::Error::from_milli(err, Some(name.to_string())))
     }
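Note: `index_path` centralizes a computation that was previously repeated at four call sites (index creation, deletion, reopening, and rollback), and the compaction-cleanup code added later in this changeset relies on it too. A trivial sketch of the helper's contract, with an illustrative base path and uuid:

use std::path::{Path, PathBuf};

// Sketch only: every index lives in a directory named after its uuid,
// under one base path, so the on-disk layout is defined in one place.
fn index_path(base_path: &Path, uuid: &str) -> PathBuf {
    base_path.join(uuid)
}

fn main() {
    let p = index_path(Path::new("/data/indexes"), "a2f1c0de-0000-4000-8000-000000000000");
    println!("{}", p.display());
}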
@@ -6,7 +6,7 @@ use meilisearch_types::heed::types::{SerdeBincode, SerdeJson, Str};
 use meilisearch_types::heed::{Database, RoTxn};
 use meilisearch_types::milli::{CboRoaringBitmapCodec, RoaringBitmapCodec, BEU32};
 use meilisearch_types::tasks::{Details, Kind, Status, Task};
-use meilisearch_types::versioning;
+use meilisearch_types::versioning::{self, VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH};
 use roaring::RoaringBitmap;

 use crate::index_mapper::IndexMapper;
@@ -36,6 +36,7 @@ pub fn snapshot_index_scheduler(scheduler: &IndexScheduler) -> String {
         run_loop_iteration: _,
         embedders: _,
         chat_settings: _,
+        runtime: _,
     } = scheduler;

     let rtxn = env.read_txn().unwrap();
@@ -231,6 +232,7 @@ pub fn snapshot_task(task: &Task) -> String {
         status,
         kind,
         network,
+        custom_metadata,
     } = task;
     snap.push('{');
     snap.push_str(&format!("uid: {uid}, "));
@@ -251,6 +253,9 @@ pub fn snapshot_task(task: &Task) -> String {
     if let Some(network) = network {
         snap.push_str(&format!("network: {network:?}, "))
     }
+    if let Some(custom_metadata) = custom_metadata {
+        snap.push_str(&format!("custom_metadata: {custom_metadata:?}"))
+    }

     snap.push('}');
     snap
@@ -315,7 +320,14 @@ fn snapshot_details(d: &Details) -> String {
             format!("{{ url: {url:?}, api_key: {api_key:?}, payload_size: {payload_size:?}, indexes: {indexes:?} }}")
         }
         Details::UpgradeDatabase { from, to } => {
-            format!("{{ from: {from:?}, to: {to:?} }}")
+            if to == &(VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH) {
+                format!("{{ from: {from:?}, to: [current version] }}")
+            } else {
+                format!("{{ from: {from:?}, to: {to:?} }}")
+            }
+        }
+        Details::IndexCompaction { index_uid, pre_compaction_size, post_compaction_size } => {
+            format!("{{ index_uid: {index_uid:?}, pre_compaction_size: {pre_compaction_size:?}, post_compaction_size: {post_compaction_size:?} }}")
         }
     }
 }
@@ -392,7 +404,21 @@ pub fn snapshot_batch(batch: &Batch) -> String {

     snap.push('{');
     snap.push_str(&format!("uid: {uid}, "));
-    snap.push_str(&format!("details: {}, ", serde_json::to_string(details).unwrap()));
+    let details = if let Some(upgrade_to) = &details.upgrade_to {
+        if upgrade_to.as_str()
+            == format!("v{VERSION_MAJOR}.{VERSION_MINOR}.{VERSION_PATCH}").as_str()
+        {
+            let mut details = details.clone();
+
+            details.upgrade_to = Some("[current version]".into());
+            serde_json::to_string(&details).unwrap()
+        } else {
+            serde_json::to_string(details).unwrap()
+        }
+    } else {
+        serde_json::to_string(details).unwrap()
+    };
+    snap.push_str(&format!("details: {details}, "));
     snap.push_str(&format!("stats: {}, ", serde_json::to_string(&stats).unwrap()));
     if !embedder_stats.skip_serializing() {
         snap.push_str(&format!(
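Note: both snapshot changes above serve the same goal: debug snapshots used by the test suite should not churn on every release, so when an upgrade target equals the currently compiled version it is printed as a stable "[current version]" placeholder. A standalone sketch of the idea (the constants are illustrative stand-ins for VERSION_MAJOR/VERSION_MINOR/VERSION_PATCH):

// Sketch only: redact version-dependent output so test snapshots stay
// stable across releases.
const MAJOR: u32 = 1;
const MINOR: u32 = 2;
const PATCH: u32 = 3;

fn render_upgrade_target(to: (u32, u32, u32)) -> String {
    if to == (MAJOR, MINOR, PATCH) {
        // Matches the running binary: substitute the stable placeholder.
        "[current version]".to_string()
    } else {
        format!("v{}.{}.{}", to.0, to.1, to.2)
    }
}

fn main() {
    assert_eq!(render_upgrade_target((1, 2, 3)), "[current version]");
    assert_eq!(render_upgrade_target((1, 2, 0)), "v1.2.0");
}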
@@ -1,3 +1,6 @@
+// The main Error type is large and boxing the large variant makes pattern matching fail
+#![allow(clippy::result_large_err)]
+
 /*!
 This crate defines the index scheduler, which is responsible for:
 1. Keeping references to meilisearch's indexes and mapping them to their
@@ -51,7 +54,6 @@ pub use features::RoFeatures;
 use flate2::bufread::GzEncoder;
 use flate2::Compression;
 use meilisearch_types::batches::Batch;
-use meilisearch_types::enterprise_edition::network::Network;
 use meilisearch_types::features::{
     ChatCompletionSettings, InstanceTogglableFeatures, RuntimeTogglableFeatures,
 };
@@ -64,6 +66,7 @@ use meilisearch_types::milli::vector::{
     Embedder, EmbedderOptions, RuntimeEmbedder, RuntimeEmbedders, RuntimeFragment,
 };
 use meilisearch_types::milli::{self, Index};
+use meilisearch_types::network::Network;
 use meilisearch_types::task_view::TaskView;
 use meilisearch_types::tasks::{KindWithContent, Task, TaskNetwork};
 use meilisearch_types::webhooks::{Webhook, WebhooksDumpView, WebhooksView};
@@ -213,6 +216,9 @@ pub struct IndexScheduler {
     /// A counter that is incremented before every call to [`tick`](IndexScheduler::tick)
     #[cfg(test)]
     run_loop_iteration: Arc<RwLock<usize>>,
+
+    /// The tokio runtime used for asynchronous tasks.
+    runtime: Option<tokio::runtime::Handle>,
 }

 impl IndexScheduler {
@@ -239,6 +245,7 @@ impl IndexScheduler {
             run_loop_iteration: self.run_loop_iteration.clone(),
             features: self.features.clone(),
             chat_settings: self.chat_settings,
+            runtime: self.runtime.clone(),
         }
     }

@@ -252,13 +259,23 @@ impl IndexScheduler {
     }

     /// Create an index scheduler and start its run loop.
-    #[allow(private_interfaces)] // because test_utils is private
     pub fn new(
         options: IndexSchedulerOptions,
         auth_env: Env<WithoutTls>,
         from_db_version: (u32, u32, u32),
-        #[cfg(test)] test_breakpoint_sdr: crossbeam_channel::Sender<(test_utils::Breakpoint, bool)>,
-        #[cfg(test)] planned_failures: Vec<(usize, test_utils::FailureLocation)>,
+        runtime: Option<tokio::runtime::Handle>,
+    ) -> Result<Self> {
+        let this = Self::new_without_run(options, auth_env, from_db_version, runtime)?;
+
+        this.run();
+        Ok(this)
+    }
+
+    fn new_without_run(
+        options: IndexSchedulerOptions,
+        auth_env: Env<WithoutTls>,
+        from_db_version: (u32, u32, u32),
+        runtime: Option<tokio::runtime::Handle>,
     ) -> Result<Self> {
         std::fs::create_dir_all(&options.tasks_path)?;
         std::fs::create_dir_all(&options.update_file_path)?;
@@ -313,8 +330,7 @@ impl IndexScheduler {

         wtxn.commit()?;

-        // allow unreachable_code to get rids of the warning in the case of a test build.
-        let this = Self {
+        Ok(Self {
             processing_tasks: Arc::new(RwLock::new(ProcessingTasks::new())),
             version,
             queue,
@@ -330,21 +346,38 @@ impl IndexScheduler {
             webhooks: Arc::new(webhooks),
             embedders: Default::default(),

-            #[cfg(test)]
-            test_breakpoint_sdr,
-            #[cfg(test)]
-            planned_failures,
+            #[cfg(test)] // Will be replaced in `new_tests` in test environments
+            test_breakpoint_sdr: crossbeam_channel::bounded(0).0,
+            #[cfg(test)] // Will be replaced in `new_tests` in test environments
+            planned_failures: Default::default(),
             #[cfg(test)]
             run_loop_iteration: Arc::new(RwLock::new(0)),
             features,
             chat_settings,
-        };
+            runtime,
+        })
+    }
+
+    /// Create an index scheduler and start its run loop.
+    #[cfg(test)]
+    fn new_test(
+        options: IndexSchedulerOptions,
+        auth_env: Env<WithoutTls>,
+        from_db_version: (u32, u32, u32),
+        runtime: Option<tokio::runtime::Handle>,
+        test_breakpoint_sdr: crossbeam_channel::Sender<(test_utils::Breakpoint, bool)>,
+        planned_failures: Vec<(usize, test_utils::FailureLocation)>,
+    ) -> Result<Self> {
+        let mut this = Self::new_without_run(options, auth_env, from_db_version, runtime)?;
+
+        this.test_breakpoint_sdr = test_breakpoint_sdr;
+        this.planned_failures = planned_failures;

         this.run();
         Ok(this)
     }

-    fn read_txn(&self) -> Result<RoTxn<WithoutTls>> {
+    fn read_txn(&self) -> Result<RoTxn<'_, WithoutTls>> {
         self.env.read_txn().map_err(|e| e.into())
     }

@@ -723,6 +756,19 @@ impl IndexScheduler {
         kind: KindWithContent,
         task_id: Option<TaskId>,
         dry_run: bool,
+    ) -> Result<Task> {
+        self.register_with_custom_metadata(kind, task_id, None, dry_run)
+    }
+
+    /// Register a new task in the scheduler, with metadata.
+    ///
+    /// If it fails and data was associated with the task, it tries to delete the associated data.
+    pub fn register_with_custom_metadata(
+        &self,
+        kind: KindWithContent,
+        task_id: Option<TaskId>,
+        custom_metadata: Option<String>,
+        dry_run: bool,
     ) -> Result<Task> {
         // if the task doesn't delete or cancel anything and 40% of the task queue is full, we must refuse to enqueue the incoming task
         if !matches!(&kind, KindWithContent::TaskDeletion { tasks, .. } | KindWithContent::TaskCancelation { tasks, .. } if !tasks.is_empty())
@@ -733,7 +779,7 @@ impl IndexScheduler {
         }

         let mut wtxn = self.env.write_txn()?;
-        let task = self.queue.register(&mut wtxn, &kind, task_id, dry_run)?;
+        let task = self.queue.register(&mut wtxn, &kind, task_id, custom_metadata, dry_run)?;

         // If the registered task is a task cancelation
         // we inform the processing tasks to stop (if necessary).
@@ -757,7 +803,7 @@ impl IndexScheduler {

     /// Register a new task coming from a dump in the scheduler.
     /// By taking a mutable ref we're pretty sure no one will ever import a dump while actix is running.
-    pub fn register_dumped_task(&mut self) -> Result<Dump> {
+    pub fn register_dumped_task(&mut self) -> Result<Dump<'_>> {
         Dump::new(self)
     }

@@ -806,10 +852,8 @@ impl IndexScheduler {
             .queue
             .tasks
             .get_task(self.rtxn, task_id)
-            .map_err(|err| io::Error::new(io::ErrorKind::Other, err))?
-            .ok_or_else(|| {
-                io::Error::new(io::ErrorKind::Other, Error::CorruptedTaskQueue)
-            })?;
+            .map_err(io::Error::other)?
+            .ok_or_else(|| io::Error::other(Error::CorruptedTaskQueue))?;

         serde_json::to_writer(&mut self.buffer, &TaskView::from_task(&task))?;
         self.buffer.push(b'\n');
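Note: the `.map_err(io::Error::other)` rewrite above uses `std::io::Error::other`, available since Rust 1.74, which is shorthand for `io::Error::new(io::ErrorKind::Other, e)`. A quick equivalence check:

use std::io;

// The two forms below produce the same ErrorKind; `io::Error::other`
// simply removes the ErrorKind::Other boilerplate.
fn old_style(msg: &str) -> io::Error {
    io::Error::new(io::ErrorKind::Other, msg.to_string())
}

fn new_style(msg: &str) -> io::Error {
    io::Error::other(msg.to_string())
}

fn main() {
    assert_eq!(old_style("boom").kind(), new_style("boom").kind());
}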
@@ -75,6 +75,7 @@ make_enum_progress! {
     pub enum TaskCancelationProgress {
         RetrievingTasks,
         CancelingUpgrade,
+        CleaningCompactionLeftover,
         UpdatingTasks,
     }
 }
@@ -138,6 +139,17 @@ make_enum_progress! {
     }
 }

+make_enum_progress! {
+    pub enum IndexCompaction {
+        RetrieveTheIndex,
+        CreateTemporaryFile,
+        CopyAndCompactTheIndex,
+        PersistTheCompactedIndex,
+        CloseTheIndex,
+        ReopenTheIndex,
+    }
+}
+
 make_enum_progress! {
     pub enum InnerSwappingTwoIndexes {
         RetrieveTheTasks,
@@ -275,10 +275,10 @@ impl BatchQueue {
     pub(crate) fn get_existing_batches(
         &self,
         rtxn: &RoTxn,
-        tasks: impl IntoIterator<Item = BatchId>,
+        batches: impl IntoIterator<Item = BatchId>,
         processing: &ProcessingTasks,
     ) -> Result<Vec<Batch>> {
-        tasks
+        batches
             .into_iter()
             .map(|batch_id| {
                 if Some(batch_id) == processing.batch.as_ref().map(|batch| batch.uid) {
@@ -295,7 +295,7 @@ impl BatchQueue {
                     Ok(batch)
                 } else {
                     self.get_batch(rtxn, batch_id)
-                        .and_then(|task| task.ok_or(Error::CorruptedTaskQueue))
+                        .and_then(|batch| batch.ok_or(Error::CorruptedTaskQueue))
                 }
             })
             .collect::<Result<_>>()
@@ -257,6 +257,7 @@ impl Queue {
         wtxn: &mut RwTxn,
         kind: &KindWithContent,
         task_id: Option<TaskId>,
+        custom_metadata: Option<String>,
         dry_run: bool,
     ) -> Result<Task> {
         let next_task_id = self.tasks.next_task_id(wtxn)?;
@@ -280,6 +281,7 @@ impl Queue {
             status: Status::Enqueued,
             kind: kind.clone(),
             network: None,
+            custom_metadata,
         };
         // For deletion and cancelation tasks, we want to make extra sure that they
         // don't attempt to delete/cancel tasks that are newer than themselves.
@@ -310,7 +312,8 @@ impl Queue {
             | self.tasks.status.get(wtxn, &Status::Failed)?.unwrap_or_default()
             | self.tasks.status.get(wtxn, &Status::Canceled)?.unwrap_or_default();

-        let to_delete = RoaringBitmap::from_iter(finished.into_iter().rev().take(100_000));
+        let to_delete =
+            RoaringBitmap::from_sorted_iter(finished.into_iter().take(100_000)).unwrap();

         // /!\ the len must be at least 2 or else we might enter an infinite loop where we only delete
         // the deletion tasks we enqueued ourselves.
@@ -326,7 +329,7 @@ impl Queue {
         );

         // it's safe to unwrap here because we checked the len above
-        let newest_task_id = to_delete.iter().last().unwrap();
+        let newest_task_id = to_delete.iter().next_back().unwrap();
         let last_task_to_delete =
             self.tasks.get_task(wtxn, newest_task_id)?.ok_or(Error::CorruptedTaskQueue)?;

@@ -343,6 +346,7 @@ impl Queue {
                 tasks: to_delete,
             },
             None,
+            None,
             false,
         )?;

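Note: the `to_delete` change above does two things: it switches to `RoaringBitmap::from_sorted_iter`, which skips the general-purpose insertion path but returns an error if the input is not sorted ascending, and it drops the `.rev()`, so the 100,000 oldest finished tasks are selected instead of the newest. Since a RoaringBitmap always iterates in ascending order, the unwrap cannot fail. A small demonstration:

use roaring::RoaringBitmap;

fn main() {
    let finished: RoaringBitmap = (0u32..1_000_000).collect();

    // RoaringBitmap iterates in ascending order, so feeding it straight
    // into `from_sorted_iter` is valid: the unwrap is safe by construction.
    let to_delete =
        RoaringBitmap::from_sorted_iter(finished.into_iter().take(100_000)).unwrap();
    assert_eq!(to_delete.len(), 100_000);
    assert_eq!(to_delete.max(), Some(99_999)); // the oldest ids, not the newest

    // An unsorted iterator makes `from_sorted_iter` return Err instead of
    // silently building a wrong bitmap.
    assert!(RoaringBitmap::from_sorted_iter([3u32, 1, 2]).is_err());
}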
@@ -68,13 +68,14 @@ impl From<KindWithContent> for AutobatchKind {
             KindWithContent::IndexCreation { .. } => AutobatchKind::IndexCreation,
             KindWithContent::IndexUpdate { .. } => AutobatchKind::IndexUpdate,
             KindWithContent::IndexSwap { .. } => AutobatchKind::IndexSwap,
-            KindWithContent::TaskCancelation { .. }
+            KindWithContent::IndexCompaction { .. }
+            | KindWithContent::TaskCancelation { .. }
             | KindWithContent::TaskDeletion { .. }
             | KindWithContent::DumpCreation { .. }
             | KindWithContent::Export { .. }
             | KindWithContent::UpgradeDatabase { .. }
             | KindWithContent::SnapshotCreation => {
-                panic!("The autobatcher should never be called with tasks that don't apply to an index.")
+                panic!("The autobatcher should never be called with tasks with special priority or that don't apply to an index.")
             }
         }
     }
@@ -288,7 +289,9 @@ impl BatchKind {

         match (self, autobatch_kind) {
             // We don't batch any of these operations
-            (this, K::IndexCreation | K::IndexUpdate | K::IndexSwap | K::DocumentEdition) => Break((this, BatchStopReason::TaskCannotBeBatched { kind, id })),
+            (this, K::IndexCreation | K::IndexUpdate | K::IndexSwap | K::DocumentEdition) => {
+                Break((this, BatchStopReason::TaskCannotBeBatched { kind, id }))
+            },
             // We must not batch tasks that don't have the same index creation rights if the index doesn't already exist.
             (this, kind) if !index_already_exists && this.allow_index_creation() == Some(false) && kind.allow_index_creation() == Some(true) => {
                 Break((this, BatchStopReason::IndexCreationMismatch { id }))
|
|||||||
UpgradeDatabase {
|
UpgradeDatabase {
|
||||||
tasks: Vec<Task>,
|
tasks: Vec<Task>,
|
||||||
},
|
},
|
||||||
|
IndexCompaction {
|
||||||
|
index_uid: String,
|
||||||
|
task: Task,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
@@ -66,6 +70,7 @@ pub(crate) enum DocumentOperation {
|
|||||||
|
|
||||||
/// A [batch](Batch) that combines multiple tasks operating on an index.
|
/// A [batch](Batch) that combines multiple tasks operating on an index.
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
|
#[allow(clippy::large_enum_variant)]
|
||||||
pub(crate) enum IndexOperation {
|
pub(crate) enum IndexOperation {
|
||||||
DocumentOperation {
|
DocumentOperation {
|
||||||
index_uid: String,
|
index_uid: String,
|
||||||
@@ -109,7 +114,8 @@ impl Batch {
|
|||||||
| Batch::Dump(task)
|
| Batch::Dump(task)
|
||||||
| Batch::IndexCreation { task, .. }
|
| Batch::IndexCreation { task, .. }
|
||||||
| Batch::Export { task }
|
| Batch::Export { task }
|
||||||
| Batch::IndexUpdate { task, .. } => {
|
| Batch::IndexUpdate { task, .. }
|
||||||
|
| Batch::IndexCompaction { task, .. } => {
|
||||||
RoaringBitmap::from_sorted_iter(std::iter::once(task.uid)).unwrap()
|
RoaringBitmap::from_sorted_iter(std::iter::once(task.uid)).unwrap()
|
||||||
}
|
}
|
||||||
Batch::SnapshotCreation(tasks)
|
Batch::SnapshotCreation(tasks)
|
||||||
@@ -154,7 +160,8 @@ impl Batch {
|
|||||||
IndexOperation { op, .. } => Some(op.index_uid()),
|
IndexOperation { op, .. } => Some(op.index_uid()),
|
||||||
IndexCreation { index_uid, .. }
|
IndexCreation { index_uid, .. }
|
||||||
| IndexUpdate { index_uid, .. }
|
| IndexUpdate { index_uid, .. }
|
||||||
| IndexDeletion { index_uid, .. } => Some(index_uid),
|
| IndexDeletion { index_uid, .. }
|
||||||
|
| IndexCompaction { index_uid, .. } => Some(index_uid),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -174,6 +181,7 @@ impl fmt::Display for Batch {
|
|||||||
Batch::IndexUpdate { .. } => f.write_str("IndexUpdate")?,
|
Batch::IndexUpdate { .. } => f.write_str("IndexUpdate")?,
|
||||||
Batch::IndexDeletion { .. } => f.write_str("IndexDeletion")?,
|
Batch::IndexDeletion { .. } => f.write_str("IndexDeletion")?,
|
||||||
Batch::IndexSwap { .. } => f.write_str("IndexSwap")?,
|
Batch::IndexSwap { .. } => f.write_str("IndexSwap")?,
|
||||||
|
Batch::IndexCompaction { .. } => f.write_str("IndexCompaction")?,
|
||||||
Batch::Export { .. } => f.write_str("Export")?,
|
Batch::Export { .. } => f.write_str("Export")?,
|
||||||
Batch::UpgradeDatabase { .. } => f.write_str("UpgradeDatabase")?,
|
Batch::UpgradeDatabase { .. } => f.write_str("UpgradeDatabase")?,
|
||||||
};
|
};
|
||||||
@@ -511,17 +519,33 @@ impl IndexScheduler {
|
|||||||
return Ok(Some((Batch::TaskDeletions(tasks), current_batch)));
|
return Ok(Some((Batch::TaskDeletions(tasks), current_batch)));
|
||||||
}
|
}
|
||||||
|
|
||||||
// 3. we batch the export.
|
// 3. we get the next task to compact
|
||||||
|
let to_compact = self.queue.tasks.get_kind(rtxn, Kind::IndexCompaction)? & enqueued;
|
||||||
|
if let Some(task_id) = to_compact.min() {
|
||||||
|
let mut task =
|
||||||
|
self.queue.tasks.get_task(rtxn, task_id)?.ok_or(Error::CorruptedTaskQueue)?;
|
||||||
|
current_batch.processing(Some(&mut task));
|
||||||
|
current_batch.reason(BatchStopReason::TaskCannotBeBatched {
|
||||||
|
kind: Kind::IndexCompaction,
|
||||||
|
id: task_id,
|
||||||
|
});
|
||||||
|
let index_uid =
|
||||||
|
task.index_uid().expect("Compaction task must have an index uid").to_owned();
|
||||||
|
return Ok(Some((Batch::IndexCompaction { index_uid, task }, current_batch)));
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4. we batch the export.
|
||||||
let to_export = self.queue.tasks.get_kind(rtxn, Kind::Export)? & enqueued;
|
let to_export = self.queue.tasks.get_kind(rtxn, Kind::Export)? & enqueued;
|
||||||
if !to_export.is_empty() {
|
if !to_export.is_empty() {
|
||||||
let task_id = to_export.iter().next().expect("There must be at least one export task");
|
let task_id = to_export.iter().next().expect("There must be at least one export task");
|
||||||
let mut task = self.queue.tasks.get_task(rtxn, task_id)?.unwrap();
|
let mut task = self.queue.tasks.get_task(rtxn, task_id)?.unwrap();
|
||||||
current_batch.processing([&mut task]);
|
current_batch.processing([&mut task]);
|
||||||
current_batch.reason(BatchStopReason::TaskKindCannotBeBatched { kind: Kind::Export });
|
current_batch
|
||||||
|
.reason(BatchStopReason::TaskCannotBeBatched { kind: Kind::Export, id: task_id });
|
||||||
return Ok(Some((Batch::Export { task }, current_batch)));
|
return Ok(Some((Batch::Export { task }, current_batch)));
|
||||||
}
|
}
|
||||||
|
|
||||||
// 4. we batch the snapshot.
|
// 5. we batch the snapshot.
|
||||||
let to_snapshot = self.queue.tasks.get_kind(rtxn, Kind::SnapshotCreation)? & enqueued;
|
let to_snapshot = self.queue.tasks.get_kind(rtxn, Kind::SnapshotCreation)? & enqueued;
|
||||||
if !to_snapshot.is_empty() {
|
if !to_snapshot.is_empty() {
|
||||||
let mut tasks = self.queue.tasks.get_existing_tasks(rtxn, to_snapshot)?;
|
let mut tasks = self.queue.tasks.get_existing_tasks(rtxn, to_snapshot)?;
|
||||||
@@ -531,7 +555,7 @@ impl IndexScheduler {
|
|||||||
return Ok(Some((Batch::SnapshotCreation(tasks), current_batch)));
|
return Ok(Some((Batch::SnapshotCreation(tasks), current_batch)));
|
||||||
}
|
}
|
||||||
|
|
||||||
// 5. we batch the dumps.
|
// 6. we batch the dumps.
|
||||||
let to_dump = self.queue.tasks.get_kind(rtxn, Kind::DumpCreation)? & enqueued;
|
let to_dump = self.queue.tasks.get_kind(rtxn, Kind::DumpCreation)? & enqueued;
|
||||||
if let Some(to_dump) = to_dump.min() {
|
if let Some(to_dump) = to_dump.min() {
|
||||||
let mut task =
|
let mut task =
|
||||||
@@ -544,7 +568,7 @@ impl IndexScheduler {
|
|||||||
return Ok(Some((Batch::Dump(task), current_batch)));
|
return Ok(Some((Batch::Dump(task), current_batch)));
|
||||||
}
|
}
|
||||||
|
|
||||||
// 6. We make a batch from the unprioritised tasks. Start by taking the next enqueued task.
|
// 7. We make a batch from the unprioritised tasks. Start by taking the next enqueued task.
|
||||||
let task_id = if let Some(task_id) = enqueued.min() { task_id } else { return Ok(None) };
|
let task_id = if let Some(task_id) = enqueued.min() { task_id } else { return Ok(None) };
|
||||||
let mut task =
|
let mut task =
|
||||||
self.queue.tasks.get_task(rtxn, task_id)?.ok_or(Error::CorruptedTaskQueue)?;
|
self.queue.tasks.get_task(rtxn, task_id)?.ok_or(Error::CorruptedTaskQueue)?;
|
||||||
|
|||||||
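Note: the renumbered steps above define a fixed priority ladder for batch creation: deletions/cancelations first, then compaction, export, snapshot, dump, and finally the ordinary autobatched tasks. Each prioritized kind is picked with the same bitmap idiom: intersect the set of tasks of that kind with the enqueued set, then take the minimum id so tasks run in submission order. A standalone sketch of that selection:

use roaring::RoaringBitmap;

// Sketch only: the "tasks of kind X that are still enqueued, oldest
// first" selection used for compaction, export, and dump batching above.
fn next_task_of_kind(of_kind: &RoaringBitmap, enqueued: &RoaringBitmap) -> Option<u32> {
    (of_kind & enqueued).min()
}

fn main() {
    let compactions: RoaringBitmap = [3u32, 7, 9].into_iter().collect();
    let enqueued: RoaringBitmap = [7u32, 9, 12].into_iter().collect();
    // Task 3 already finished, so the oldest enqueued compaction is 7.
    assert_eq!(next_task_of_kind(&compactions, &enqueued), Some(7));
}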
@@ -25,6 +25,7 @@ use convert_case::{Case, Casing as _};
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::heed::{Env, WithoutTls};
 use meilisearch_types::milli;
+use meilisearch_types::milli::update::S3SnapshotOptions;
 use meilisearch_types::tasks::Status;
 use process_batch::ProcessBatchInfo;
 use rayon::current_num_threads;
@@ -87,11 +88,14 @@ pub struct Scheduler {

     /// Snapshot compaction status.
     pub(crate) experimental_no_snapshot_compaction: bool,
+
+    /// S3 Snapshot options.
+    pub(crate) s3_snapshot_options: Option<S3SnapshotOptions>,
 }

 impl Scheduler {
-    pub(crate) fn private_clone(&self) -> Scheduler {
-        Scheduler {
+    pub(crate) fn private_clone(&self) -> Self {
+        Self {
             must_stop_processing: self.must_stop_processing.clone(),
             wake_up: self.wake_up.clone(),
             autobatching_enabled: self.autobatching_enabled,
@@ -103,23 +107,52 @@ impl Scheduler {
             version_file_path: self.version_file_path.clone(),
             embedding_cache_cap: self.embedding_cache_cap,
             experimental_no_snapshot_compaction: self.experimental_no_snapshot_compaction,
+            s3_snapshot_options: self.s3_snapshot_options.clone(),
         }
     }

     pub fn new(options: &IndexSchedulerOptions, auth_env: Env<WithoutTls>) -> Scheduler {
+        let IndexSchedulerOptions {
+            version_file_path,
+            auth_path: _,
+            tasks_path: _,
+            update_file_path: _,
+            indexes_path: _,
+            snapshots_path,
+            dumps_path,
+            cli_webhook_url: _,
+            cli_webhook_authorization: _,
+            task_db_size: _,
+            index_base_map_size: _,
+            enable_mdb_writemap: _,
+            index_growth_amount: _,
+            index_count: _,
+            indexer_config,
+            autobatching_enabled,
+            cleanup_enabled: _,
+            max_number_of_tasks: _,
+            max_number_of_batched_tasks,
+            batched_tasks_size_limit,
+            instance_features: _,
+            auto_upgrade: _,
+            embedding_cache_cap,
+            experimental_no_snapshot_compaction,
+        } = options;
+
         Scheduler {
             must_stop_processing: MustStopProcessing::default(),
             // we want to start the loop right away in case meilisearch was ctrl+Ced while processing things
             wake_up: Arc::new(SignalEvent::auto(true)),
-            autobatching_enabled: options.autobatching_enabled,
-            max_number_of_batched_tasks: options.max_number_of_batched_tasks,
-            batched_tasks_size_limit: options.batched_tasks_size_limit,
-            dumps_path: options.dumps_path.clone(),
-            snapshots_path: options.snapshots_path.clone(),
+            autobatching_enabled: *autobatching_enabled,
+            max_number_of_batched_tasks: *max_number_of_batched_tasks,
+            batched_tasks_size_limit: *batched_tasks_size_limit,
+            dumps_path: dumps_path.clone(),
+            snapshots_path: snapshots_path.clone(),
             auth_env,
-            version_file_path: options.version_file_path.clone(),
-            embedding_cache_cap: options.embedding_cache_cap,
-            experimental_no_snapshot_compaction: options.experimental_no_snapshot_compaction,
+            version_file_path: version_file_path.clone(),
+            embedding_cache_cap: *embedding_cache_cap,
+            experimental_no_snapshot_compaction: *experimental_no_snapshot_compaction,
+            s3_snapshot_options: indexer_config.s3_snapshot_options.clone(),
         }
     }
 }
@@ -1,22 +1,27 @@
 use std::collections::{BTreeSet, HashMap, HashSet};
+use std::fs::{remove_file, File};
+use std::io::{ErrorKind, Seek, SeekFrom};
 use std::panic::{catch_unwind, AssertUnwindSafe};
 use std::sync::atomic::Ordering;

+use byte_unit::Byte;
 use meilisearch_types::batches::{BatchEnqueuedAt, BatchId};
 use meilisearch_types::heed::{RoTxn, RwTxn};
+use meilisearch_types::milli::heed::CompactionOption;
 use meilisearch_types::milli::progress::{Progress, VariableNameStep};
 use meilisearch_types::milli::{self, ChannelCongestion};
 use meilisearch_types::tasks::{Details, IndexSwap, Kind, KindWithContent, Status, Task};
 use meilisearch_types::versioning::{VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH};
 use milli::update::Settings as MilliSettings;
 use roaring::RoaringBitmap;
+use tempfile::{PersistError, TempPath};
 use time::OffsetDateTime;

 use super::create_batch::Batch;
 use crate::processing::{
     AtomicBatchStep, AtomicTaskStep, CreateIndexProgress, DeleteIndexProgress, FinalizingIndexStep,
-    InnerSwappingTwoIndexes, SwappingTheIndexes, TaskCancelationProgress, TaskDeletionProgress,
-    UpdateIndexProgress,
+    IndexCompaction, InnerSwappingTwoIndexes, SwappingTheIndexes, TaskCancelationProgress,
+    TaskDeletionProgress, UpdateIndexProgress,
 };
 use crate::utils::{
     self, remove_n_tasks_datetime_earlier_than, remove_task_datetime, swap_index_uid_in_task,
@@ -24,6 +29,9 @@ use crate::utils::{
 };
 use crate::{Error, IndexScheduler, Result, TaskId};

+/// The name of the copy of the data.mdb file used during compaction.
+const DATA_MDB_COPY_NAME: &str = "data.mdb.cpy";
+
 #[derive(Debug, Default)]
 pub struct ProcessBatchInfo {
     /// The write channel congestion. None when unavailable: settings update.
@@ -147,7 +155,6 @@ impl IndexScheduler {
         };

         let mut index_wtxn = index.write_txn()?;
-
         let index_version = index.get_version(&index_wtxn)?.unwrap_or((1, 12, 0));
         let package_version = (VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH);
         if index_version != package_version {
@@ -419,6 +426,47 @@ impl IndexScheduler {
                 task.status = Status::Succeeded;
                 Ok((vec![task], ProcessBatchInfo::default()))
             }
+            Batch::IndexCompaction { index_uid: _, mut task } => {
+                let KindWithContent::IndexCompaction { index_uid } = &task.kind else {
+                    unreachable!()
+                };
+
+                let rtxn = self.env.read_txn()?;
+                let ret = catch_unwind(AssertUnwindSafe(|| {
+                    self.apply_compaction(&rtxn, &progress, index_uid)
+                }));
+
+                let (pre_size, post_size) = match ret {
+                    Ok(Ok(stats)) => stats,
+                    Ok(Err(Error::AbortedTask)) => return Err(Error::AbortedTask),
+                    Ok(Err(e)) => return Err(e),
+                    Err(e) => {
+                        let msg = match e.downcast_ref::<&'static str>() {
+                            Some(s) => *s,
+                            None => match e.downcast_ref::<String>() {
+                                Some(s) => &s[..],
+                                None => "Box<dyn Any>",
+                            },
+                        };
+                        return Err(Error::Export(Box::new(Error::ProcessBatchPanicked(
+                            msg.to_string(),
+                        ))));
+                    }
+                };
+
+                task.status = Status::Succeeded;
+                if let Some(Details::IndexCompaction {
+                    index_uid: _,
+                    pre_compaction_size,
+                    post_compaction_size,
+                }) = task.details.as_mut()
+                {
+                    *pre_compaction_size = Some(Byte::from_u64(pre_size));
+                    *post_compaction_size = Some(Byte::from_u64(post_size));
+                }
+
+                Ok((vec![task], ProcessBatchInfo::default()))
+            }
             Batch::Export { mut task } => {
                 let KindWithContent::Export { url, api_key, payload_size, indexes } = &task.kind
                 else {
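Note: the panic handling above is the standard trick for recovering a message out of `catch_unwind`: a panic payload is a `Box<dyn Any>` whose concrete type is usually either a `&'static str` or a `String`, so both are tried before falling back to a placeholder. A self-contained sketch of the same downcast chain:

use std::panic::{catch_unwind, AssertUnwindSafe};

// Extract a human-readable message from a panic payload, mirroring the
// downcast chain in the IndexCompaction arm above.
fn panic_message(payload: Box<dyn std::any::Any + Send>) -> String {
    match payload.downcast_ref::<&'static str>() {
        Some(s) => s.to_string(),
        None => match payload.downcast_ref::<String>() {
            Some(s) => s.clone(),
            None => "Box<dyn Any>".to_string(),
        },
    }
}

fn main() {
    let err = catch_unwind(AssertUnwindSafe(|| panic!("compaction went wrong"))).unwrap_err();
    assert_eq!(panic_message(err), "compaction went wrong");
}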
@@ -494,6 +542,92 @@ impl IndexScheduler {
         }
     }
+
+    fn apply_compaction(
+        &self,
+        rtxn: &RoTxn,
+        progress: &Progress,
+        index_uid: &str,
+    ) -> Result<(u64, u64)> {
+        // 1. Verify that the index exists
+        if !self.index_mapper.index_exists(rtxn, index_uid)? {
+            return Err(Error::IndexNotFound(index_uid.to_owned()));
+        }
+
+        // 2. We retrieve the index and create a temporary file in the index directory
+        progress.update_progress(IndexCompaction::RetrieveTheIndex);
+        let index = self.index_mapper.index(rtxn, index_uid)?;
+
+        // the index operation can take a long time, so save this handle to make it available to the search for the duration of the tick
+        self.index_mapper
+            .set_currently_updating_index(Some((index_uid.to_string(), index.clone())));
+
+        progress.update_progress(IndexCompaction::CreateTemporaryFile);
+        let src_path = index.path().join("data.mdb");
+        let pre_size = std::fs::metadata(&src_path)?.len();
+
+        let dst_path = TempPath::from_path(index.path().join(DATA_MDB_COPY_NAME));
+        let file = File::create(&dst_path)?;
+        let mut file = tempfile::NamedTempFile::from_parts(file, dst_path);
+
+        // 3. We copy the index data to the temporary file
+        progress.update_progress(IndexCompaction::CopyAndCompactTheIndex);
+        index
+            .copy_to_file(file.as_file_mut(), CompactionOption::Enabled)
+            .map_err(|error| Error::Milli { error, index_uid: Some(index_uid.to_string()) })?;
+        // ...and reset the file position as specified in the documentation
+        file.seek(SeekFrom::Start(0))?;
+
+        // 4. We replace the index data file with the temporary file
+        progress.update_progress(IndexCompaction::PersistTheCompactedIndex);
+        match file.persist(src_path) {
+            Ok(file) => file.sync_all()?,
+            // TODO see if we have a _resource busy_ error and probably handle this by:
+            // 1. closing the index, 2. replacing and 3. reopening it
+            Err(PersistError { error, file: _ }) => return Err(Error::IoError(error)),
+        };
+
+        // 5. Prepare to close the index
+        progress.update_progress(IndexCompaction::CloseTheIndex);
+
+        // unmark that the index is the processing one so we don't keep a handle to it, preventing its closing
+        self.index_mapper.set_currently_updating_index(None);
+
+        self.index_mapper.close_index(rtxn, index_uid)?;
+        drop(index);
+
+        progress.update_progress(IndexCompaction::ReopenTheIndex);
+        // 6. Reopen the index
+        // The index will use the compacted data file when being reopened
+        let index = self.index_mapper.index(rtxn, index_uid)?;
+
+        // if the update processed successfully, we're going to store the new
+        // stats of the index. Since the tasks have already been processed and
+        // this is a non-critical operation, if it fails we should not fail
+        // the entire batch.
+        let res = || -> Result<_> {
+            let mut wtxn = self.env.write_txn()?;
+            let index_rtxn = index.read_txn()?;
+            let stats = crate::index_mapper::IndexStats::new(&index, &index_rtxn)
+                .map_err(|e| Error::from_milli(e, Some(index_uid.to_string())))?;
+            self.index_mapper.store_stats_of(&mut wtxn, index_uid, &stats)?;
+            wtxn.commit()?;
+            Ok(stats.database_size)
+        }();
+
+        let post_size = match res {
+            Ok(post_size) => post_size,
+            Err(e) => {
+                tracing::error!(
+                    error = &e as &dyn std::error::Error,
+                    "Could not write the stats of the index"
+                );
+                0
+            }
+        };
+
+        Ok((pre_size, post_size))
+    }
+
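Steps 2-4 above are the classic write-to-temp-then-persist pattern: the compacted copy is built in the index's own directory so that persisting it is an atomic rename on the same filesystem. A minimal sketch of that pattern with the tempfile crate (the same crate used above), where a plain byte write stands in for the LMDB compaction copy:

use std::io::Write as _;

fn replace_atomically(dst: &std::path::Path, payload: &[u8]) -> std::io::Result<()> {
    let dir = dst.parent().expect("destination has a parent directory");

    // Create the temp file in the *same* directory so persist() is a rename,
    // never a copy across filesystems.
    let mut tmp = tempfile::NamedTempFile::new_in(dir)?;
    tmp.write_all(payload)?; // stand-in for copy_to_file(..., CompactionOption::Enabled)

    // Atomically replace the destination, then make the result durable.
    let file = tmp.persist(dst)?;
    file.sync_all()
}

Creating the temp file anywhere else (e.g. /tmp) would turn persist() into a cross-device copy and lose the atomicity the scheduler relies on.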
     /// Swap the index `lhs` with the index `rhs`.
     fn apply_index_swap(
         &self,
@@ -781,9 +915,10 @@ impl IndexScheduler {
         let enqueued_tasks = &self.queue.tasks.get_status(rtxn, Status::Enqueued)?;

-        // 0. Check if any upgrade task was matched.
+        // 0. Check if any upgrade or compaction tasks were matched.
         // If so, we cancel all the failed or enqueued upgrade tasks.
         let upgrade_tasks = &self.queue.tasks.get_kind(rtxn, Kind::UpgradeDatabase)?;
+        let compaction_tasks = &self.queue.tasks.get_kind(rtxn, Kind::IndexCompaction)?;
         let is_canceling_upgrade = !matched_tasks.is_disjoint(upgrade_tasks);
         if is_canceling_upgrade {
             let failed_tasks = self.queue.tasks.get_status(rtxn, Status::Failed)?;
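Task sets in the scheduler are roaring bitmaps, so "did the cancelation match any upgrade task?" is a disjointness test and "which compaction tasks are canceled?" is a bitwise AND, as used in the next hunk. A small self-contained sketch with the roaring crate:

use roaring::RoaringBitmap;

fn main() {
    let matched_tasks: RoaringBitmap = (5..10).collect();
    let upgrade_tasks: RoaringBitmap = [2, 7].into_iter().collect();
    let compaction_tasks: RoaringBitmap = [8, 30].into_iter().collect();

    // Any overlap at all? Cheap: no intermediate bitmap is allocated.
    let is_canceling_upgrade = !matched_tasks.is_disjoint(&upgrade_tasks);
    assert!(is_canceling_upgrade); // task 7 is in both sets

    // Materialize the overlap to iterate over it, as the scheduler does
    // for compaction leftovers.
    for task_id in &matched_tasks & &compaction_tasks {
        assert_eq!(task_id, 8);
    }
}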
@@ -848,7 +983,33 @@ impl IndexScheduler {
             }
         }

-        // 3. We now have a list of tasks to cancel, cancel them
+        // 3. If we are cancelling a compaction task, remove the tempfiles left behind by incomplete compactions
+        for compaction_task in &tasks_to_cancel & compaction_tasks {
+            progress.update_progress(TaskCancelationProgress::CleaningCompactionLeftover);
+            let task = self.queue.tasks.get_task(rtxn, compaction_task)?.unwrap();
+            let Some(Details::IndexCompaction {
+                index_uid,
+                pre_compaction_size: _,
+                post_compaction_size: _,
+            }) = task.details
+            else {
+                unreachable!("wrong details for compaction task {compaction_task}")
+            };
+
+            let index_path = match self.index_mapper.index_mapping.get(rtxn, &index_uid)? {
+                Some(index_uuid) => self.index_mapper.index_path(index_uuid),
+                None => continue,
+            };
+
+            if let Err(e) = remove_file(index_path.join(DATA_MDB_COPY_NAME)) {
+                match e.kind() {
+                    ErrorKind::NotFound => (),
+                    _ => return Err(Error::IoError(e)),
+                }
+            }
+        }
+
+        // 4. We now have a list of tasks to cancel, cancel them
         let (task_progress, progress_obj) = AtomicTaskStep::new(tasks_to_cancel.len() as u32);
         progress.update_progress(progress_obj);
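The cleanup above has to be idempotent: the data.mdb copy only exists if a compaction was interrupted mid-flight, so NotFound is treated as success and every other I/O error stays fatal. A minimal sketch of that shape (the path below is purely illustrative):

use std::io::ErrorKind;

/// Removes `path` if it exists; only real I/O failures bubble up.
fn remove_if_exists(path: &std::path::Path) -> std::io::Result<()> {
    match std::fs::remove_file(path) {
        Ok(()) => Ok(()),
        Err(e) if e.kind() == ErrorKind::NotFound => Ok(()), // already gone: fine
        Err(e) => Err(e),
    }
}

fn main() -> std::io::Result<()> {
    // Safe to call whether or not a leftover copy exists.
    remove_if_exists(std::path::Path::new("/tmp/data.mdb.cpy"))
}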
@@ -370,7 +370,7 @@ fn ureq_error_into_error(error: ureq::Error) -> Error {
             }
             Err(e) => e.into(),
         },
-        ureq::Error::Transport(transport) => io::Error::new(io::ErrorKind::Other, transport).into(),
+        ureq::Error::Transport(transport) => io::Error::other(transport).into(),
     }
 }
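Side note on the one-liner above: io::Error::other was stabilized in Rust 1.74 as shorthand for io::Error::new(io::ErrorKind::Other, e), and recent clippy lints suggest the shorter spelling. The two are equivalent:

use std::io;

fn main() {
    // Equivalent constructions; `other` (Rust 1.74+) is the shorter spelling.
    let old = io::Error::new(io::ErrorKind::Other, "transport failed");
    let new = io::Error::other("transport failed");
    assert_eq!(old.kind(), new.kind());
}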
@@ -12,6 +12,8 @@ use crate::processing::{AtomicUpdateFileStep, SnapshotCreationProgress};
 use crate::queue::TaskQueue;
 use crate::{Error, IndexScheduler, Result};

+const UPDATE_FILES_DIR_NAME: &str = "update_files";
+
 /// # Safety
 ///
 /// See [`EnvOpenOptions::open`].
@@ -78,10 +80,32 @@ impl IndexScheduler {
     pub(super) fn process_snapshot(
         &self,
         progress: Progress,
-        mut tasks: Vec<Task>,
+        tasks: Vec<Task>,
     ) -> Result<Vec<Task>> {
         progress.update_progress(SnapshotCreationProgress::StartTheSnapshotCreation);

+        match self.scheduler.s3_snapshot_options.clone() {
+            Some(options) => {
+                #[cfg(not(unix))]
+                {
+                    let _ = options;
+                    panic!("Non-unix platform does not support S3 snapshotting");
+                }
+                #[cfg(unix)]
+                self.runtime
+                    .as_ref()
+                    .expect("Runtime not initialized")
+                    .block_on(self.process_snapshot_to_s3(progress, options, tasks))
+            }
+            None => self.process_snapshots_to_disk(progress, tasks),
+        }
+    }
+
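The dispatch above drives async upload code from the synchronous scheduler by blocking on a tokio runtime stored on the scheduler (only built when S3 options are configured, hence the expect). A minimal sketch of the same bridge, with a hypothetical Worker type:

use tokio::runtime::Runtime;

struct Worker {
    // Built once at startup; `None` on configurations that never need async work.
    runtime: Option<Runtime>,
}

impl Worker {
    fn tick(&self) -> std::io::Result<()> {
        // Blocking on the stored runtime lets a sync call site drive an async future.
        self.runtime
            .as_ref()
            .expect("Runtime not initialized")
            .block_on(async { /* e.g. an async upload */ Ok(()) })
    }
}

fn main() -> std::io::Result<()> {
    let worker = Worker { runtime: Some(Runtime::new()?) };
    worker.tick()
}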
+    fn process_snapshots_to_disk(
+        &self,
+        progress: Progress,
+        mut tasks: Vec<Task>,
+    ) -> Result<Vec<Task>, Error> {
         fs::create_dir_all(&self.scheduler.snapshots_path)?;
         let temp_snapshot_dir = tempfile::tempdir()?;

@@ -128,7 +152,7 @@ impl IndexScheduler {
         let rtxn = self.env.read_txn()?;

         // 2.4 Create the update files directory
-        let update_files_dir = temp_snapshot_dir.path().join("update_files");
+        let update_files_dir = temp_snapshot_dir.path().join(UPDATE_FILES_DIR_NAME);
         fs::create_dir_all(&update_files_dir)?;

         // 2.5 Only copy the update files of the enqueued tasks
|||||||
let task =
|
let task =
|
||||||
self.queue.tasks.get_task(&rtxn, task_id)?.ok_or(Error::CorruptedTaskQueue)?;
|
self.queue.tasks.get_task(&rtxn, task_id)?.ok_or(Error::CorruptedTaskQueue)?;
|
||||||
if let Some(content_uuid) = task.content_uuid() {
|
if let Some(content_uuid) = task.content_uuid() {
|
||||||
let src = self.queue.file_store.get_update_path(content_uuid);
|
let src = self.queue.file_store.update_path(content_uuid);
|
||||||
let dst = update_files_dir.join(content_uuid.to_string());
|
let dst = update_files_dir.join(content_uuid.to_string());
|
||||||
fs::copy(src, dst)?;
|
fs::copy(src, dst)?;
|
||||||
}
|
}
|
||||||
@@ -206,4 +230,407 @@ impl IndexScheduler {

         Ok(tasks)
     }
+
+    #[cfg(unix)]
+    pub(super) async fn process_snapshot_to_s3(
+        &self,
+        progress: Progress,
+        opts: meilisearch_types::milli::update::S3SnapshotOptions,
+        mut tasks: Vec<Task>,
+    ) -> Result<Vec<Task>> {
+        use meilisearch_types::milli::update::S3SnapshotOptions;
+
+        let S3SnapshotOptions {
+            s3_bucket_url,
+            s3_bucket_region,
+            s3_bucket_name,
+            s3_snapshot_prefix,
+            s3_access_key,
+            s3_secret_key,
+            s3_max_in_flight_parts,
+            s3_compression_level: level,
+            s3_signature_duration,
+            s3_multipart_part_size,
+        } = opts;
+
+        let must_stop_processing = self.scheduler.must_stop_processing.clone();
+        let retry_backoff = backoff::ExponentialBackoff::default();
+        let db_name = {
+            let mut base_path = self.env.path().to_owned();
+            base_path.pop();
+            base_path.file_name().and_then(OsStr::to_str).unwrap_or("data.ms").to_string()
+        };
+
+        let (reader, writer) = std::io::pipe()?;
+        let uploader_task = tokio::spawn(multipart_stream_to_s3(
+            s3_bucket_url,
+            s3_bucket_region,
+            s3_bucket_name,
+            s3_snapshot_prefix,
+            s3_access_key,
+            s3_secret_key,
+            s3_max_in_flight_parts,
+            s3_signature_duration,
+            s3_multipart_part_size,
+            must_stop_processing,
+            retry_backoff,
+            db_name,
+            reader,
+        ));
+
+        let index_scheduler = IndexScheduler::private_clone(self);
+        let builder_task = tokio::task::spawn_blocking(move || {
+            stream_tarball_into_pipe(progress, level, writer, index_scheduler)
+        });
+
+        let (uploader_result, builder_result) = tokio::join!(uploader_task, builder_task);
+
+        // Check the uploader result first to return early on task abortion.
+        // safety: JoinHandle can return an error if the task was aborted, cancelled, or panicked.
+        uploader_result.unwrap()?;
+        builder_result.unwrap()?;
+
+        for task in &mut tasks {
+            task.status = Status::Succeeded;
+        }
+
+        Ok(tasks)
+    }
 }
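The function above never materializes the snapshot on disk: a blocking task writes the tarball into one end of an OS pipe while an async task uploads from the other end. A stripped-down sketch of that producer/consumer shape, assuming Rust 1.87+ for std::io::pipe (both ends handled via spawn_blocking here, for brevity):

use std::io::Write as _;

#[tokio::main]
async fn main() -> std::io::Result<()> {
    // One OS pipe: the producer writes, the consumer reads.
    let (mut reader, mut writer) = std::io::pipe()?;

    // Producer: ordinary blocking writes on a dedicated thread.
    let producer = tokio::task::spawn_blocking(move || {
        writer.write_all(b"snapshot bytes...")?;
        Ok::<_, std::io::Error>(()) // dropping `writer` closes the pipe -> EOF
    });

    // Consumer: drains the read end until EOF.
    let consumer = tokio::task::spawn_blocking(move || {
        let mut buf = Vec::new();
        std::io::Read::read_to_end(&mut reader, &mut buf)?;
        Ok::<_, std::io::Error>(buf)
    });

    let (p, c) = tokio::join!(producer, consumer);
    p.unwrap()?;
    println!("received {} bytes", c.unwrap()?.len());
    Ok(())
}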
+
+/// Streams a tarball of the database content into a pipe.
+#[cfg(unix)]
+fn stream_tarball_into_pipe(
+    progress: Progress,
+    level: u32,
+    writer: std::io::PipeWriter,
+    index_scheduler: IndexScheduler,
+) -> std::result::Result<(), Error> {
+    use std::io::Write as _;
+    use std::path::Path;
+
+    let writer = flate2::write::GzEncoder::new(writer, flate2::Compression::new(level));
+    let mut tarball = tar::Builder::new(writer);
+
+    // 1. Snapshot the version file
+    tarball
+        .append_path_with_name(&index_scheduler.scheduler.version_file_path, VERSION_FILE_NAME)?;
+
+    // 2. Snapshot the index scheduler LMDB env
+    progress.update_progress(SnapshotCreationProgress::SnapshotTheIndexScheduler);
+    let tasks_env_file = index_scheduler.env.try_clone_inner_file()?;
+    let path = Path::new("tasks").join("data.mdb");
+    append_file_to_tarball(&mut tarball, path, tasks_env_file)?;
+
+    // 2.3 Create a read transaction on the index-scheduler
+    let rtxn = index_scheduler.env.read_txn()?;
+
+    // 2.4 Create the update files directory
+    // And only copy the update files of the enqueued tasks
+    progress.update_progress(SnapshotCreationProgress::SnapshotTheUpdateFiles);
+    let enqueued = index_scheduler.queue.tasks.get_status(&rtxn, Status::Enqueued)?;
+    let (atomic, update_file_progress) = AtomicUpdateFileStep::new(enqueued.len() as u32);
+    progress.update_progress(update_file_progress);
+
+    // We create the update_files directory so that it
+    // always exists even if there are no update files
+    let update_files_dir = Path::new(UPDATE_FILES_DIR_NAME);
+    let src_update_files_dir = {
+        let mut path = index_scheduler.env.path().to_path_buf();
+        path.pop();
+        path.join(UPDATE_FILES_DIR_NAME)
+    };
+    tarball.append_dir(update_files_dir, src_update_files_dir)?;
+
+    for task_id in enqueued {
+        let task = index_scheduler
+            .queue
+            .tasks
+            .get_task(&rtxn, task_id)?
+            .ok_or(Error::CorruptedTaskQueue)?;
+        if let Some(content_uuid) = task.content_uuid() {
+            use std::fs::File;
+
+            let src = index_scheduler.queue.file_store.update_path(content_uuid);
+            let mut update_file = File::open(src)?;
+            let path = update_files_dir.join(content_uuid.to_string());
+            tarball.append_file(path, &mut update_file)?;
+        }
+        atomic.fetch_add(1, Ordering::Relaxed);
+    }
+
+    // 3. Snapshot every index
+    progress.update_progress(SnapshotCreationProgress::SnapshotTheIndexes);
+    let index_mapping = index_scheduler.index_mapper.index_mapping;
+    let nb_indexes = index_mapping.len(&rtxn)? as u32;
+    let indexes_dir = Path::new("indexes");
+    let indexes_references: Vec<_> = index_scheduler
+        .index_mapper
+        .index_mapping
+        .iter(&rtxn)?
+        .map(|res| res.map_err(Error::from).map(|(name, uuid)| (name.to_string(), uuid)))
+        .collect::<Result<_, Error>>()?;
+
+    // It's prettier to use a for loop instead of the IndexMapper::try_for_each_index
+    // method, especially when we need to access the UUID, local path and index number.
+    for (i, (name, uuid)) in indexes_references.into_iter().enumerate() {
+        progress.update_progress(VariableNameStep::<SnapshotCreationProgress>::new(
+            &name, i as u32, nb_indexes,
+        ));
+        let path = indexes_dir.join(uuid.to_string()).join("data.mdb");
+        let index = index_scheduler.index_mapper.index(&rtxn, &name)?;
+        let index_file = index.try_clone_inner_file()?;
+        tracing::trace!("Appending index file for {name} in {}", path.display());
+        append_file_to_tarball(&mut tarball, path, index_file)?;
+    }
+
+    drop(rtxn);
+
+    // 4. Snapshot the auth LMDB env
+    progress.update_progress(SnapshotCreationProgress::SnapshotTheApiKeys);
+    let auth_env_file = index_scheduler.scheduler.auth_env.try_clone_inner_file()?;
+    let path = Path::new("auth").join("data.mdb");
+    append_file_to_tarball(&mut tarball, path, auth_env_file)?;
+
+    let mut gzencoder = tarball.into_inner()?;
+    gzencoder.flush()?;
+    gzencoder.try_finish()?;
+    let mut writer = gzencoder.finish()?;
+    writer.flush()?;
+
+    Result::<_, Error>::Ok(())
+}
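The layering above is tar::Builder writing through a flate2 GzEncoder into an arbitrary Write sink (the pipe, in this case). A minimal self-contained sketch of the same stack, appending one in-memory file and finishing the gzip stream in the same order as above:

use std::io::Write as _;

fn build_archive(sink: impl std::io::Write) -> std::io::Result<()> {
    // Compress everything the tar builder emits.
    let gz = flate2::write::GzEncoder::new(sink, flate2::Compression::new(6));
    let mut tarball = tar::Builder::new(gz);

    // Append an in-memory "file" with explicit tar headers.
    let data = b"hello snapshot";
    let mut header = tar::Header::new_gnu();
    header.set_size(data.len() as u64);
    header.set_mode(0o644);
    header.set_cksum();
    tarball.append_data(&mut header, "hello.txt", &data[..])?;

    // Finish the tar stream, then flush the gzip trailer into the sink.
    let mut gz = tarball.into_inner()?;
    gz.try_finish()?;
    gz.finish()?.flush()?;
    Ok(())
}

fn main() -> std::io::Result<()> {
    build_archive(std::fs::File::create("demo.tar.gz")?)
}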
+
+#[cfg(unix)]
+fn append_file_to_tarball<W, P>(
+    tarball: &mut tar::Builder<W>,
+    path: P,
+    mut auth_env_file: fs::File,
+) -> Result<(), Error>
+where
+    W: std::io::Write,
+    P: AsRef<std::path::Path>,
+{
+    use std::io::{Seek as _, SeekFrom};
+
+    // Note: A previous snapshot operation may have left the cursor
+    // at the end of the file, so we need to seek to the start.
+    auth_env_file.seek(SeekFrom::Start(0))?;
+    tarball.append_file(path, &mut auth_env_file)?;
+    Ok(())
+}
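The rewind matters because a cloned file handle shares its cursor with the original: a clone taken after the file was read (or after a previous snapshot) sits at EOF, and tar would archive zero bytes. A small illustration with std plus a tempfile scratch file:

use std::io::{Read as _, Seek as _, Write as _};

fn main() -> std::io::Result<()> {
    let mut f = tempfile::tempfile()?;
    f.write_all(b"abc")?;

    // The clone shares the underlying cursor, which now sits at EOF.
    let mut clone = f.try_clone()?;
    let mut buf = String::new();
    clone.read_to_string(&mut buf)?;
    assert_eq!(buf, ""); // nothing left to read from here

    // Rewinding makes the full content visible again.
    clone.rewind()?;
    clone.read_to_string(&mut buf)?;
    assert_eq!(buf, "abc");
    Ok(())
}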
+
+/// Streams the content read from the given reader to S3.
+#[cfg(unix)]
+#[allow(clippy::too_many_arguments)]
+async fn multipart_stream_to_s3(
+    s3_bucket_url: String,
+    s3_bucket_region: String,
+    s3_bucket_name: String,
+    s3_snapshot_prefix: String,
+    s3_access_key: String,
+    s3_secret_key: String,
+    s3_max_in_flight_parts: std::num::NonZero<usize>,
+    s3_signature_duration: std::time::Duration,
+    s3_multipart_part_size: u64,
+    must_stop_processing: super::MustStopProcessing,
+    retry_backoff: backoff::exponential::ExponentialBackoff<backoff::SystemClock>,
+    db_name: String,
+    reader: std::io::PipeReader,
+) -> Result<(), Error> {
+    use std::collections::VecDeque;
+    use std::io;
+    use std::os::fd::OwnedFd;
+    use std::path::PathBuf;
+
+    use bytes::{Bytes, BytesMut};
+    use reqwest::{Client, Response};
+    use rusty_s3::actions::CreateMultipartUpload;
+    use rusty_s3::{Bucket, BucketError, Credentials, S3Action as _, UrlStyle};
+    use tokio::task::JoinHandle;
+
+    let reader = OwnedFd::from(reader);
+    let reader = tokio::net::unix::pipe::Receiver::from_owned_fd(reader)?;
+    let s3_snapshot_prefix = PathBuf::from(s3_snapshot_prefix);
+    let url =
+        s3_bucket_url.parse().map_err(BucketError::ParseError).map_err(Error::S3BucketError)?;
+    let bucket = Bucket::new(url, UrlStyle::Path, s3_bucket_name, s3_bucket_region)
+        .map_err(Error::S3BucketError)?;
+    let credential = Credentials::new(s3_access_key, s3_secret_key);
+
+    // Note for the future (rust 1.91+): use with_added_extension, it's prettier
+    let object_path = s3_snapshot_prefix.join(format!("{db_name}.snapshot"));
+    // Note: this doesn't work on Windows; if a port to that platform is needed,
+    // use the slash-path crate or similar to get the correct path separator.
+    let object = object_path.display().to_string();
+
+    let action = bucket.create_multipart_upload(Some(&credential), &object);
+    let url = action.sign(s3_signature_duration);
+
+    let client = Client::new();
+    let resp = client.post(url).send().await.map_err(Error::S3HttpError)?;
+    let status = resp.status();
+
+    let body = match resp.error_for_status_ref() {
+        Ok(_) => resp.text().await.map_err(Error::S3HttpError)?,
+        Err(_) => {
+            return Err(Error::S3Error { status, body: resp.text().await.unwrap_or_default() })
+        }
+    };
+
+    let multipart =
+        CreateMultipartUpload::parse_response(&body).map_err(|e| Error::S3XmlError(Box::new(e)))?;
+    tracing::debug!("Starting the upload of the snapshot to {object}");
+
+    // We use this bumpalo for the etag strings.
+    let bump = bumpalo::Bump::new();
+    let mut etags = Vec::<&str>::new();
+    let mut in_flight = VecDeque::<(JoinHandle<reqwest::Result<Response>>, Bytes)>::with_capacity(
+        s3_max_in_flight_parts.get(),
+    );
+
+    // Part numbers start at 1 and cannot be larger than 10k
+    for part_number in 1u16.. {
+        if must_stop_processing.get() {
+            return Err(Error::AbortedTask);
+        }
+
+        let part_upload =
+            bucket.upload_part(Some(&credential), &object, part_number, multipart.upload_id());
+        let url = part_upload.sign(s3_signature_duration);
+
+        // Wait for a buffer to be ready if there are in-flight parts that landed
+        let mut buffer = if in_flight.len() >= s3_max_in_flight_parts.get() {
+            let (handle, buffer) = in_flight.pop_front().expect("At least one in flight request");
+            let resp = join_and_map_error(handle).await?;
+            extract_and_append_etag(&bump, &mut etags, resp.headers())?;
+
+            let mut buffer = match buffer.try_into_mut() {
+                Ok(buffer) => buffer,
+                Err(_) => unreachable!("All bytes references were consumed in the task"),
+            };
+            buffer.clear();
+            buffer
+        } else {
+            BytesMut::with_capacity(s3_multipart_part_size as usize)
+        };
+
+        // If we successfully read enough bytes,
+        // we can continue and send the buffer/part
+        while buffer.len() < (s3_multipart_part_size as usize / 2) {
+            // Wait for the pipe to be readable
+            reader.readable().await?;
+
+            match reader.try_read_buf(&mut buffer) {
+                Ok(0) => break,
+                // We read some bytes but maybe not enough
+                Ok(_) => continue,
+                // The readiness event is a false positive.
+                Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => continue,
+                Err(e) => return Err(e.into()),
+            }
+        }
+
+        if buffer.is_empty() {
+            // Break the loop if the buffer is
+            // empty after we tried to read bytes
+            break;
+        }
+
+        let body = buffer.freeze();
+        tracing::trace!("Sending part {part_number}");
+        let task = tokio::spawn({
+            let client = client.clone();
+            let body = body.clone();
+            backoff::future::retry(retry_backoff.clone(), move || {
+                let client = client.clone();
+                let url = url.clone();
+                let body = body.clone();
+                async move {
+                    match client.put(url).body(body).send().await {
+                        Ok(resp) if resp.status().is_client_error() => {
+                            resp.error_for_status().map_err(backoff::Error::Permanent)
+                        }
+                        Ok(resp) => Ok(resp),
+                        Err(e) => Err(backoff::Error::transient(e)),
+                    }
+                }
+            })
+        });
+        in_flight.push_back((task, body));
+    }
+
+    for (handle, _buffer) in in_flight {
+        let resp = join_and_map_error(handle).await?;
+        extract_and_append_etag(&bump, &mut etags, resp.headers())?;
+    }
+
+    tracing::debug!("Finalizing the multipart upload");
+
+    let action = bucket.complete_multipart_upload(
+        Some(&credential),
+        &object,
+        multipart.upload_id(),
+        etags.iter().map(AsRef::as_ref),
+    );
+    let url = action.sign(s3_signature_duration);
+    let body = action.body();
+    let resp = backoff::future::retry(retry_backoff, move || {
+        let client = client.clone();
+        let url = url.clone();
+        let body = body.clone();
+        async move {
+            match client.post(url).body(body).send().await {
+                Ok(resp) if resp.status().is_client_error() => {
+                    Err(backoff::Error::Permanent(Error::S3Error {
+                        status: resp.status(),
+                        body: resp.text().await.unwrap_or_default(),
+                    }))
+                }
+                Ok(resp) => Ok(resp),
+                Err(e) => Err(backoff::Error::transient(Error::S3HttpError(e))),
+            }
+        }
+    })
+    .await?;
+
+    let status = resp.status();
+    let body = resp.text().await.map_err(|e| Error::S3Error { status, body: e.to_string() })?;
+    if status.is_success() {
+        Ok(())
+    } else {
+        Err(Error::S3Error { status, body })
+    }
+}
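The upload loop above bounds memory with a sliding window: at most s3_max_in_flight_parts uploads are airborne, and once the window is full it awaits the oldest part, harvests its ETag, and recycles its buffer. A stripped-down sketch of the same backpressure pattern (send_part is a hypothetical stand-in for the real upload):

use std::collections::VecDeque;
use tokio::task::JoinHandle;

async fn send_part(n: u32) -> u32 {
    // stand-in for a real part upload
    n
}

#[tokio::main]
async fn main() {
    const MAX_IN_FLIGHT: usize = 4;
    let mut in_flight: VecDeque<JoinHandle<u32>> = VecDeque::with_capacity(MAX_IN_FLIGHT);

    for part in 0..32u32 {
        // Window full: wait for the oldest task before admitting a new one.
        if in_flight.len() >= MAX_IN_FLIGHT {
            let done = in_flight.pop_front().unwrap().await.unwrap();
            println!("part {done} finished");
        }
        in_flight.push_back(tokio::spawn(send_part(part)));
    }

    // Drain whatever is still airborne, in submission order.
    for handle in in_flight {
        handle.await.unwrap();
    }
}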
+
+#[cfg(unix)]
+async fn join_and_map_error(
+    join_handle: tokio::task::JoinHandle<Result<reqwest::Response, reqwest::Error>>,
+) -> Result<reqwest::Response> {
+    // safety: Panic happens if the task (JoinHandle) was aborted, cancelled, or panicked
+    let request = join_handle.await.unwrap();
+    let resp = request.map_err(Error::S3HttpError)?;
+    match resp.error_for_status_ref() {
+        Ok(_) => Ok(resp),
+        Err(_) => Err(Error::S3Error {
+            status: resp.status(),
+            body: resp.text().await.unwrap_or_default(),
+        }),
+    }
+}
+
+#[cfg(unix)]
+fn extract_and_append_etag<'b>(
+    bump: &'b bumpalo::Bump,
+    etags: &mut Vec<&'b str>,
+    headers: &reqwest::header::HeaderMap,
+) -> Result<()> {
+    use reqwest::header::ETAG;
+
+    let etag = headers.get(ETAG).ok_or_else(|| Error::S3XmlError("Missing ETag header".into()))?;
+    let etag = etag.to_str().map_err(|e| Error::S3XmlError(Box::new(e)))?;
+    etags.push(bump.alloc_str(etag));
+
+    Ok(())
+}
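The bump arena is what lets etags hold plain &str borrows that outlive each per-part response without one String allocation per part; everything is freed at once when the arena drops. A minimal sketch with the bumpalo crate:

fn main() {
    let bump = bumpalo::Bump::new();
    let mut etags: Vec<&str> = Vec::new();

    for owned in ["\"etag-1\"".to_string(), "\"etag-2\"".to_string()] {
        // alloc_str copies the bytes into the arena and hands back a &str
        // tied to the arena's lifetime, not to `owned`.
        etags.push(bump.alloc_str(&owned));
    }

    assert_eq!(etags, ["\"etag-1\"", "\"etag-2\""]);
    // All arena memory is released in one shot when `bump` drops.
}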
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
0 {uid: 0, status: enqueued, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [0,]

@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued []

@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_embedders.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
1 {uid: 1, batch_uid: 1, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, batch_uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------

@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_embedders.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
1 {uid: 1, batch_uid: 1, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------

@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_embedders.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
|
0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: 
false, allow_index_creation: true }}
|
||||||
1 {uid: 1, batch_uid: 1, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
|
1 {uid: 1, batch_uid: 1, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
|
||||||
----------------------------------------------------------------------
|
----------------------------------------------------------------------
|
||||||
### Status:
|
### Status:
|
||||||
|
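Context for the churn in these hunks: the `.snap` files under `crates/index-scheduler/src/scheduler/` are generated snapshot tests, so the whole diff is one mechanical change repeated, `vector_store: NotSet` appearing in every Debug-printed `Settings` value. A minimal sketch of the kind of assertion that produces such files, assuming insta-style snapshot testing (the repository's own snapshot helpers may differ):

    // A minimal sketch, assuming insta-style snapshot tests; the repository's
    // own snapshot helpers for the index scheduler may differ.
    #[cfg(test)]
    mod sketch {
        #[test]
        fn tasks_snapshot() {
            // Hypothetical rendering; the real tests Debug-format the
            // scheduler's task queue.
            let rendered = r#"0 {uid: 0, status: enqueued, kind: SettingsUpdate { index_uid: "doggos", .. }}"#;
            // Any new `Settings` field (here `vector_store`) changes this
            // Debug output, so every stored snapshot must be re-accepted.
            insta::assert_snapshot!(rendered);
        }
    }

Snapshotting the full Debug output keeps the tests exhaustive, at the cost of exactly this kind of wide, repetitive diff whenever the struct grows.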
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_embedders.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
 1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
 ----------------------------------------------------------------------
 ### Status:
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_embedders.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
+0 {uid: 0, status: enqueued, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,]
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_embedders.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued []
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set([Field("catto")]), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set([Field("catto")]), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set([Field("catto")]), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set([Field("catto")]), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
 1 {uid: 1, batch_uid: 1, status: succeeded, details: { received_documents: 3, indexed_documents: Some(3) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
 ----------------------------------------------------------------------
 ### Status:
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set([Field("catto")]), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set([Field("catto")]), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set([Field("catto")]), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set([Field("catto")]), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
 1 {uid: 1, status: enqueued, details: { received_documents: 3, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
 ----------------------------------------------------------------------
 ### Status:
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set([Field("catto")]), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set([Field("catto")]), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set([Field("catto")]), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set([Field("catto")]), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
 1 {uid: 1, batch_uid: 1, status: succeeded, details: { received_documents: 3, indexed_documents: Some(3) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
 2 {uid: 2, batch_uid: 2, status: succeeded, details: { received_document_ids: 1, deleted_documents: Some(1) }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1"] }}
 3 {uid: 3, batch_uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos`: Invalid type for filter subexpression: expected: String, Array, found: true.", error_code: "invalid_document_filter", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid_document_filter" }, details: { original_filter: true, deleted_documents: Some(0) }, kind: DocumentDeletionByFilter { index_uid: "doggos", filter_expr: Bool(true) }}
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set([Field("catto")]), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set([Field("catto")]), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set([Field("catto")]), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set([Field("catto")]), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
 1 {uid: 1, batch_uid: 1, status: succeeded, details: { received_documents: 3, indexed_documents: Some(3) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
 2 {uid: 2, status: enqueued, details: { received_document_ids: 1, deleted_documents: None }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1"] }}
 3 {uid: 3, status: enqueued, details: { original_filter: true, deleted_documents: None }, kind: DocumentDeletionByFilter { index_uid: "doggos", filter_expr: Bool(true) }}
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set([Field("catto")]), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set([Field("catto")]), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
+0 {uid: 0, status: enqueued, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set([Field("catto")]), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set([Field("catto")]), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, vector_store: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
 1 {uid: 1, status: enqueued, details: { received_documents: 3, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
 ----------------------------------------------------------------------
 ### Status:
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 20, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: [current version] }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 2 {uid: 2, batch_uid: 2, status: succeeded, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
 3 {uid: 3, batch_uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggo` already exists.", error_code: "index_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_already_exists" }, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
@@ -57,7 +57,7 @@ girafo: { number_of_documents: 0, field_distribution: {} }
 [timestamp] [4,]
 ----------------------------------------------------------------------
 ### All Batches:
-0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.20.0"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
+0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"[current version]"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
 1 {uid: 1, details: {"primaryKey":"mouse"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
 2 {uid: 2, details: {"primaryKey":"bone"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 2 of type `indexCreation` that cannot be batched with any other task.", }
 3 {uid: 3, details: {"primaryKey":"bone"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 3 of type `indexCreation` that cannot be batched with any other task.", }
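The other recurring change in these hunks replaces the hard-coded upgrade target `v1.20.0` with a `[current version]` placeholder, so upgrade snapshots no longer have to be regenerated on every release. A sketch of how such redaction can be expressed with insta's `filters` setting (this requires insta's `filters` feature; the function name below is hypothetical, and the repository may implement the placeholder differently):

    // Sketch only: redact the running version so upgrade snapshots stay
    // stable across releases. Assumes insta with the `filters` feature.
    fn assert_upgrade_snapshot(rendered: &str) {
        insta::with_settings!({filters => vec![
            // Replace anything shaped like `v1.20.0` with a placeholder.
            (r"v\d+\.\d+\.\d+", "[current version]"),
        ]}, {
            insta::assert_snapshot!(rendered);
        });
    }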
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 20, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: [current version] }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,]
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 20, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: [current version] }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 ----------------------------------------------------------------------
 ### Status:
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 20, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: [current version] }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 ----------------------------------------------------------------------
 ### Status:
@@ -37,7 +37,7 @@ catto [1,]
|
|||||||
[timestamp] [0,]
|
[timestamp] [0,]
|
||||||
----------------------------------------------------------------------
|
----------------------------------------------------------------------
|
||||||
### All Batches:
|
### All Batches:
|
||||||
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.20.0"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
|
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"[current version]"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
|
||||||
----------------------------------------------------------------------
|
----------------------------------------------------------------------
|
||||||
### Batch to tasks mapping:
|
### Batch to tasks mapping:
|
||||||
0 [0,]
|
0 [0,]
|
||||||
|
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 20, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: [current version] }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 2 {uid: 2, status: enqueued, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
 ----------------------------------------------------------------------
@@ -40,7 +40,7 @@ doggo [2,]
 [timestamp] [0,]
 ----------------------------------------------------------------------
 ### All Batches:
-0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.20.0"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
+0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"[current version]"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
 ----------------------------------------------------------------------
 ### Batch to tasks mapping:
 0 [0,]
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 20, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: [current version] }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 2 {uid: 2, status: enqueued, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
 3 {uid: 3, status: enqueued, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
@@ -43,7 +43,7 @@ doggo [2,3,]
 [timestamp] [0,]
 ----------------------------------------------------------------------
 ### All Batches:
-0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.20.0"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
+0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"[current version]"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
 ----------------------------------------------------------------------
 ### Batch to tasks mapping:
 0 [0,]
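The snapshot hunks above replace the hard-coded upgrade target `(1, 20, 0)` / `"v1.20.0"` with a `[current version]` placeholder, so the stored snapshots no longer need to be regenerated on every Meilisearch release. A minimal sketch of how such a redaction can work before the comparison; `normalize_version` is a hypothetical helper, not the test suite's actual API:

```rust
// Hypothetical redaction helper: replace the concrete binary version with a
// stable placeholder so a stored snapshot stays valid across releases.
fn normalize_version(snapshot: &str) -> String {
    let current = format!(
        "v{}.{}.{}",
        env!("CARGO_PKG_VERSION_MAJOR"),
        env!("CARGO_PKG_VERSION_MINOR"),
        env!("CARGO_PKG_VERSION_PATCH"),
    );
    snapshot.replace(&current, "[current version]")
}
```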
@@ -722,7 +722,7 @@ fn basic_get_stats() {
     let kind = index_creation_task("whalo", "fish");
     let _task = index_scheduler.register(kind, None, false).unwrap();

-    snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r#"
+    snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r###"
     {
       "indexes": {
         "catto": 1,
@@ -742,6 +742,7 @@ fn basic_get_stats() {
         "documentEdition": 0,
         "dumpCreation": 0,
         "export": 0,
+        "indexCompaction": 0,
         "indexCreation": 3,
         "indexDeletion": 0,
         "indexSwap": 0,
@@ -753,10 +754,10 @@ fn basic_get_stats() {
         "upgradeDatabase": 0
       }
     }
-    "#);
+    "###);

     handle.advance_till([Start, BatchCreated]);
-    snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r#"
+    snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r###"
     {
       "indexes": {
         "catto": 1,
@@ -776,6 +777,7 @@ fn basic_get_stats() {
         "documentEdition": 0,
         "dumpCreation": 0,
         "export": 0,
+        "indexCompaction": 0,
         "indexCreation": 3,
         "indexDeletion": 0,
         "indexSwap": 0,
@@ -787,7 +789,7 @@ fn basic_get_stats() {
         "upgradeDatabase": 0
       }
     }
-    "#);
+    "###);

     handle.advance_till([
         InsideProcessBatch,
@@ -797,7 +799,7 @@ fn basic_get_stats() {
         Start,
         BatchCreated,
     ]);
-    snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r#"
+    snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r###"
     {
       "indexes": {
         "catto": 1,
@@ -817,6 +819,7 @@ fn basic_get_stats() {
         "documentEdition": 0,
         "dumpCreation": 0,
         "export": 0,
+        "indexCompaction": 0,
         "indexCreation": 3,
         "indexDeletion": 0,
         "indexSwap": 0,
@@ -828,7 +831,7 @@ fn basic_get_stats() {
         "upgradeDatabase": 0
       }
     }
-    "#);
+    "###);

     // now we make one more batch, the started_at field of the new tasks will be past `second_start_time`
     handle.advance_till([
@@ -839,7 +842,7 @@ fn basic_get_stats() {
         Start,
         BatchCreated,
     ]);
-    snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r#"
+    snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r###"
     {
       "indexes": {
         "catto": 1,
@@ -859,6 +862,7 @@ fn basic_get_stats() {
         "documentEdition": 0,
         "dumpCreation": 0,
         "export": 0,
+        "indexCompaction": 0,
         "indexCreation": 3,
         "indexDeletion": 0,
         "indexSwap": 0,
@@ -870,7 +874,7 @@ fn basic_get_stats() {
         "upgradeDatabase": 0
       }
     }
-    "#);
+    "###);
 }

 #[test]
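In `basic_get_stats` the inline snapshot literals change from `@r#"…"#` to `@r###"…"###`. A raw string ends at the first quote followed by as many `#` characters as it was opened with, so the longer delimiter lets the snapshot body safely contain `"#` sequences. A standalone illustration of the rule:

```rust
// Raw string delimiters in Rust: the literal terminates at `"` followed by
// the same number of `#` characters it was opened with.
fn main() {
    let one = r#"contains "quotes" safely"#;
    let three = r###"can even contain "# sequences"###;
    assert!(one.contains('"'));
    assert!(three.contains(r##""#"##));
}
```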
@@ -126,7 +126,7 @@ impl IndexScheduler {
         std::fs::create_dir_all(&options.auth_path).unwrap();
         let auth_env = open_auth_store_env(&options.auth_path).unwrap();
         let index_scheduler =
-            Self::new(options, auth_env, version, sender, planned_failures).unwrap();
+            Self::new_test(options, auth_env, version, None, sender, planned_failures).unwrap();

         // To be 100% consistent between all tests we're going to start the scheduler right now
         // and ensure it's in the expected starting state.
@@ -1,7 +1,7 @@
 use anyhow::bail;
 use meilisearch_types::heed::{Env, RwTxn, WithoutTls};
 use meilisearch_types::tasks::{Details, KindWithContent, Status, Task};
-use meilisearch_types::versioning::{VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH};
+use meilisearch_types::versioning;
 use time::OffsetDateTime;
 use tracing::info;

@@ -9,75 +9,82 @@ use crate::queue::TaskQueue;
 use crate::versioning::Versioning;

 trait UpgradeIndexScheduler {
-    fn upgrade(
-        &self,
-        env: &Env<WithoutTls>,
-        wtxn: &mut RwTxn,
-        original: (u32, u32, u32),
-    ) -> anyhow::Result<()>;
-    fn target_version(&self) -> (u32, u32, u32);
+    fn upgrade(&self, env: &Env<WithoutTls>, wtxn: &mut RwTxn) -> anyhow::Result<()>;
+    /// Whether the migration should be applied, depending on the initial version of the index scheduler before
+    /// any migration was applied
+    fn must_upgrade(&self, initial_version: (u32, u32, u32)) -> bool;
+    /// A progress-centric description of the migration
+    fn description(&self) -> &'static str;
 }

+/// Upgrade the index scheduler to the binary version.
+///
+/// # Warning
+///
+/// The current implementation uses a single wtxn to the index scheduler for the whole duration of the upgrade.
+/// If migrations start taking a long time, it might prevent tasks from being registered.
+/// If this issue manifests, then it can be mitigated by adding a `fn target_version` to `UpgradeIndexScheduler`,
+/// to be able to write intermediate versions and drop the wtxn between applying migrations.
 pub fn upgrade_index_scheduler(
     env: &Env<WithoutTls>,
     versioning: &Versioning,
-    from: (u32, u32, u32),
-    to: (u32, u32, u32),
+    initial_version: (u32, u32, u32),
 ) -> anyhow::Result<()> {
-    let current_major = to.0;
-    let current_minor = to.1;
-    let current_patch = to.2;
+    let target_major: u32 = versioning::VERSION_MAJOR;
+    let target_minor: u32 = versioning::VERSION_MINOR;
+    let target_patch: u32 = versioning::VERSION_PATCH;
+    let target_version = (target_major, target_minor, target_patch);

-    let upgrade_functions: &[&dyn UpgradeIndexScheduler] = &[
-        // This is the last upgrade function, it will be called when the index is up to date.
-        // any other upgrade function should be added before this one.
-        &ToCurrentNoOp {},
-    ];
-
-    let start = match from {
-        (1, 12, _) => 0,
-        (1, 13, _) => 0,
-        (1, 14, _) => 0,
-        (1, 15, _) => 0,
-        (1, 16, _) => 0,
-        (1, 17, _) => 0,
-        (1, 18, _) => 0,
-        (1, 19, _) => 0,
-        (1, 20, _) => 0,
-        (major, minor, patch) => {
-            if major > current_major
-                || (major == current_major && minor > current_minor)
-                || (major == current_major && minor == current_minor && patch > current_patch)
-            {
-                bail!(
-                    "Database version {major}.{minor}.{patch} is higher than the Meilisearch version {current_major}.{current_minor}.{current_patch}. Downgrade is not supported",
-                );
-            } else if major < 1 || (major == current_major && minor < 12) {
-                bail!(
-                    "Database version {major}.{minor}.{patch} is too old for the experimental dumpless upgrade feature. Please generate a dump using the v{major}.{minor}.{patch} and import it in the v{current_major}.{current_minor}.{current_patch}",
-                );
-            } else {
-                bail!("Unknown database version: v{major}.{minor}.{patch}");
-            }
-        }
-    };
-
-    info!("Upgrading the task queue");
-    let mut local_from = from;
-    for upgrade in upgrade_functions[start..].iter() {
-        let target = upgrade.target_version();
-        info!(
-            "Upgrading from v{}.{}.{} to v{}.{}.{}",
-            local_from.0, local_from.1, local_from.2, target.0, target.1, target.2
-        );
-        let mut wtxn = env.write_txn()?;
-        upgrade.upgrade(env, &mut wtxn, local_from)?;
-        versioning.set_version(&mut wtxn, target)?;
-        wtxn.commit()?;
-        local_from = target;
+    if initial_version == target_version {
+        return Ok(());
     }

+    let upgrade_functions: &[&dyn UpgradeIndexScheduler] = &[
+        // List all upgrade functions to apply in order here.
+    ];
+
+    let (initial_major, initial_minor, initial_patch) = initial_version;
+
+    if initial_version > target_version {
+        bail!(
+            "Database version {initial_major}.{initial_minor}.{initial_patch} is higher than the Meilisearch version {target_major}.{target_minor}.{target_patch}. Downgrade is not supported",
+        );
+    }
+
+    if initial_version < (1, 12, 0) {
+        bail!(
+            "Database version {initial_major}.{initial_minor}.{initial_patch} is too old for the experimental dumpless upgrade feature. Please generate a dump using the v{initial_major}.{initial_minor}.{initial_patch} and import it in the v{target_major}.{target_minor}.{target_patch}",
+        );
+    }
+
+    info!("Upgrading the task queue");
     let mut wtxn = env.write_txn()?;
+    let migration_count = upgrade_functions.len();
+    for (migration_index, upgrade) in upgrade_functions.iter().enumerate() {
+        if upgrade.must_upgrade(initial_version) {
+            info!(
+                "[{migration_index}/{migration_count}] Applying migration: {}",
+                upgrade.description()
+            );
+
+            upgrade.upgrade(env, &mut wtxn)?;
+
+            info!(
+                "[{}/{migration_count}] Migration applied: {}",
+                migration_index + 1,
+                upgrade.description()
+            )
+        } else {
+            info!(
+                "[{migration_index}/{migration_count}] Skipping unnecessary migration: {}",
+                upgrade.description()
+            )
+        }
+    }
+
+    versioning.set_version(&mut wtxn, target_version)?;
+    info!("Task queue upgraded, spawning the upgrade database task");
+
     let queue = TaskQueue::new(env, &mut wtxn)?;
     let uid = queue.next_task_id(&wtxn)?;
     queue.register(
@@ -90,31 +97,14 @@ pub fn upgrade_index_scheduler(
             finished_at: None,
             error: None,
             canceled_by: None,
-            details: Some(Details::UpgradeDatabase { from, to }),
+            details: Some(Details::UpgradeDatabase { from: initial_version, to: target_version }),
             status: Status::Enqueued,
-            kind: KindWithContent::UpgradeDatabase { from },
+            kind: KindWithContent::UpgradeDatabase { from: initial_version },
             network: None,
+            custom_metadata: None,
         },
     )?;
     wtxn.commit()?;

     Ok(())
 }
-
-#[allow(non_camel_case_types)]
-struct ToCurrentNoOp {}
-
-impl UpgradeIndexScheduler for ToCurrentNoOp {
-    fn upgrade(
-        &self,
-        _env: &Env<WithoutTls>,
-        _wtxn: &mut RwTxn,
-        _original: (u32, u32, u32),
-    ) -> anyhow::Result<()> {
-        Ok(())
-    }
-
-    fn target_version(&self) -> (u32, u32, u32) {
-        (VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH)
-    }
-}
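The rework above drops the per-version dispatch table (`ToCurrentNoOp`, `target_version`, the `match from` offsets) in favor of a flat list of migrations that each decide for themselves whether to run via `must_upgrade`, all applied inside a single write transaction. A hypothetical migration under the new trait; `RenameSomeKey` does not exist in the codebase and only illustrates the contract:

```rust
// Hypothetical migration implementing the reworked trait; illustrative only.
struct RenameSomeKey;

impl UpgradeIndexScheduler for RenameSomeKey {
    fn upgrade(&self, _env: &Env<WithoutTls>, _wtxn: &mut RwTxn) -> anyhow::Result<()> {
        // Rewrite whatever data this migration is responsible for.
        Ok(())
    }

    fn must_upgrade(&self, initial_version: (u32, u32, u32)) -> bool {
        // Only run when the database predates the version that introduced the new layout.
        initial_version < (1, 14, 0)
    }

    fn description(&self) -> &'static str {
        "renaming some key in the task queue"
    }
}
```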
@@ -256,14 +256,15 @@ pub fn swap_index_uid_in_task(task: &mut Task, swap: (&str, &str)) {
     use KindWithContent as K;
     let mut index_uids = vec![];
     match &mut task.kind {
-        K::DocumentAdditionOrUpdate { index_uid, .. } => index_uids.push(index_uid),
-        K::DocumentEdition { index_uid, .. } => index_uids.push(index_uid),
-        K::DocumentDeletion { index_uid, .. } => index_uids.push(index_uid),
-        K::DocumentDeletionByFilter { index_uid, .. } => index_uids.push(index_uid),
-        K::DocumentClear { index_uid } => index_uids.push(index_uid),
-        K::SettingsUpdate { index_uid, .. } => index_uids.push(index_uid),
-        K::IndexDeletion { index_uid } => index_uids.push(index_uid),
-        K::IndexCreation { index_uid, .. } => index_uids.push(index_uid),
+        K::DocumentAdditionOrUpdate { index_uid, .. }
+        | K::DocumentEdition { index_uid, .. }
+        | K::DocumentDeletion { index_uid, .. }
+        | K::DocumentDeletionByFilter { index_uid, .. }
+        | K::DocumentClear { index_uid }
+        | K::SettingsUpdate { index_uid, .. }
+        | K::IndexDeletion { index_uid }
+        | K::IndexCreation { index_uid, .. }
+        | K::IndexCompaction { index_uid, .. } => index_uids.push(index_uid),
         K::IndexUpdate { index_uid, new_index_uid, .. } => {
             index_uids.push(index_uid);
             if let Some(new_uid) = new_index_uid {
@@ -378,6 +379,7 @@ impl crate::IndexScheduler {
             status,
             kind,
             network: _,
+            custom_metadata: _,
         } = task;
         assert_eq!(uid, task.uid);
         if task.status != Status::Enqueued {
@@ -618,6 +620,13 @@ impl crate::IndexScheduler {
                 Details::UpgradeDatabase { from: _, to: _ } => {
                     assert_eq!(kind.as_kind(), Kind::UpgradeDatabase);
                 }
+                Details::IndexCompaction {
+                    index_uid: _,
+                    pre_compaction_size: _,
+                    post_compaction_size: _,
+                } => {
+                    assert_eq!(kind.as_kind(), Kind::IndexCompaction);
+                }
             }
         }

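The first hunk collapses eight single-binding match arms into one or-pattern, which is legal because every alternative binds the same name with the same type. A standalone illustration with a toy enum (the real `KindWithContent` variants differ):

```rust
// Or-patterns that all bind `index_uid` can share a single arm body.
enum Kind {
    Creation { index_uid: String },
    Deletion { index_uid: String },
    Swap,
}

fn index_uid(kind: &Kind) -> Option<&str> {
    match kind {
        Kind::Creation { index_uid } | Kind::Deletion { index_uid } => Some(index_uid),
        Kind::Swap => None,
    }
}
```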
@@ -17,7 +17,7 @@ impl<'a> BytesDecode<'a> for UuidCodec {
 impl BytesEncode<'_> for UuidCodec {
     type EItem = Uuid;

-    fn bytes_encode(item: &Self::EItem) -> Result<Cow<[u8]>, BoxedError> {
+    fn bytes_encode(item: &Self::EItem) -> Result<Cow<'_, [u8]>, BoxedError> {
         Ok(Cow::Borrowed(item.as_bytes()))
     }
 }
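This hunk, like the `KeyIdActionCodec` one further down, only spells out the previously elided `Cow` lifetime as `Cow<'_, [u8]>`; newer rustc lints warn when a signature hides an elided lifetime behind a bare `Cow<[u8]>`. A minimal standalone example of the same signature style:

```rust
use std::borrow::Cow;

// The returned Cow borrows from `item`; `Cow<'_, [u8]>` makes that elided
// lifetime visible in the signature instead of hiding it.
fn bytes_encode(item: &[u8; 16]) -> Cow<'_, [u8]> {
    Cow::Borrowed(item.as_slice())
}
```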
@@ -64,14 +64,7 @@ impl Versioning {
         };
         wtxn.commit()?;

-        let bin_major: u32 = versioning::VERSION_MAJOR;
-        let bin_minor: u32 = versioning::VERSION_MINOR;
-        let bin_patch: u32 = versioning::VERSION_PATCH;
-        let to = (bin_major, bin_minor, bin_patch);
-
-        if from != to {
-            upgrade_index_scheduler(env, &this, from, to)?;
-        }
+        upgrade_index_scheduler(env, &this, from)?;

         // Once we reach this point it means the upgrade process, if there was one, is entirely finished
         // we can safely say we reached the latest version of the index scheduler
@@ -15,7 +15,7 @@ license.workspace = true
 serde_json = "1.0"

 [dev-dependencies]
-criterion = "0.6.0"
+criterion = "0.7.0"

 [[bench]]
 name = "depth"
@@ -13,7 +13,7 @@ license.workspace = true
 [dependencies]
 # fixed version due to format breakages in v1.40
 insta = { version = "=1.39.0", features = ["json", "redactions"] }
-md5 = "0.7.0"
+md5 = "0.8.0"
 once_cell = "1.21"
-regex-lite = "0.1.6"
-uuid = { version = "1.17.0", features = ["v4"] }
+regex-lite = "0.1.8"
+uuid = { version = "1.18.1", features = ["v4"] }
@@ -271,9 +271,10 @@ macro_rules! json_string {

 #[cfg(test)]
 mod tests {
+    use uuid::Uuid;
+
     use crate as meili_snap;
     use crate::UUID_IN_MESSAGE_RE;
-    use uuid::Uuid;

     #[test]
     fn snap() {
@@ -12,15 +12,15 @@ license.workspace = true

 [dependencies]
 base64 = "0.22.1"
-enum-iterator = "2.1.0"
+enum-iterator = "2.3.0"
 hmac = "0.12.1"
 maplit = "1.0.2"
 meilisearch-types = { path = "../meilisearch-types" }
 rand = "0.8.5"
 roaring = { version = "0.10.12", features = ["serde"] }
-serde = { version = "1.0.219", features = ["derive"] }
-serde_json = { version = "1.0.140", features = ["preserve_order"] }
+serde = { version = "1.0.228", features = ["derive"] }
+serde_json = { version = "1.0.145", features = ["preserve_order"] }
 sha2 = "0.10.9"
-thiserror = "2.0.12"
-time = { version = "0.3.41", features = ["serde-well-known", "formatting", "parsing", "macros"] }
-uuid = { version = "1.17.0", features = ["serde", "v4"] }
+thiserror = "2.0.17"
+time = { version = "0.3.44", features = ["serde-well-known", "formatting", "parsing", "macros"] }
+uuid = { version = "1.18.1", features = ["serde", "v4"] }
@@ -109,6 +109,7 @@ impl HeedAuthStore {
                 Action::IndexesGet,
                 Action::IndexesUpdate,
                 Action::IndexesSwap,
+                Action::IndexesCompact,
             ]
             .iter(),
         );
@@ -315,7 +316,9 @@ impl<'a> heed::BytesDecode<'a> for KeyIdActionCodec {
 impl<'a> heed::BytesEncode<'a> for KeyIdActionCodec {
     type EItem = (&'a KeyId, &'a Action, Option<&'a [u8]>);

-    fn bytes_encode((key_id, action, index): &Self::EItem) -> StdResult<Cow<[u8]>, BoxedError> {
+    fn bytes_encode(
+        (key_id, action, index): &'_ Self::EItem,
+    ) -> StdResult<Cow<'_, [u8]>, BoxedError> {
         let mut bytes = Vec::new();

         bytes.extend_from_slice(key_id.as_bytes());
@@ -11,38 +11,38 @@ edition.workspace = true
 license.workspace = true

 [dependencies]
-actix-web = { version = "4.11.0", default-features = false }
-anyhow = "1.0.98"
-bumpalo = "3.18.1"
+actix-web = { version = "4.12.0", default-features = false }
+anyhow = "1.0.100"
+bumpalo = "3.19.0"
 bumparaw-collections = "0.1.4"
 byte-unit = { version = "5.1.6", features = ["serde"] }
-convert_case = "0.8.0"
-csv = "1.3.1"
-deserr = { version = "0.6.3", features = ["actix-web"] }
+convert_case = "0.9.0"
+csv = "1.4.0"
+deserr = { version = "0.6.4", features = ["actix-web"] }
 either = { version = "1.15.0", features = ["serde"] }
-enum-iterator = "2.1.0"
+enum-iterator = "2.3.0"
 file-store = { path = "../file-store" }
-flate2 = "1.1.2"
+flate2 = "1.1.5"
 fst = "0.4.7"
-memmap2 = "0.9.7"
+memmap2 = "0.9.9"
 milli = { path = "../milli" }
 roaring = { version = "0.10.12", features = ["serde"] }
 rustc-hash = "2.1.1"
-serde = { version = "1.0.219", features = ["derive"] }
+serde = { version = "1.0.228", features = ["derive"] }
 serde-cs = "0.2.4"
-serde_json = { version = "1.0.140", features = ["preserve_order"] }
+serde_json = { version = "1.0.145", features = ["preserve_order"] }
 tar = "0.4.44"
-tempfile = "3.20.0"
-thiserror = "2.0.12"
-time = { version = "0.3.41", features = [
+tempfile = "3.23.0"
+thiserror = "2.0.17"
+time = { version = "0.3.44", features = [
     "serde-well-known",
     "formatting",
     "parsing",
     "macros",
 ] }
-tokio = "1.45"
+tokio = "1.48"
 utoipa = { version = "5.4.0", features = ["macros"] }
-uuid = { version = "1.17.0", features = ["serde", "v4"] }
+uuid = { version = "1.18.1", features = ["serde", "v4"] }

 [dev-dependencies]
 # fixed version due to format breakages in v1.40
@@ -56,6 +56,9 @@ all-tokenizations = ["milli/all-tokenizations"]
 # chinese specialized tokenization
 chinese = ["milli/chinese"]
 chinese-pinyin = ["milli/chinese-pinyin"]
+
+enterprise = ["milli/enterprise"]
+
 # hebrew specialized tokenization
 hebrew = ["milli/hebrew"]
 # japanese specialized tokenization
crates/meilisearch-types/src/community_edition.rs (new file, 16 lines)
@@ -0,0 +1,16 @@
+pub mod network {
+    use milli::update::new::indexer::current_edition::sharding::Shards;
+
+    use crate::network::Network;
+
+    impl Network {
+        pub fn shards(&self) -> Option<Shards> {
+            None
+        }
+
+        pub fn sharding(&self) -> bool {
+            // always false in CE
+            false
+        }
+    }
+}
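This new community-edition file stubs out sharding: `shards()` is always `None` and `sharding()` always `false`, while the enterprise-edition file below keeps the real logic. The `current_edition` path in the import suggests the edition is chosen at compile time; a sketch of such gating, assuming the `enterprise` cargo feature added elsewhere in this diff (the actual module wiring in the codebase may differ):

```rust
// Sketch of compile-time edition selection behind a cargo feature;
// illustrative only, not the actual module layout.
mod community_edition {
    pub fn sharding() -> bool {
        false // always off in the community edition
    }
}

mod enterprise_edition {
    pub fn sharding() -> bool {
        true // governed by the network config in the real code
    }
}

#[cfg(not(feature = "enterprise"))]
use community_edition as current_edition;
#[cfg(feature = "enterprise")]
use enterprise_edition as current_edition;

fn main() {
    println!("sharding enabled: {}", current_edition::sharding());
}
```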
@@ -3,21 +3,9 @@
 // Use of this source code is governed by the Business Source License 1.1,
 // as found in the LICENSE-EE file or at <https://mariadb.com/bsl11>

-use std::collections::BTreeMap;
-
 use milli::update::new::indexer::enterprise_edition::sharding::Shards;
-use serde::{Deserialize, Serialize};

-#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Default)]
-#[serde(rename_all = "camelCase")]
-pub struct Network {
-    #[serde(default, rename = "self")]
-    pub local: Option<String>,
-    #[serde(default)]
-    pub remotes: BTreeMap<String, Remote>,
-    #[serde(default)]
-    pub sharding: bool,
-}
+use crate::network::Network;

 impl Network {
     pub fn shards(&self) -> Option<Shards> {
@@ -34,14 +22,8 @@ impl Network {
             None
         }
     }
-}

-#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
-#[serde(rename_all = "camelCase")]
-pub struct Remote {
-    pub url: String,
-    #[serde(default)]
-    pub search_api_key: Option<String>,
-    #[serde(default)]
-    pub write_api_key: Option<String>,
+    pub fn sharding(&self) -> bool {
+        self.sharding
+    }
 }
@@ -5,6 +5,7 @@ use actix_web::{self as aweb, HttpResponseBuilder};
 use aweb::http::header;
 use aweb::rt::task::JoinError;
 use convert_case::Casing;
+use milli::cellulite;
 use milli::heed::{Error as HeedError, MdbError};
 use serde::{Deserialize, Serialize};
 use utoipa::ToSchema;
@@ -239,6 +240,7 @@ InconsistentDocumentChangeHeaders , InvalidRequest , BAD_REQUEST ;
 InvalidDocumentFilter                  , InvalidRequest       , BAD_REQUEST ;
 InvalidDocumentSort                    , InvalidRequest       , BAD_REQUEST ;
 InvalidDocumentGeoField                , InvalidRequest       , BAD_REQUEST ;
+InvalidDocumentGeojsonField            , InvalidRequest       , BAD_REQUEST ;
 InvalidHeaderValue                     , InvalidRequest       , BAD_REQUEST ;
 InvalidVectorDimensions                , InvalidRequest       , BAD_REQUEST ;
 InvalidVectorsType                     , InvalidRequest       , BAD_REQUEST ;
@@ -252,10 +254,12 @@ InvalidSearchHybridQuery , InvalidRequest , BAD_REQUEST ;
 InvalidIndexLimit                      , InvalidRequest       , BAD_REQUEST ;
 InvalidIndexOffset                     , InvalidRequest       , BAD_REQUEST ;
 InvalidIndexPrimaryKey                 , InvalidRequest       , BAD_REQUEST ;
+InvalidIndexCustomMetadata             , InvalidRequest       , BAD_REQUEST ;
 InvalidIndexUid                        , InvalidRequest       , BAD_REQUEST ;
 InvalidMultiSearchFacets               , InvalidRequest       , BAD_REQUEST ;
 InvalidMultiSearchFacetsByIndex        , InvalidRequest       , BAD_REQUEST ;
 InvalidMultiSearchFacetOrder           , InvalidRequest       , BAD_REQUEST ;
+InvalidMultiSearchQueryPersonalization , InvalidRequest       , BAD_REQUEST ;
 InvalidMultiSearchFederated            , InvalidRequest       , BAD_REQUEST ;
 InvalidMultiSearchFederationOptions    , InvalidRequest       , BAD_REQUEST ;
 InvalidMultiSearchMaxValuesPerFacet    , InvalidRequest       , BAD_REQUEST ;
@@ -313,6 +317,8 @@ InvalidSearchShowRankingScoreDetails , InvalidRequest , BAD_REQUEST ;
 InvalidSimilarShowRankingScoreDetails  , InvalidRequest       , BAD_REQUEST ;
 InvalidSearchSort                      , InvalidRequest       , BAD_REQUEST ;
 InvalidSearchDistinct                  , InvalidRequest       , BAD_REQUEST ;
+InvalidSearchPersonalize               , InvalidRequest       , BAD_REQUEST ;
+InvalidSearchPersonalizeUserContext    , InvalidRequest       , BAD_REQUEST ;
 InvalidSearchMediaAndVector            , InvalidRequest       , BAD_REQUEST ;
 InvalidSettingsDisplayedAttributes     , InvalidRequest       , BAD_REQUEST ;
 InvalidSettingsDistinctAttribute       , InvalidRequest       , BAD_REQUEST ;
@@ -388,6 +394,9 @@ TooManyVectors , InvalidRequest , BAD_REQUEST ;
 UnretrievableDocument                  , Internal             , BAD_REQUEST ;
 UnretrievableErrorCode                 , InvalidRequest       , BAD_REQUEST ;
 UnsupportedMediaType                   , InvalidRequest       , UNSUPPORTED_MEDIA_TYPE ;
+InvalidS3SnapshotRequest               , Internal             , BAD_REQUEST ;
+InvalidS3SnapshotParameters            , Internal             , BAD_REQUEST ;
+S3SnapshotServerError                  , Internal             , BAD_GATEWAY ;

 // Experimental features
 VectorEmbeddingError                   , InvalidRequest       , BAD_REQUEST ;
@@ -396,6 +405,7 @@ InvalidDocumentEditionContext , InvalidRequest , BAD_REQUEST ;
 InvalidDocumentEditionFunctionFilter   , InvalidRequest       , BAD_REQUEST ;
 EditDocumentsByFunctionError           , InvalidRequest       , BAD_REQUEST ;
 InvalidSettingsIndexChat               , InvalidRequest       , BAD_REQUEST ;
+InvalidSettingsVectorStore             , InvalidRequest       , BAD_REQUEST ;
 // Export
 InvalidExportUrl                       , InvalidRequest       , BAD_REQUEST ;
 InvalidExportApiKey                    , InvalidRequest       , BAD_REQUEST ;
@@ -423,6 +433,7 @@ InvalidChatCompletionSearchQueryParamPrompt , InvalidRequest , BAD_REQUEST ;
 InvalidChatCompletionSearchFilterParamPrompt   , InvalidRequest , BAD_REQUEST ;
 InvalidChatCompletionSearchIndexUidParamPrompt , InvalidRequest , BAD_REQUEST ;
 InvalidChatCompletionPreQueryPrompt            , InvalidRequest , BAD_REQUEST ;
+RequiresEnterpriseEdition                      , InvalidRequest , UNAVAILABLE_FOR_LEGAL_REASONS ;
 // Webhooks
 InvalidWebhooks                                , InvalidRequest , BAD_REQUEST ;
 InvalidWebhookUrl                              , InvalidRequest , BAD_REQUEST ;
@@ -500,7 +511,9 @@ impl ErrorCode for milli::Error {
                     Code::InvalidFacetSearchFacetName
                 }
                 UserError::CriterionError(_) => Code::InvalidSettingsRankingRules,
-                UserError::InvalidGeoField { .. } => Code::InvalidDocumentGeoField,
+                UserError::InvalidGeoField { .. } | UserError::GeoJsonError(_) => {
+                    Code::InvalidDocumentGeoField
+                }
                 UserError::InvalidVectorDimensions { .. }
                 | UserError::InvalidIndexingVectorDimensions { .. } => {
                     Code::InvalidVectorDimensions
@@ -524,6 +537,17 @@ impl ErrorCode for milli::Error {
                 | UserError::DocumentEditionCompilationError(_) => {
                     Code::EditDocumentsByFunctionError
                 }
+                UserError::CelluliteError(err) => match err {
+                    cellulite::Error::BuildCanceled
+                    | cellulite::Error::VersionMismatchOnBuild(_)
+                    | cellulite::Error::DatabaseDoesntExists
+                    | cellulite::Error::Heed(_)
+                    | cellulite::Error::InvalidGeometry(_)
+                    | cellulite::Error::InternalDocIdMissing(_, _)
+                    | cellulite::Error::CannotConvertLineToCell(_, _, _) => Code::Internal,
+                    cellulite::Error::InvalidGeoJson(_) => Code::InvalidDocumentGeojsonField,
+                },
+                UserError::MalformedGeojson(_) => Code::InvalidDocumentGeojsonField,
             }
         }
     }
@@ -663,6 +687,18 @@ impl fmt::Display for deserr_codes::InvalidNetworkSearchApiKey {
     }
 }

+impl fmt::Display for deserr_codes::InvalidSearchPersonalize {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "the value of `personalize` is invalid, expected a JSON object with `userContext` string.")
+    }
+}
+
+impl fmt::Display for deserr_codes::InvalidSearchPersonalizeUserContext {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "the value of `userContext` is invalid, expected a string.")
+    }
+}
+
 #[macro_export]
 macro_rules! internal_error {
     ($target:ty : $($other:path), *) => {
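Among the new error codes, `RequiresEnterpriseEdition` stands out for mapping to `UNAVAILABLE_FOR_LEGAL_REASONS` rather than a plain 4xx validation status; that constant is HTTP 451, used here for features gated behind the BSL-licensed enterprise edition. A quick check of what the actix-web constant denotes:

```rust
use actix_web::http::StatusCode;

// UNAVAILABLE_FOR_LEGAL_REASONS is HTTP 451, the status the new
// RequiresEnterpriseEdition error code resolves to.
fn main() {
    assert_eq!(StatusCode::UNAVAILABLE_FOR_LEGAL_REASONS.as_u16(), 451);
}
```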
@@ -21,6 +21,7 @@ pub struct RuntimeTogglableFeatures {
     pub composite_embedders: bool,
     pub chat_completions: bool,
     pub multimodal: bool,
+    pub vector_store_setting: bool,
 }

 #[derive(Default, Debug, Clone, Copy)]
Some files were not shown because too many files have changed in this diff.