mirror of https://github.com/meilisearch/meilisearch.git (synced 2025-11-27 00:00:29 +00:00)

Compare commits: v0.25.2...optimize-l (563 commits)
.github/ISSUE_TEMPLATE/bug_report.md (vendored, 4 changes)

@@ -23,8 +23,8 @@ A clear and concise description of what you expected to happen.
 **Screenshots**
 If applicable, add screenshots to help explain your problem.
 
-**MeiliSearch version:** [e.g. v0.20.0]
+**Meilisearch version:** [e.g. v0.20.0]
 
 **Additional context**
 Additional information that may be relevant to the issue.
-[e.g. architecture, device, OS, browser]
+[e.g. architecture, device, OS, browser]
.github/ISSUE_TEMPLATE/config.yml (vendored, 9 changes)

@@ -1,10 +1,13 @@
 contact_links:
-  - name: Feature request
+  - name: Language support request & feedback
+    url: https://github.com/meilisearch/product/discussions/categories/feedback-feature-proposal?discussions_q=label%3Aproduct%3Acore%3Atokenizer+category%3A%22Feedback+%26+Feature+Proposal%22
+    about: The requests and feedback regarding Language support are not managed in this repository. Please upvote the related discussion in our dedicated product repository or open a new one if it doesn't exist.
+  - name: Feature request & feedback
     url: https://github.com/meilisearch/product/discussions/categories/feedback-feature-proposal
-    about: The feature requests are not managed in this repository, please open a discussion in our dedicated product repository
+    about: The feature requests and feedback regarding the already existing features are not managed in this repository. Please open a discussion in our dedicated product repository
   - name: Documentation issue
     url: https://github.com/meilisearch/documentation/issues/new
     about: For documentation issues, open an issue or a PR in the documentation repository
   - name: Support questions & other
-    url: https://github.com/meilisearch/MeiliSearch/discussions/new
+    url: https://github.com/meilisearch/meilisearch/discussions/new
     about: For any other question, open a discussion in this repository
.github/dependabot.yml (vendored, new file, 13 lines)

@@ -0,0 +1,13 @@
+# Set update schedule for GitHub Actions only
+
+version: 2
+updates:
+
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "monthly"
+    labels:
+      - 'skip changelog'
+      - 'dependencies'
+    rebase-strategy: disabled
.github/scripts/check-release.sh (vendored, new file, 28 lines)

@@ -0,0 +1,28 @@
+#!/bin/bash
+
+# check_tag $current_tag $file_tag $file_name
+function check_tag {
+    if [[ "$1" != "$2" ]]; then
+        echo "Error: the current tag does not match the version in $3: found $2 - expected $1"
+        ret=1
+    fi
+}
+
+ret=0
+current_tag=${GITHUB_REF#'refs/tags/v'}
+
+toml_files='*/Cargo.toml'
+for toml_file in $toml_files;
+do
+    file_tag="$(grep '^version = ' $toml_file | cut -d '=' -f 2 | tr -d '"' | tr -d ' ')"
+    check_tag $current_tag $file_tag $toml_file
+done
+
+lock_file='Cargo.lock'
+lock_tag=$(grep -A 1 'name = "meilisearch-auth"' $lock_file | grep version | cut -d '=' -f 2 | tr -d '"' | tr -d ' ')
+check_tag $current_tag $lock_tag $lock_file
+
+if [[ "$ret" -eq 0 ]] ; then
+    echo 'OK'
+fi
+exit $ret
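The script above reads the pushed tag from the GITHUB_REF variable that GitHub Actions provides, so it can be exercised locally by setting that variable by hand. A sketch, with a made-up tag:

```bash
# Simulate the ref GitHub Actions sets on a tag push (v0.28.0 is hypothetical).
GITHUB_REF='refs/tags/v0.28.0' bash .github/scripts/check-release.sh
# Prints 'OK' and exits 0 when every */Cargo.toml and the meilisearch-auth
# entry in Cargo.lock declare version 0.28.0; otherwise it reports each
# mismatch and exits 1.
```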
.github/scripts/is-latest-release.sh

@@ -1,14 +1,14 @@
 #!/bin/sh
 
-# Checks if the current tag should be the latest (in terms of semver and not of release date).
-# Ex: previous tag -> v0.10.1
-#     new tag      -> v0.8.12
-#     The new tag should not be the latest
-#     So it returns "false", the CI should not run for the release v0.8.2
-
-# Used in GHA in publish-docker-latest.yml
+# Was used in our CIs to publish the latest docker image. Not used anymore, will be used again when v1 and v2 will be out and we will want to maintain multiple stable versions.
+# Returns "true" or "false" (as a string) to be used in the `if` in GHA
+
+# Checks if the current tag should be the latest (in terms of semver and not of release date).
+# Ex: previous tag -> v2.1.1
+#     new tag      -> v1.20.3
+#     The new tag (v1.20.3) should NOT be the latest
+#     So it returns "false", the `latest` tag should not be updated for the release v1.20.3 and still need to correspond to v2.1.1
 
 # GLOBAL
 GREP_SEMVER_REGEXP='v\([0-9]*\)[.]\([0-9]*\)[.]\([0-9]*\)$' # i.e. v[number].[number].[number]

@@ -74,7 +74,7 @@ semverLT() {
 # Returns the tag of the latest stable release (in terms of semver and not of release date)
 get_latest() {
     temp_file='temp_file' # temp_file needed because the grep would start before the download is over
-    curl -s 'https://api.github.com/repos/meilisearch/MeiliSearch/releases' > "$temp_file"
+    curl -s 'https://api.github.com/repos/meilisearch/meilisearch/releases' > "$temp_file"
     releases=$(cat "$temp_file" | \
         grep -E "tag_name|draft|prerelease" \
         | tr -d ',"' | cut -d ':' -f2 | tr -d ' ')
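The GREP_SEMVER_REGEXP pattern above is what keeps pre-releases from ever being treated as stable: it only accepts plain v<number>.<number>.<number> tags. A quick sanity check of the pattern with invented sample tags:

```bash
# Only the first sample tag matches the stable-tag pattern (BRE, anchored at end of line).
GREP_SEMVER_REGEXP='v\([0-9]*\)[.]\([0-9]*\)[.]\([0-9]*\)$'
for tag in v0.27.2 v0.28.0rc1 0.28.0; do
  if echo "$tag" | grep -q "$GREP_SEMVER_REGEXP"; then
    echo "$tag: stable format"
  else
    echo "$tag: not a stable tag"
  fi
done
```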
.github/workflows/README.md (vendored, 20 changes, deleted)

@@ -1,20 +0,0 @@
-# GitHub Actions Workflow for MeiliSearch
-
-> **Note:**
-
-> - We do not use [cache](https://github.com/actions/cache) yet but we could use it to speed up CI
-
-## Workflow
-
-- On each pull request, we trigger `cargo test`.
-- On each tag, we build:
-  - the tagged Docker image and publish it to Docker Hub
-  - the binaries for MacOS, Ubuntu, and Windows
-  - the Debian package
-- On each stable release (`v*.*.*` tag):
-  - we build the `latest` Docker image and publish it to Docker Hub
-  - we publish the binary to Hombrew and Gemfury
-
-## Problems
-
-- We do not test on Windows because we are unable to make it work, there is a disk space problem.
.github/workflows/coverage.yml (vendored, 4 changes)

@@ -8,7 +8,7 @@ jobs:
   nightly-coverage:
     runs-on: ubuntu-18.04
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: nightly
@@ -25,7 +25,7 @@ jobs:
          RUSTFLAGS: "-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Clink-dead-code -Coverflow-checks=off -Cpanic=unwind -Zpanic_abort_tests"
      - uses: actions-rs/grcov@v0.1
      - name: Upload coverage to Codecov
-        uses: codecov/codecov-action@v1
+        uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          file: ${{ steps.coverage.outputs.report }}
.github/workflows/create-issue-dependencies.yml (vendored, new file, 23 lines)

@@ -0,0 +1,23 @@
+name: Create issue to upgrade dependencies
+on:
+  schedule:
+    - cron: '0 0 1 */3 *'
+  workflow_dispatch:
+
+jobs:
+  create-issue:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - name: Create an issue
+        uses: actions-ecosystem/action-create-issue@v1
+        with:
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          title: Upgrade dependencies
+          body: |
+            We need to update the dependencies of the Meilisearch repository, and, if possible, the dependencies of all the core-team repositories that Meilisearch depends on (milli, charabia, heed...).
+
+            ⚠️ This issue should only be done at the beginning of the sprint!
+          labels: |
+            dependencies
+            maintenance
.github/workflows/flaky.yml (vendored, 2 changes)

@@ -8,7 +8,7 @@ jobs:
     runs-on: ubuntu-18.04
 
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
      - name: Install cargo-flaky
        run: cargo install cargo-flaky
      - name: Run cargo flaky 100 times
.github/workflows/publish-binaries.yml (vendored, 106 changes)

@@ -5,9 +5,33 @@ on:
 name: Publish binaries to release
 
 jobs:
+  check-version:
+    name: Check the version validity
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      # Check if the tag has the v<nmumber>.<number>.<number> format.
+      # If yes, it means we are publishing an official release.
+      # If no, we are releasing a RC, so no need to check the version.
+      - name: Check tag format
+        if: github.event_name != 'schedule'
+        id: check-tag-format
+        run: |
+          escaped_tag=$(printf "%q" ${{ github.ref_name }})
+
+          if [[ $escaped_tag =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+            echo ::set-output name=stable::true
+          else
+            echo ::set-output name=stable::false
+          fi
+      - name: Check release validity
+        if: steps.check-tag-format.outputs.stable == 'true'
+        run: bash .github/scripts/check-release.sh
+
   publish:
-    name: Publish for ${{ matrix.os }}
+    name: Publish binary for ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
+    needs: check-version
    strategy:
      fail-fast: false
      matrix:
@@ -27,7 +51,7 @@ jobs:
      - uses: hecrj/setup-rust-action@master
        with:
          rust-version: stable
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
      - name: Build
        run: cargo build --release --locked
      - name: Upload binaries to release
@@ -38,28 +62,70 @@ jobs:
          asset_name: ${{ matrix.asset_name }}
          tag: ${{ github.ref }}
 
-  publish-armv8:
-    name: Publish for ARMv8
-    runs-on: ubuntu-18.04
+  publish-aarch64:
+    name: Publish binary for aarch64
+    runs-on: ${{ matrix.os }}
+    needs: check-version
+    continue-on-error: false
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - build: aarch64
+            os: ubuntu-18.04
+            target: aarch64-unknown-linux-gnu
+            linker: gcc-aarch64-linux-gnu
+            use-cross: true
+            asset_name: meilisearch-linux-aarch64
+
    steps:
-      - uses: actions/checkout@v2
-      - uses: uraimo/run-on-arch-action@v2.1.1
-        id: runcmd
+      - name: Checkout repository
+        uses: actions/checkout@v3
+
+      - name: Installing Rust toolchain
+        uses: actions-rs/toolchain@v1
        with:
-          arch: aarch64 # aka ARMv8
-          distro: ubuntu18.04
-          env: |
-            JEMALLOC_SYS_WITH_LG_PAGE: 16
-          run: |
-            apt update
-            apt install -y curl gcc make
-            curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --profile minimal --default-toolchain stable
-            source $HOME/.cargo/env
-            cargo build --release --locked
+          toolchain: stable
+          profile: minimal
+          target: ${{ matrix.target }}
+          override: true
+
+      - name: APT update
+        run: |
+          sudo apt update
+
+      - name: Install target specific tools
+        if: matrix.use-cross
+        run: |
+          sudo apt-get install -y ${{ matrix.linker }}
+
+      - name: Configure target aarch64 GNU
+        if: matrix.target == 'aarch64-unknown-linux-gnu'
+        ## Environment variable is not passed using env:
+        ## LD gold won't work with MUSL
+        # env:
+        #   JEMALLOC_SYS_WITH_LG_PAGE: 16
+        #   RUSTFLAGS: '-Clink-arg=-fuse-ld=gold'
+        run: |
+          echo '[target.aarch64-unknown-linux-gnu]' >> ~/.cargo/config
+          echo 'linker = "aarch64-linux-gnu-gcc"' >> ~/.cargo/config
+          echo 'JEMALLOC_SYS_WITH_LG_PAGE=16' >> $GITHUB_ENV
+          echo RUSTFLAGS="-Clink-arg=-fuse-ld=gold" >> $GITHUB_ENV
+
+      - name: Cargo build
+        uses: actions-rs/cargo@v1
+        with:
+          command: build
+          use-cross: ${{ matrix.use-cross }}
+          args: --release --target ${{ matrix.target }}
+
+      - name: List target output files
+        run: ls -lR ./target
+
      - name: Upload the binary to release
        uses: svenstaro/upload-release-action@v1-release
        with:
          repo_token: ${{ secrets.PUBLISH_TOKEN }}
-          file: target/release/meilisearch
-          asset_name: meilisearch-linux-armv8
+          file: target/${{ matrix.target }}/release/meilisearch
+          asset_name: ${{ matrix.asset_name }}
          tag: ${{ github.ref }}
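For reference, the cross-compilation the new publish-aarch64 job performs can be approximated on a local Ubuntu machine. This is a sketch under the same assumptions as the workflow (GNU cross linker, jemalloc sized for 64 KiB pages), not an official procedure:

```bash
# Install the cross toolchain and the Rust target the job uses.
sudo apt-get install -y gcc-aarch64-linux-gnu
rustup target add aarch64-unknown-linux-gnu

# The same knobs the job writes to ~/.cargo/config and $GITHUB_ENV:
# use the GNU cross linker, and build jemalloc for 2^16-byte (64 KiB) pages,
# which some aarch64 kernels use.
export CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER=aarch64-linux-gnu-gcc
export JEMALLOC_SYS_WITH_LG_PAGE=16

cargo build --release --target aarch64-unknown-linux-gnu
# The binary lands in target/aarch64-unknown-linux-gnu/release/meilisearch.
```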
.github/workflows/publish-crossbuild.yml (vendored, 76 changes, deleted)

@@ -1,76 +0,0 @@
-name: Publish aarch64 binary
-
-on:
-  release:
-    types: [published]
-
-env:
-  CARGO_TERM_COLOR: always
-
-jobs:
-  publish-aarch64:
-    name: Publish to Github
-    runs-on: ${{ matrix.os }}
-    continue-on-error: false
-    strategy:
-      fail-fast: false
-      matrix:
-        include:
-          - build: aarch64
-            os: ubuntu-18.04
-            target: aarch64-unknown-linux-gnu
-            linker: gcc-aarch64-linux-gnu
-            use-cross: true
-            asset_name: meilisearch-linux-aarch64
-
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v2
-
-      - name: Installing Rust toolchain
-        uses: actions-rs/toolchain@v1
-        with:
-          toolchain: stable
-          profile: minimal
-          target: ${{ matrix.target }}
-          override: true
-
-      - name: APT update
-        run: |
-          sudo apt update
-
-      - name: Install target specific tools
-        if: matrix.use-cross
-        run: |
-          sudo apt-get install -y ${{ matrix.linker }}
-
-      - name: Configure target aarch64 GNU
-        if: matrix.target == 'aarch64-unknown-linux-gnu'
-        ## Environment variable is not passed using env:
-        ## LD gold won't work with MUSL
-        # env:
-        #   JEMALLOC_SYS_WITH_LG_PAGE: 16
-        #   RUSTFLAGS: '-Clink-arg=-fuse-ld=gold'
-        run: |
-          echo '[target.aarch64-unknown-linux-gnu]' >> ~/.cargo/config
-          echo 'linker = "aarch64-linux-gnu-gcc"' >> ~/.cargo/config
-          echo 'JEMALLOC_SYS_WITH_LG_PAGE=16' >> $GITHUB_ENV
-          echo RUSTFLAGS="-Clink-arg=-fuse-ld=gold" >> $GITHUB_ENV
-
-      - name: Cargo build
-        uses: actions-rs/cargo@v1
-        with:
-          command: build
-          use-cross: ${{ matrix.use-cross }}
-          args: --release --target ${{ matrix.target }}
-
-      - name: List target output files
-        run: ls -lR ./target
-
-      - name: Upload the binary to release
-        uses: svenstaro/upload-release-action@v1-release
-        with:
-          repo_token: ${{ secrets.PUBLISH_TOKEN }}
-          file: target/${{ matrix.target }}/release/meilisearch
-          asset_name: ${{ matrix.asset_name }}
-          tag: ${{ github.ref }}
.github/workflows/publish-deb-brew-pkg.yml (vendored, 12 changes)

@@ -5,16 +5,25 @@ on:
   types: [released]
 
 jobs:
+  check-version:
+    name: Check the version validity
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - name: Check release validity
+        run: bash .github/scripts/check-release.sh
+
   debian:
     name: Publish debian packagge
     runs-on: ubuntu-18.04
+    needs: check-version
    steps:
      - uses: hecrj/setup-rust-action@master
        with:
          rust-version: stable
      - name: Install cargo-deb
        run: cargo install cargo-deb
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
      - name: Build deb package
        run: cargo deb -p meilisearch-http -o target/debian/meilisearch.deb
      - name: Upload debian pkg to release
@@ -30,6 +39,7 @@ jobs:
   homebrew:
     name: Bump Homebrew formula
     runs-on: ubuntu-18.04
+    needs: check-version
    steps:
      - name: Create PR to Homebrew
        uses: mislav/bump-homebrew-formula-action@v1
.github/workflows/publish-docker-images.yml (vendored, new file, 71 lines)

@@ -0,0 +1,71 @@
+---
+on:
+  schedule:
+    - cron: '0 4 * * *' # Every day at 4:00am
+  push:
+    tags:
+      - '*'
+
+name: Publish tagged images to Docker Hub
+
+jobs:
+  docker:
+    runs-on: docker
+    steps:
+      - uses: actions/checkout@v2
+
+      # Check if the tag has the v<nmumber>.<number>.<number> format. If yes, it means we are publishing an official release.
+      # In this situation, we need to set `output.stable` to create/update the following tags (additionally to the `vX.Y.Z` Docker tag):
+      # - a `vX.Y` (without patch version) Docker tag
+      # - a `latest` Docker tag
+      - name: Check tag format
+        if: github.event_name != 'schedule'
+        id: check-tag-format
+        run: |
+          escaped_tag=$(printf "%q" ${{ github.ref_name }})
+
+          if [[ $escaped_tag =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+            echo ::set-output name=stable::true
+          else
+            echo ::set-output name=stable::false
+          fi
+
+      # Check only the validity of the tag for official releases (not for pre-releases or other tags)
+      - name: Check release validity
+        if: github.event_name != 'schedule' && steps.check-tag-format.outputs.stable == 'true'
+        run: bash .github/scripts/check-release.sh
+
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v2
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+
+      - name: Login to Docker Hub
+        if: github.event_name != 'schedule'
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Docker meta
+        id: meta
+        uses: docker/metadata-action@v4
+        with:
+          images: getmeili/meilisearch
+          # The lastest and `vX.Y` tags are only pushed for the official Meilisearch releases
+          # See https://github.com/docker/metadata-action#latest-tag
+          flavor: latest=false
+          tags: |
+            type=ref,event=tag
+            type=semver,pattern=v{{major}}.{{minor}},enable=${{ steps.check-tag-format.outputs.stable == 'true' }}
+            type=raw,value=latest,enable=${{ steps.check-tag-format.outputs.stable == 'true' }}
+
+      - name: Build and push
+        id: docker_build
+        uses: docker/build-push-action@v3
+        with:
+          # We do not push tags for the cron jobs, this is only for test purposes
+          push: ${{ github.event_name != 'schedule' }}
+          platforms: linux/amd64,linux/arm64
+          tags: ${{ steps.meta.outputs.tags }}
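Concretely, with `flavor: latest=false` plus the three `tags:` rules above, a stable tag push produces a version tag, a minor-version tag, and `latest`, while a release candidate only produces its own `type=ref` tag. The manual equivalent for a hypothetical v0.28.0 release would be roughly:

```bash
# Hypothetical stable release v0.28.0: the three tags the workflow would push.
docker buildx build --platform linux/amd64,linux/arm64 \
  -t getmeili/meilisearch:v0.28.0 \
  -t getmeili/meilisearch:v0.28 \
  -t getmeili/meilisearch:latest \
  --push .
# An RC such as v0.28.0rc0 fails the stable-tag check, so only
# getmeili/meilisearch:v0.28.0rc0 would be pushed.
```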
.github/workflows/publish-docker-latest.yml (vendored, 30 changes, deleted)

@@ -1,30 +0,0 @@
----
-on:
-  release:
-    types: [released]
-
-name: Publish latest image to Docker Hub
-
-jobs:
-  docker-latest:
-    runs-on: docker
-    steps:
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v1
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v1
-
-      - name: Login to DockerHub
-        uses: docker/login-action@v1
-        with:
-          username: ${{ secrets.DOCKER_USERNAME }}
-          password: ${{ secrets.DOCKER_PASSWORD }}
-
-      - name: Build and push
-        id: docker_build
-        uses: docker/build-push-action@v2
-        with:
-          push: true
-          platforms: linux/amd64,linux/arm64
-          tags: getmeili/meilisearch:latest
.github/workflows/publish-docker-tag.yml (vendored, 39 changes, deleted)

@@ -1,39 +0,0 @@
----
-on:
-  push:
-    tags:
-      - '*'
-
-name: Publish tagged image to Docker Hub
-
-jobs:
-  docker-tag:
-    runs-on: docker
-    steps:
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v1
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v1
-
-      - name: Login to DockerHub
-        uses: docker/login-action@v1
-        with:
-          username: ${{ secrets.DOCKER_USERNAME }}
-          password: ${{ secrets.DOCKER_PASSWORD }}
-
-      - name: Docker meta
-        id: meta
-        uses: docker/metadata-action@v3
-        with:
-          images: getmeili/meilisearch
-          flavor: latest=false
-          tags: type=ref,event=tag
-
-      - name: Build and push
-        id: docker_build
-        uses: docker/build-push-action@v2
-        with:
-          push: true
-          platforms: linux/amd64,linux/arm64
-          tags: ${{ steps.meta.outputs.tags }}
.github/workflows/rust.yml (vendored, 38 changes)

@@ -12,6 +12,7 @@ on:
 env:
   CARGO_TERM_COLOR: always
   RUST_BACKTRACE: 1
+  RUSTFLAGS: "-D warnings"
 
 jobs:
   tests:
@@ -22,9 +23,10 @@ jobs:
      matrix:
        os: [ubuntu-18.04, macos-latest, windows-latest]
    steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
+      - uses: rui314/setup-mold@v1 # Optimize link time
      - name: Cache dependencies
-        uses: Swatinem/rust-cache@v1.3.0
+        uses: Swatinem/rust-cache@v2.0.0
      - name: Run cargo check without any default features
        uses: actions-rs/cargo@v1
        with:
@@ -36,11 +38,32 @@ jobs:
          command: test
          args: --locked --release
 
+  # We run tests in debug also, to make sure that the debug_assertions are hit
+  test-debug:
+    name: Run tests in debug
+    runs-on: ubuntu-18.04
+    steps:
+      - uses: actions/checkout@v3
+      - uses: rui314/setup-mold@v1 # Optimize link time
+      - uses: actions-rs/toolchain@v1
+        with:
+          profile: minimal
+          toolchain: stable
+          override: true
+      - name: Cache dependencies
+        uses: Swatinem/rust-cache@v2.0.0
+      - name: Run tests in debug
+        uses: actions-rs/cargo@v1
+        with:
+          command: test
+          args: --locked
+
   clippy:
     name: Run Clippy
     runs-on: ubuntu-18.04
    steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
+      - uses: rui314/setup-mold@v1 # Optimize link time
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
@@ -48,7 +71,7 @@ jobs:
          override: true
          components: clippy
      - name: Cache dependencies
-        uses: Swatinem/rust-cache@v1.3.0
+        uses: Swatinem/rust-cache@v2.0.0
      - name: Run cargo clippy
        uses: actions-rs/cargo@v1
        with:
@@ -59,14 +82,15 @@ jobs:
     name: Run Rustfmt
     runs-on: ubuntu-18.04
    steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
+      - uses: rui314/setup-mold@v1 # Optimize link time
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
-          toolchain: nightly
+          toolchain: stable
          override: true
          components: rustfmt
      - name: Cache dependencies
-        uses: Swatinem/rust-cache@v1.3.0
+        uses: Swatinem/rust-cache@v2.0.0
      - name: Run cargo fmt
        run: cargo fmt --all -- --check
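Taken together, the jobs in this workflow boil down to a handful of cargo invocations. A sketch for reproducing the same checks locally before pushing (the exact arguments of the no-default-features check are truncated in the hunk above):

```bash
# The workflow now exports this, so any compiler warning fails CI.
export RUSTFLAGS="-D warnings"

cargo check --no-default-features  # exact CI args are truncated above
cargo test --locked --release      # release-mode tests
cargo test --locked                # debug-mode tests, so debug_assertions fire
cargo clippy
cargo fmt --all -- --check
```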
CONTRIBUTING.md

@@ -1,17 +1,22 @@
 # Contributing
 
-First, thank you for contributing to MeiliSearch! The goal of this document is to provide everything you need to start contributing to MeiliSearch.
+First, thank you for contributing to Meilisearch! The goal of this document is to provide everything you need to start contributing to Meilisearch.
+
+Remember that there are many ways to contribute other than writing code: writing [tutorials or blog posts](https://github.com/meilisearch/awesome-meilisearch), improving [the documentation](https://github.com/meilisearch/documentation), submitting [bug reports](https://github.com/meilisearch/meilisearch/issues/new?assignees=&labels=&template=bug_report.md&title=) and [feature requests](https://github.com/meilisearch/product/discussions/categories/feedback-feature-proposal)...
 
 ## Table of Contents
 
 - [Assumptions](#assumptions)
 - [How to Contribute](#how-to-contribute)
 - [Development Workflow](#development-workflow)
 - [Git Guidelines](#git-guidelines)
+- [Release Process (for internal team only)](#release-process-for-internal-team-only)
 
 ## Assumptions
 
-1. **You're familiar with [Github](https://github.com) and the [Pull Requests](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests)(PR) workflow.**
-2. **You've read the MeiliSearch [documentation](https://docs.meilisearch.com).**
-3. **You know about the [MeiliSearch community](https://docs.meilisearch.com/learn/what_is_meilisearch/contact.html).
+1. **You're familiar with [GitHub](https://github.com) and the [Pull Requests](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests)(PR) workflow.**
+2. **You've read the Meilisearch [documentation](https://docs.meilisearch.com).**
+3. **You know about the [Meilisearch community](https://docs.meilisearch.com/learn/what_is_meilisearch/contact.html).
    Please use this for help.**
 
@@ -19,21 +24,21 @@ First, thank you for contributing to MeiliSearch! The goal of this document is t
 1. Ensure your change has an issue! Find an
    [existing issue](https://github.com/meilisearch/meilisearch/issues/) or [open a new issue](https://github.com/meilisearch/meilisearch/issues/new).
    * This is where you can get a feel if the change will be accepted or not.
-2. Once approved, [fork the MeiliSearch repository](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) in your own Github account.
+2. Once approved, [fork the Meilisearch repository](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) in your own GitHub account.
 3. [Create a new Git branch](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-and-deleting-branches-within-your-repository)
 4. Review the [Development Workflow](#development-workflow) section that describes the steps to maintain the repository.
 5. Make your changes on your branch.
-6. [Submit the branch as a Pull Request](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request-from-a-fork) pointing to the `main` branch of the MeiliSearch repository. A maintainer should comment and/or review your Pull Request within a few days. Although depending on the circumstances, it may take longer.
+6. [Submit the branch as a Pull Request](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request-from-a-fork) pointing to the `main` branch of the Meilisearch repository. A maintainer should comment and/or review your Pull Request within a few days. Although depending on the circumstances, it may take longer.
 
 ## Development Workflow
 
-### Setup and run MeiliSearch
+### Setup and run Meilisearch
 
 ```bash
 cargo run --release
 ```
 
-We recommend using the `--release` flag to test the full performance of MeiliSearch.
+We recommend using the `--release` flag to test the full performance of Meilisearch.
 
 ### Test
 
@@ -41,6 +46,8 @@ We recommend using the `--release` flag to test the full performance of MeiliSea
 cargo test
 ```
 
+This command will be triggered to each PR as a requirement for merging it.
+
 If you get a "Too many open files" error you might want to increase the open file limit using this command:
 
 ```bash
@@ -65,7 +72,7 @@ As minimal requirements, your commit message should:
 
 We don't follow any other convention, but if you want to use one, we recommend [the Chris Beams one](https://chris.beams.io/posts/git-commit/).
 
-### Github Pull Requests
+### GitHub Pull Requests
 
 Some notes on GitHub PRs:
 
@@ -75,6 +82,29 @@ Some notes on GitHub PRs:
   The draft PRs are recommended when you want to show that you are working on something and make your work visible.
 - The branch related to the PR must be **up-to-date with `main`** before merging. Fortunately, this project uses [Bors](https://github.com/bors-ng/bors-ng) to automatically enforce this requirement without the PR author having to rebase manually.
 
+## Release Process (for internal team only)
+
+Meilisearch tools follow the [Semantic Versioning Convention](https://semver.org/).
+
+### Automation to rebase and Merge the PRs
+
+This project integrates a bot that helps us manage pull requests merging.<br>
+_[Read more about this](https://github.com/meilisearch/integration-guides/blob/main/resources/bors.md)._
+
+### How to Publish a new Release
+
+The full Meilisearch release process is described in [this guide](https://github.com/meilisearch/core-team/blob/main/resources/meilisearch-release.md). Please follow it carefully before doing any release.
+
+### Release assets
+
+For each release, the following assets are created:
+- Binaries for differents platforms (Linux, MacOS, Windows and ARM architectures) are attached to the GitHub release
+- Binaries are pushed to HomeBrew and APT (not published for RC)
+- Docker tags are created/updated:
+  - `vX.Y.Z`
+  - `vX.Y` (not published for RC)
+  - `latest` (not published for RC)
+
 <hr>
 
 Thank you again for reading this through, we can not wait to begin to work with you if you made your way through this contributing guide ❤️
Cargo.lock (generated, 2154 changes)

File diff suppressed because it is too large.
Cargo.toml (10 changes)

@@ -1,9 +1,15 @@
 [workspace]
+resolver = "2"
 members = [
     "meilisearch-http",
-    "meilisearch-error",
+    "meilisearch-types",
     "meilisearch-lib",
     "meilisearch-auth",
+    "permissive-json-pointer",
 ]
 
-resolver = "2"
+[profile.dev.package.flate2]
+opt-level = 3
+
+[profile.dev.package.milli]
+opt-level = 3
Dockerfile (47 changes)

@@ -1,36 +1,22 @@
 # Compile
-FROM alpine:3.14 AS compiler
+FROM rust:alpine3.14 AS compiler
 
-RUN apk update --quiet \
-    && apk add -q --no-cache curl build-base
-
-RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
+RUN apk add -q --update-cache --no-cache build-base openssl-dev
 
 WORKDIR /meilisearch
 
-COPY Cargo.lock .
-COPY Cargo.toml .
-
-COPY meilisearch-auth/Cargo.toml meilisearch-auth/
-COPY meilisearch-error/Cargo.toml meilisearch-error/
-COPY meilisearch-http/Cargo.toml meilisearch-http/
-COPY meilisearch-lib/Cargo.toml meilisearch-lib/
-
-ENV RUSTFLAGS="-C target-feature=-crt-static"
-
-# Create dummy main.rs files for each workspace member to be able to compile all the dependencies
-RUN find . -type d -name "meilisearch-*" | xargs -I{} sh -c 'mkdir {}/src; echo "fn main() { }" > {}/src/main.rs;'
-# Use `cargo build` instead of `cargo vendor` because we need to not only download but compile dependencies too
-RUN $HOME/.cargo/bin/cargo build --release
-# Cleanup dummy main.rs files
-RUN find . -path "*/src/main.rs" -delete
-
 ARG COMMIT_SHA
 ARG COMMIT_DATE
 ENV COMMIT_SHA=${COMMIT_SHA} COMMIT_DATE=${COMMIT_DATE}
+ENV RUSTFLAGS="-C target-feature=-crt-static"
 
 COPY . .
-RUN $HOME/.cargo/bin/cargo build --release
+RUN set -eux; \
+    apkArch="$(apk --print-arch)"; \
+    if [ "$apkArch" = "aarch64" ]; then \
+        export JEMALLOC_SYS_WITH_LG_PAGE=16; \
+    fi && \
+    cargo build --release
 
 # Run
 FROM alpine:3.14
@@ -41,9 +27,20 @@ ENV MEILI_SERVER_PROVIDER docker
 RUN apk update --quiet \
     && apk add -q --no-cache libgcc tini curl
 
-COPY --from=compiler /meilisearch/target/release/meilisearch .
+# add meilisearch to the `/bin` so you can run it from anywhere and it's easy
+# to find.
+COPY --from=compiler /meilisearch/target/release/meilisearch /bin/meilisearch
+# To stay compatible with the older version of the container (pre v0.27.0) we're
+# going to symlink the meilisearch binary in the path to `/meilisearch`
+RUN ln -s /bin/meilisearch /meilisearch
+
+# This directory should hold all the data related to meilisearch so we're going
+# to move our PWD in there.
+# We don't want to put the meilisearch binary
+WORKDIR /meili_data
+
 
 EXPOSE 7700/tcp
 
 ENTRYPOINT ["tini", "--"]
-CMD ./meilisearch
+CMD /bin/meilisearch
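A local build of this image has to supply the two build args the compile stage declares. A sketch that derives them from the current checkout (the local image name is arbitrary):

```bash
# Pass the build args the Dockerfile declares, taken from the local git history.
docker build \
  --build-arg COMMIT_SHA="$(git rev-parse HEAD)" \
  --build-arg COMMIT_DATE="$(git show -s --format=%cI HEAD)" \
  -t meilisearch-local .

# The image's working directory is now /meili_data, so mount a volume there
# to persist the data, matching the README's updated docker run line.
docker run -p 7700:7700 -v "$(pwd)/meili_data:/meili_data" meilisearch-local
```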
LICENSE (2 changes)

@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2019-2021 Meili SAS
+Copyright (c) 2019-2022 Meili SAS
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
README.md (48 changes)

@@ -1,8 +1,8 @@
 <p align="center">
-  <img src="assets/logo.svg" alt="MeiliSearch" width="200" height="200" />
+  <img src="assets/logo.svg" alt="Meilisearch" width="200" height="200" />
 </p>
 
-<h1 align="center">MeiliSearch</h1>
+<h1 align="center">Meilisearch</h1>
 
 <h4 align="center">
   <a href="https://www.meilisearch.com">Website</a> |
@@ -15,17 +15,17 @@
 </h4>
 
 <p align="center">
-  <a href="https://github.com/meilisearch/MeiliSearch/actions"><img src="https://github.com/meilisearch/MeiliSearch/workflows/Cargo%20test/badge.svg" alt="Build Status"></a>
-  <a href="https://deps.rs/repo/github/meilisearch/MeiliSearch"><img src="https://deps.rs/repo/github/meilisearch/MeiliSearch/status.svg" alt="Dependency status"></a>
-  <a href="https://github.com/meilisearch/MeiliSearch/blob/main/LICENSE"><img src="https://img.shields.io/badge/license-MIT-informational" alt="License"></a>
-  <a href="https://slack.meilisearch.com"><img src="https://img.shields.io/badge/slack-MeiliSearch-blue.svg?logo=slack" alt="Slack"></a>
-  <a href="https://github.com/meilisearch/MeiliSearch/discussions" alt="Discussions"><img src="https://img.shields.io/badge/github-discussions-red" /></a>
+  <a href="https://github.com/meilisearch/meilisearch/actions"><img src="https://github.com/meilisearch/meilisearch/workflows/Cargo%20test/badge.svg" alt="Build Status"></a>
+  <a href="https://deps.rs/repo/github/meilisearch/meilisearch"><img src="https://deps.rs/repo/github/meilisearch/meilisearch/status.svg" alt="Dependency status"></a>
+  <a href="https://github.com/meilisearch/meilisearch/blob/main/LICENSE"><img src="https://img.shields.io/badge/license-MIT-informational" alt="License"></a>
+  <a href="https://slack.meilisearch.com"><img src="https://img.shields.io/badge/slack-meilisearch-blue.svg?logo=slack" alt="Slack"></a>
+  <a href="https://github.com/meilisearch/meilisearch/discussions" alt="Discussions"><img src="https://img.shields.io/badge/github-discussions-red" /></a>
   <a href="https://app.bors.tech/repositories/26457"><img src="https://bors.tech/images/badge_small.svg" alt="Bors enabled"></a>
 </p>
 
 <p align="center">⚡ Lightning Fast, Ultra Relevant, and Typo-Tolerant Search Engine 🔍</p>
 
-**MeiliSearch** is a powerful, fast, open-source, easy to use and deploy search engine. Both searching and indexing are highly customizable. Features such as typo-tolerance, filters, and synonyms are provided out-of-the-box.
+**Meilisearch** is a powerful, fast, open-source, easy to use and deploy search engine. Both searching and indexing are highly customizable. Features such as typo-tolerance, filters, and synonyms are provided out-of-the-box.
 For more information about features go to [our documentation](https://docs.meilisearch.com/).
 
 <p align="center">
@@ -58,16 +58,16 @@ meilisearch
 #### Docker
 
 ```bash
-docker run -p 7700:7700 -v "$(pwd)/data.ms:/data.ms" getmeili/meilisearch
+docker run -p 7700:7700 -v "$(pwd)/meili_data:/meili_data" getmeili/meilisearch
 ```
 
-#### Announcing a cloud-hosted MeiliSearch
+#### Announcing a cloud-hosted Meilisearch
 
-Join the closed beta by filling out this [form](https://meilisearch.typeform.com/to/FtnzvZfh).
+Join the closed beta by filling out this [form](https://meilisearch.typeform.com/to/VI2cI2rv).
 
-#### Try MeiliSearch in our Sandbox
+#### Try Meilisearch in our Sandbox
 
-Create a MeiliSearch instance in [MeiliSearch Sandbox](https://sandbox.meilisearch.com/). This instance is free, and will be active for 48 hours.
+Create a Meilisearch instance in [Meilisearch Sandbox](https://sandbox.meilisearch.com/). This instance is free, and will be active for 48 hours.
 
 #### Run on Digital Ocean
 
@@ -99,8 +99,8 @@ curl -L https://install.meilisearch.com | sh
 If you have the latest stable Rust toolchain installed on your local system, clone the repository and change it to your working directory.
 
 ```bash
-git clone https://github.com/meilisearch/MeiliSearch.git
-cd MeiliSearch
+git clone https://github.com/meilisearch/meilisearch.git
+cd meilisearch
 cargo run --release
 ```
 
@@ -109,7 +109,7 @@ cargo run --release
 Let's create an index! If you need a sample dataset, use [this movie database](https://www.notion.so/meilisearch/A-movies-dataset-to-test-Meili-1cbf7c9cfa4247249c40edfa22d7ca87#b5ae399b81834705ba5420ac70358a65). You can also find it in the `datasets/` directory.
 
 ```bash
-curl -L 'https://bit.ly/2PAcw9l' -o movies.json
+curl -L https://docs.meilisearch.com/movies.json -o movies.json
 ```
 
 Now, you're ready to index some data.
@@ -161,19 +161,19 @@ curl 'http://127.0.0.1:7700/indexes/movies/search?q=botman+robin&limit=2' | jq
 
 #### Use the Web Interface
 
-We also deliver an **out-of-the-box [web interface](https://github.com/meilisearch/mini-dashboard)** in which you can test MeiliSearch interactively.
+We also deliver an **out-of-the-box [web interface](https://github.com/meilisearch/mini-dashboard)** in which you can test Meilisearch interactively.
 
-You can access the web interface in your web browser at the root of the server. The default URL is [http://127.0.0.1:7700](http://127.0.0.1:7700). All you need to do is open your web browser and enter MeiliSearch’s address to visit it. This will lead you to a web page with a search bar that will allow you to search in the selected index.
+You can access the web interface in your web browser at the root of the server. The default URL is [http://127.0.0.1:7700](http://127.0.0.1:7700). All you need to do is open your web browser and enter Meilisearch’s address to visit it. This will lead you to a web page with a search bar that will allow you to search in the selected index.
 
 [See the gif above](#demo)
 
 ## Documentation
 
-Now that your MeiliSearch server is up and running, you can learn more about how to tune your search engine in [the documentation](https://docs.meilisearch.com).
+Now that your Meilisearch server is up and running, you can learn more about how to tune your search engine in [the documentation](https://docs.meilisearch.com).
 
 ## Contributing
 
-Hey! We're glad you're thinking about contributing to MeiliSearch! Feel free to pick an [issue labeled as `good first issue`](https://github.com/meilisearch/MeiliSearch/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22), and to ask any question you need. Some points might not be clear and we are available to help you!
+Hey! We're glad you're thinking about contributing to Meilisearch! Feel free to pick an [issue labeled as `good first issue`](https://github.com/meilisearch/meilisearch/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22), and to ask any question you need. Some points might not be clear and we are available to help you!
 
 Also, we recommend following the [CONTRIBUTING](./CONTRIBUTING.md) to create your PR.
 
@@ -184,8 +184,8 @@ The code in this repository is only concerned with managing multiple indexes, ha
 Search and indexation are the domain of our core engine, [`milli`](https://github.com/meilisearch/milli), while tokenization is handled by [our `tokenizer` library](https://github.com/meilisearch/tokenizer/).
 ## Telemetry
 
-MeiliSearch collects anonymous data regarding general usage.
-This helps us better understand developers' usage of MeiliSearch features.
+Meilisearch collects anonymous data regarding general usage.
+This helps us better understand developers' usage of Meilisearch features.
 
 To find out more on what information we're retrieving, please see our documentation on [Telemetry](https://docs.meilisearch.com/learn/what_is_meilisearch/telemetry.html).
 
@@ -193,7 +193,7 @@ This program is optional, you can disable these analytics by using the `MEILI_NO
 
 ## Feature request
 
-The feature requests are not managed in this repository. Please visit our [dedicated repository](https://github.com/meilisearch/product) to see our work about the MeiliSearch product.
+The feature requests are not managed in this repository. Please visit our [dedicated repository](https://github.com/meilisearch/product) to see our work about the Meilisearch product.
 
 If you have a feature request or any feedback about an existing feature, please open [a discussion](https://github.com/meilisearch/product/discussions).
 Also, feel free to participate in the current discussions, we are looking forward to reading your comments.
@@ -202,4 +202,4 @@ Also, feel free to participate in the current discussions, we are looking forwar
 
 Please visit [this page](https://docs.meilisearch.com/learn/what_is_meilisearch/contact.html#contact-us).
 
-MeiliSearch is developed by [Meili](https://www.meilisearch.com), a young company. To know more about us, you can [read our blog](https://blog.meilisearch.com). Any suggestion or feedback is highly appreciated. Thank you for your support!
+Meilisearch is developed by [Meili](https://www.meilisearch.com), a young company. To know more about us, you can [read our blog](https://blog.meilisearch.com). Any suggestion or feedback is highly appreciated. Thank you for your support!
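The indexing step that sits between the download hunk and the search shown in the `@@ -161` hunk context is elided from this diff. For orientation, a minimal sketch of those two calls against a local instance (default port, no API key) could look like:

```bash
# Add the downloaded documents to a `movies` index (created on the fly).
curl -X POST 'http://127.0.0.1:7700/indexes/movies/documents' \
  -H 'Content-Type: application/json' \
  --data-binary @movies.json

# Then search it, as in the README's example.
curl 'http://127.0.0.1:7700/indexes/movies/search?q=botman+robin&limit=2' | jq
```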
SECURITY.md (10 changes)

@@ -1,16 +1,16 @@
 # Security
 
-MeiliSearch takes the security of our software products and services seriously.
+Meilisearch takes the security of our software products and services seriously.
 
-If you believe you have found a security vulnerability in any MeiliSearch-owned repository, please report it to us as described below.
+If you believe you have found a security vulnerability in any Meilisearch-owned repository, please report it to us as described below.
 
-## Suported versions
+## Supported versions
 
-As long as we are pre-v1.0, only the latest version of MeiliSearch will be supported with security updates.
+As long as we are pre-v1.0, only the latest version of Meilisearch will be supported with security updates.
 
 ## Reporting security issues
 
-⚠️ Please do not report security vulnerabilities through public GitHub issues. ⚠️
+⚠️ Please do not report security vulnerabilities through public GitHub issues. ⚠️
 
 Instead, please kindly email us at security@meilisearch.com
@@ -1,17 +1,19 @@
|
||||
<svg width="360" height="360" viewBox="0 0 360 360" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g id="logo_main">
|
||||
<rect id="Rectangle" x="107.333" y="0.150146" width="274.315" height="274.315" rx="98.8334" transform="rotate(23 107.333 0.150146)" fill="url(#paint0_linear)"/>
|
||||
<path id="Rectangle_2" fill-rule="evenodd" clip-rule="evenodd" d="M61.3296 230.199C46.2224 194.608 38.6688 176.813 38.208 160.329C37.5286 136.025 47.0175 112.539 64.3891 95.5282C76.1718 83.9904 93.9669 76.4368 129.557 61.3296C165.147 46.2224 182.943 38.6688 199.427 38.208C223.731 37.5286 247.217 47.0175 264.228 64.3891C275.766 76.1718 283.319 93.9669 298.426 129.557C313.534 165.147 321.087 182.943 321.548 199.427C322.227 223.731 312.738 247.217 295.367 264.228C283.584 275.766 265.789 283.319 230.199 298.426C194.608 313.534 176.813 321.087 160.329 321.548C136.025 322.227 112.539 312.738 95.5282 295.367C83.9903 283.584 76.4368 265.789 61.3296 230.199Z" fill="url(#paint1_linear)"/>
|
||||
<path id="m" fill-rule="evenodd" clip-rule="evenodd" d="M219.568 130.748C242.363 130.748 259.263 147.451 259.263 174.569V229.001H227.232V179.678C227.232 166.119 220.747 159.634 210.136 159.634C205.223 159.634 200.311 161.796 195.595 167.494C195.791 169.852 195.988 172.21 195.988 174.569V229.001H164.154V179.678C164.154 166.119 157.472 159.634 147.057 159.634C142.145 159.634 137.429 161.992 132.712 168.084V229.001H100.878V133.695H132.712V139.394C139.197 133.892 145.878 130.748 156.49 130.748C168.477 130.748 178.695 135.267 185.769 143.52C195.791 134.678 205.42 130.748 219.568 130.748Z" fill="white"/>
|
||||
</g>
|
||||
<svg width="300" height="300" viewBox="0 0 300 300" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M0 237L55.426 96.7678C63.2367 77.0063 82.499 64 103.955 64H137.371L81.9447 204.232C74.1341 223.993 54.8717 237 33.4156 237H0Z" fill="url(#paint0_linear_1_898)"/>
|
||||
<path d="M81.3123 237L136.738 96.7682C144.549 77.0067 163.811 64.0004 185.267 64.0004H218.683L163.257 204.232C155.446 223.994 136.184 237 114.728 237H81.3123Z" fill="url(#paint1_linear_1_898)"/>
|
||||
<path d="M162.629 237L218.055 96.7682C225.866 77.0067 245.128 64.0004 266.584 64.0004H300L244.574 204.232C236.763 223.994 217.501 237 196.045 237H162.629Z" fill="url(#paint2_linear_1_898)"/>
|
||||
<defs>
|
||||
<linearGradient id="paint0_linear" x1="-13.6248" y1="129.208" x2="244.49" y2="403.522" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#E41359"/>
|
||||
<stop offset="1" stop-color="#F23C79"/>
|
||||
<linearGradient id="paint0_linear_1_898" x1="300.001" y1="50.7858" x2="1.63474" y2="221.244" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#FF5CAA"/>
|
||||
<stop offset="1" stop-color="#FF4E62"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="paint1_linear" x1="11.0088" y1="111.65" x2="111.65" y2="348.747" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#24222F"/>
|
||||
<stop offset="1" stop-color="#2B2937"/>
|
||||
<linearGradient id="paint1_linear_1_898" x1="300.001" y1="50.7858" x2="1.63474" y2="221.244" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#FF5CAA"/>
|
||||
<stop offset="1" stop-color="#FF4E62"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="paint2_linear_1_898" x1="300.001" y1="50.7858" x2="1.63474" y2="221.244" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#FF5CAA"/>
|
||||
<stop offset="1" stop-color="#FF4E62"/>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
</svg>
|
||||
Before: Size 2.0 KiB | After: Size 1.3 KiB
@@ -3,7 +3,8 @@ status = [
'Tests on macos-latest',
'Tests on windows-latest',
'Run Clippy',
'Run Rustfmt'
'Run Rustfmt',
'Run tests in debug',
]
pr_status = ['Milestone Check']
# 3 hours timeout

@@ -67,16 +67,16 @@ semverLT() {
return 1
}

# Get a token from https://github.com/settings/tokens to increasae rate limit (from 60 to 5000), make sure the token scope is set to 'public_repo'
# Create GITHUB_PAT enviroment variable once you aquired the token to start using it
# Get a token from https://github.com/settings/tokens to increase rate limit (from 60 to 5000), make sure the token scope is set to 'public_repo'
# Create GITHUB_PAT environment variable once you acquired the token to start using it
# Returns the tag of the latest stable release (in terms of semver and not of release date)
get_latest() {
|
||||
temp_file='temp_file' # temp_file needed because the grep would start before the download is over
|
||||
|
||||
if [ -z "$GITHUB_PAT" ]; then
|
||||
curl -s 'https://api.github.com/repos/meilisearch/MeiliSearch/releases' > "$temp_file" || return 1
|
||||
if [ -z "$GITHUB_PAT" ]; then
|
||||
curl -s 'https://api.github.com/repos/meilisearch/meilisearch/releases' > "$temp_file" || return 1
|
||||
else
|
||||
curl -H "Authorization: token $GITHUB_PAT" -s 'https://api.github.com/repos/meilisearch/MeiliSearch/releases' > "$temp_file" || return 1
|
||||
curl -H "Authorization: token $GITHUB_PAT" -s 'https://api.github.com/repos/meilisearch/meilisearch/releases' > "$temp_file" || return 1
|
||||
fi
|
||||
|
||||
releases=$(cat "$temp_file" | \
|
||||
@@ -89,7 +89,7 @@ get_latest() {
|
||||
latest=''
|
||||
current_tag=''
|
||||
for release_info in $releases; do
|
||||
if [ $i -eq 0 ]; then # Cheking tag_name
|
||||
if [ $i -eq 0 ]; then # Checking tag_name
|
||||
if echo "$release_info" | grep -q "$GREP_SEMVER_REGEXP"; then # If it's not an alpha or beta release
|
||||
current_tag=$release_info
|
||||
else
|
||||
@@ -120,7 +120,7 @@ get_latest() {
|
||||
done
|
||||
|
||||
rm -f "$temp_file"
|
||||
echo $latest
|
||||
return 0
|
||||
}
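
The comment above get_latest() is the key detail: the script selects the newest release by semantic version, not by publication date. A minimal Rust sketch of that selection, not from the repository; the tag list is hypothetical, and unlike the script's GREP_SEMVER_REGEXP filter it does not exclude alpha/beta tags:

fn parse_semver(tag: &str) -> Option<(u64, u64, u64)> {
    let mut parts = tag.trim_start_matches('v').splitn(3, '.');
    Some((
        parts.next()?.parse().ok()?,
        parts.next()?.parse().ok()?,
        parts.next()?.parse().ok()?,
    ))
}

fn latest_stable(tags: &[&str]) -> Option<String> {
    tags.iter()
        .filter_map(|tag| parse_semver(tag).map(|version| (version, *tag)))
        .max_by_key(|(version, _)| *version) // lexicographic (major, minor, patch)
        .map(|(_, tag)| tag.to_string())
}

fn main() {
    let tags = ["v0.25.2", "v0.26.0", "v0.28.1"]; // hypothetical release tags
    assert_eq!(latest_stable(&tags).as_deref(), Some("v0.28.1"));
}
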
|
||||
|
||||
# Gets the OS by setting the $os variable
|
||||
@@ -148,11 +148,18 @@ get_os() {
|
||||
get_archi() {
|
||||
architecture=$(uname -m)
|
||||
case "$architecture" in
|
||||
'x86_64' | 'amd64' | 'arm64')
|
||||
'x86_64' | 'amd64' )
|
||||
archi='amd64'
|
||||
;;
|
||||
'arm64')
|
||||
if [ $os = 'macos' ]; then # MacOS M1
|
||||
archi='amd64'
|
||||
else
|
||||
archi='aarch64'
|
||||
fi
|
||||
;;
|
||||
'aarch64')
|
||||
archi='armv8'
|
||||
archi='aarch64'
|
||||
;;
|
||||
*)
|
||||
return 1
|
||||
@@ -161,7 +168,7 @@ get_archi() {
|
||||
}
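
A condensed sketch of the mapping the updated get_archi() implements, not from the repository: x86_64/amd64 map to amd64, arm64 maps to amd64 only on macOS because Apple Silicon machines download the amd64 build and run it under Rosetta at this point, and aarch64 stays aarch64.

fn archi(uname_m: &str, os: &str) -> Option<&'static str> {
    match uname_m {
        "x86_64" | "amd64" => Some("amd64"),
        // Apple Silicon: fetch the amd64 build and rely on Rosetta for now.
        "arm64" if os == "macos" => Some("amd64"),
        "arm64" | "aarch64" => Some("aarch64"),
        _ => None, // unsupported: the script prints an error and exits
    }
}
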
|
||||
|
||||
success_usage() {
|
||||
printf "$GREEN%s\n$DEFAULT" "MeiliSearch $latest binary successfully downloaded as '$binary_name' file."
|
||||
printf "$GREEN%s\n$DEFAULT" "Meilisearch $latest binary successfully downloaded as '$binary_name' file."
|
||||
echo ''
|
||||
echo 'Run it:'
|
||||
echo ' $ ./meilisearch'
|
||||
@@ -169,47 +176,65 @@ success_usage() {
|
||||
echo ' $ ./meilisearch --help'
|
||||
}
|
||||
|
||||
failure_usage() {
|
||||
printf "$RED%s\n$DEFAULT" 'ERROR: MeiliSearch binary is not available for your OS distribution or your architecture yet.'
|
||||
not_available_failure_usage() {
|
||||
printf "$RED%s\n$DEFAULT" 'ERROR: Meilisearch binary is not available for your OS distribution or your architecture yet.'
|
||||
echo ''
|
||||
echo 'However, you can easily compile the binary from the source files.'
|
||||
echo 'Follow the steps at the page ("Source" tab): https://docs.meilisearch.com/learn/getting_started/installation.html'
|
||||
}
|
||||
|
||||
fetch_release_failure_usage() {
|
||||
echo ''
|
||||
printf "$RED%s\n$DEFAULT" 'ERROR: Impossible to get the latest stable version of Meilisearch.'
|
||||
echo 'Please let us know about this issue: https://github.com/meilisearch/meilisearch/issues/new/choose'
|
||||
}
|
||||
|
||||
# MAIN
|
||||
latest="$(get_latest)"
|
||||
|
||||
# Fill $latest variable
|
||||
if ! get_latest; then
|
||||
fetch_release_failure_usage # TO CHANGE
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$latest" = '' ]; then
|
||||
echo ''
|
||||
echo 'Impossible to get the latest stable version of MeiliSearch.'
|
||||
echo 'Please let us know about this issue: https://github.com/meilisearch/MeiliSearch/issues/new/choose'
|
||||
fetch_release_failure_usage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Fill $os variable
|
||||
if ! get_os; then
|
||||
failure_usage
|
||||
not_available_failure_usage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Fill $archi variable
|
||||
if ! get_archi; then
|
||||
failure_usage
|
||||
not_available_failure_usage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Downloading MeiliSearch binary $latest for $os, architecture $archi..."
|
||||
echo "Downloading Meilisearch binary $latest for $os, architecture $archi..."
|
||||
case "$os" in
|
||||
'windows')
|
||||
release_file="meilisearch-$os-$archi.exe"
|
||||
binary_name='meilisearch.exe'
|
||||
binary_name='meilisearch.exe'
|
||||
|
||||
;;
|
||||
*)
|
||||
release_file="meilisearch-$os-$archi"
|
||||
binary_name='meilisearch'
|
||||
*)
|
||||
release_file="meilisearch-$os-$archi"
|
||||
binary_name='meilisearch'
|
||||
|
||||
esac
|
||||
link="https://github.com/meilisearch/MeiliSearch/releases/download/$latest/$release_file"
|
||||
curl -OL "$link"
|
||||
|
||||
# Fetch the Meilisearch binary
|
||||
link="https://github.com/meilisearch/meilisearch/releases/download/$latest/$release_file"
|
||||
curl --fail -OL "$link"
|
||||
if [ $? -ne 0 ]; then
|
||||
fetch_release_failure_usage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
mv "$release_file" "$binary_name"
|
||||
chmod 744 "$binary_name"
|
||||
success_usage
|
||||
|
||||
@@ -1,15 +1,17 @@
|
||||
[package]
|
||||
name = "meilisearch-auth"
|
||||
version = "0.25.0"
|
||||
edition = "2018"
|
||||
version = "0.28.1"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
enum-iterator = "0.7.0"
|
||||
heed = { git = "https://github.com/Kerollmops/heed", tag = "v0.12.1" }
|
||||
sha2 = "0.9.6"
|
||||
chrono = { version = "0.4.19", features = ["serde"] }
|
||||
meilisearch-error = { path = "../meilisearch-error" }
|
||||
serde_json = { version = "1.0.67", features = ["preserve_order"] }
|
||||
hmac = "0.12.1"
|
||||
meilisearch-types = { path = "../meilisearch-types" }
|
||||
milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.31.2" }
|
||||
rand = "0.8.4"
|
||||
serde = { version = "1.0.130", features = ["derive"] }
|
||||
thiserror = "1.0.28"
|
||||
serde = { version = "1.0.136", features = ["derive"] }
|
||||
serde_json = { version = "1.0.79", features = ["preserve_order"] }
|
||||
sha2 = "0.10.2"
|
||||
thiserror = "1.0.30"
|
||||
time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
|
||||
uuid = { version = "1.1.2", features = ["serde", "v4"] }
|
||||
|
||||
@@ -1,19 +1,24 @@
|
||||
use enum_iterator::IntoEnumIterator;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::hash::Hash;
|
||||
|
||||
#[derive(IntoEnumIterator, Copy, Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
|
||||
#[derive(IntoEnumIterator, Copy, Clone, Serialize, Deserialize, Debug, Eq, PartialEq, Hash)]
|
||||
#[repr(u8)]
|
||||
pub enum Action {
|
||||
#[serde(rename = "*")]
|
||||
All = 0,
|
||||
All = actions::ALL,
|
||||
#[serde(rename = "search")]
|
||||
Search = actions::SEARCH,
|
||||
#[serde(rename = "documents.*")]
|
||||
DocumentsAll = actions::DOCUMENTS_ALL,
|
||||
#[serde(rename = "documents.add")]
|
||||
DocumentsAdd = actions::DOCUMENTS_ADD,
|
||||
#[serde(rename = "documents.get")]
|
||||
DocumentsGet = actions::DOCUMENTS_GET,
|
||||
#[serde(rename = "documents.delete")]
|
||||
DocumentsDelete = actions::DOCUMENTS_DELETE,
|
||||
#[serde(rename = "indexes.*")]
|
||||
IndexesAll = actions::INDEXES_ALL,
|
||||
#[serde(rename = "indexes.create")]
|
||||
IndexesAdd = actions::INDEXES_CREATE,
|
||||
#[serde(rename = "indexes.get")]
|
||||
@@ -22,42 +27,65 @@ pub enum Action {
|
||||
IndexesUpdate = actions::INDEXES_UPDATE,
|
||||
#[serde(rename = "indexes.delete")]
|
||||
IndexesDelete = actions::INDEXES_DELETE,
|
||||
#[serde(rename = "tasks.*")]
|
||||
TasksAll = actions::TASKS_ALL,
|
||||
#[serde(rename = "tasks.get")]
|
||||
TasksGet = actions::TASKS_GET,
|
||||
#[serde(rename = "settings.*")]
|
||||
SettingsAll = actions::SETTINGS_ALL,
|
||||
#[serde(rename = "settings.get")]
|
||||
SettingsGet = actions::SETTINGS_GET,
|
||||
#[serde(rename = "settings.update")]
|
||||
SettingsUpdate = actions::SETTINGS_UPDATE,
|
||||
#[serde(rename = "stats.*")]
|
||||
StatsAll = actions::STATS_ALL,
|
||||
#[serde(rename = "stats.get")]
|
||||
StatsGet = actions::STATS_GET,
|
||||
#[serde(rename = "dumps.*")]
|
||||
DumpsAll = actions::DUMPS_ALL,
|
||||
#[serde(rename = "dumps.create")]
|
||||
DumpsCreate = actions::DUMPS_CREATE,
|
||||
#[serde(rename = "dumps.get")]
|
||||
DumpsGet = actions::DUMPS_GET,
|
||||
#[serde(rename = "version")]
|
||||
Version = actions::VERSION,
|
||||
#[serde(rename = "keys.create")]
|
||||
KeysAdd = actions::KEYS_CREATE,
|
||||
#[serde(rename = "keys.get")]
|
||||
KeysGet = actions::KEYS_GET,
|
||||
#[serde(rename = "keys.update")]
|
||||
KeysUpdate = actions::KEYS_UPDATE,
|
||||
#[serde(rename = "keys.delete")]
|
||||
KeysDelete = actions::KEYS_DELETE,
|
||||
}
|
||||
|
||||
impl Action {
|
||||
pub fn from_repr(repr: u8) -> Option<Self> {
|
||||
use actions::*;
|
||||
match repr {
|
||||
0 => Some(Self::All),
|
||||
ALL => Some(Self::All),
|
||||
SEARCH => Some(Self::Search),
|
||||
DOCUMENTS_ALL => Some(Self::DocumentsAll),
|
||||
DOCUMENTS_ADD => Some(Self::DocumentsAdd),
|
||||
DOCUMENTS_GET => Some(Self::DocumentsGet),
|
||||
DOCUMENTS_DELETE => Some(Self::DocumentsDelete),
|
||||
INDEXES_ALL => Some(Self::IndexesAll),
|
||||
INDEXES_CREATE => Some(Self::IndexesAdd),
|
||||
INDEXES_GET => Some(Self::IndexesGet),
|
||||
INDEXES_UPDATE => Some(Self::IndexesUpdate),
|
||||
INDEXES_DELETE => Some(Self::IndexesDelete),
|
||||
TASKS_ALL => Some(Self::TasksAll),
|
||||
TASKS_GET => Some(Self::TasksGet),
|
||||
SETTINGS_ALL => Some(Self::SettingsAll),
|
||||
SETTINGS_GET => Some(Self::SettingsGet),
|
||||
SETTINGS_UPDATE => Some(Self::SettingsUpdate),
|
||||
STATS_ALL => Some(Self::StatsAll),
|
||||
STATS_GET => Some(Self::StatsGet),
|
||||
DUMPS_ALL => Some(Self::DumpsAll),
|
||||
DUMPS_CREATE => Some(Self::DumpsCreate),
|
||||
DUMPS_GET => Some(Self::DumpsGet),
|
||||
VERSION => Some(Self::Version),
|
||||
KEYS_CREATE => Some(Self::KeysAdd),
|
||||
KEYS_GET => Some(Self::KeysGet),
|
||||
KEYS_UPDATE => Some(Self::KeysUpdate),
|
||||
KEYS_DELETE => Some(Self::KeysDelete),
|
||||
_otherwise => None,
|
||||
}
|
||||
}
|
||||
@@ -65,40 +93,59 @@ impl Action {
|
||||
pub fn repr(&self) -> u8 {
|
||||
use actions::*;
|
||||
match self {
|
||||
Self::All => 0,
|
||||
Self::All => ALL,
|
||||
Self::Search => SEARCH,
|
||||
Self::DocumentsAll => DOCUMENTS_ALL,
|
||||
Self::DocumentsAdd => DOCUMENTS_ADD,
|
||||
Self::DocumentsGet => DOCUMENTS_GET,
|
||||
Self::DocumentsDelete => DOCUMENTS_DELETE,
|
||||
Self::IndexesAll => INDEXES_ALL,
|
||||
Self::IndexesAdd => INDEXES_CREATE,
|
||||
Self::IndexesGet => INDEXES_GET,
|
||||
Self::IndexesUpdate => INDEXES_UPDATE,
|
||||
Self::IndexesDelete => INDEXES_DELETE,
|
||||
Self::TasksAll => TASKS_ALL,
|
||||
Self::TasksGet => TASKS_GET,
|
||||
Self::SettingsAll => SETTINGS_ALL,
|
||||
Self::SettingsGet => SETTINGS_GET,
|
||||
Self::SettingsUpdate => SETTINGS_UPDATE,
|
||||
Self::StatsAll => STATS_ALL,
|
||||
Self::StatsGet => STATS_GET,
|
||||
Self::DumpsAll => DUMPS_ALL,
|
||||
Self::DumpsCreate => DUMPS_CREATE,
|
||||
Self::DumpsGet => DUMPS_GET,
|
||||
Self::Version => VERSION,
|
||||
Self::KeysAdd => KEYS_CREATE,
|
||||
Self::KeysGet => KEYS_GET,
|
||||
Self::KeysUpdate => KEYS_UPDATE,
|
||||
Self::KeysDelete => KEYS_DELETE,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub mod actions {
|
||||
pub(crate) const ALL: u8 = 0;
|
||||
pub const SEARCH: u8 = 1;
|
||||
pub const DOCUMENTS_ADD: u8 = 2;
|
||||
pub const DOCUMENTS_GET: u8 = 3;
|
||||
pub const DOCUMENTS_DELETE: u8 = 4;
|
||||
pub const INDEXES_CREATE: u8 = 5;
|
||||
pub const INDEXES_GET: u8 = 6;
|
||||
pub const INDEXES_UPDATE: u8 = 7;
|
||||
pub const INDEXES_DELETE: u8 = 8;
|
||||
pub const TASKS_GET: u8 = 9;
|
||||
pub const SETTINGS_GET: u8 = 10;
|
||||
pub const SETTINGS_UPDATE: u8 = 11;
|
||||
pub const STATS_GET: u8 = 12;
|
||||
pub const DUMPS_CREATE: u8 = 13;
|
||||
pub const DUMPS_GET: u8 = 14;
|
||||
pub const VERSION: u8 = 15;
|
||||
pub const DOCUMENTS_ALL: u8 = 2;
|
||||
pub const DOCUMENTS_ADD: u8 = 3;
|
||||
pub const DOCUMENTS_GET: u8 = 4;
|
||||
pub const DOCUMENTS_DELETE: u8 = 5;
|
||||
pub const INDEXES_ALL: u8 = 6;
|
||||
pub const INDEXES_CREATE: u8 = 7;
|
||||
pub const INDEXES_GET: u8 = 8;
|
||||
pub const INDEXES_UPDATE: u8 = 9;
|
||||
pub const INDEXES_DELETE: u8 = 10;
|
||||
pub const TASKS_ALL: u8 = 11;
|
||||
pub const TASKS_GET: u8 = 12;
|
||||
pub const SETTINGS_ALL: u8 = 13;
|
||||
pub const SETTINGS_GET: u8 = 14;
|
||||
pub const SETTINGS_UPDATE: u8 = 15;
|
||||
pub const STATS_ALL: u8 = 16;
|
||||
pub const STATS_GET: u8 = 17;
|
||||
pub const DUMPS_ALL: u8 = 18;
|
||||
pub const DUMPS_CREATE: u8 = 19;
|
||||
pub const VERSION: u8 = 20;
|
||||
pub const KEYS_CREATE: u8 = 21;
|
||||
pub const KEYS_GET: u8 = 22;
|
||||
pub const KEYS_UPDATE: u8 = 23;
|
||||
pub const KEYS_DELETE: u8 = 24;
|
||||
}
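
Each variant now carries a stable u8 discriminant from the actions module, and from_repr/repr above must remain exact inverses because that byte is what gets persisted in the key store. A small test sketch, assumed rather than taken from the repository, that checks the round trip using the IntoEnumIterator derive shown above:

#[test]
fn action_repr_roundtrip() {
    use enum_iterator::IntoEnumIterator;
    // Every persisted discriminant must decode back to the same variant.
    for action in Action::into_enum_iter() {
        assert_eq!(Action::from_repr(action.repr()), Some(action));
    }
}
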
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
use serde_json::Deserializer;
|
||||
|
||||
use std::fs::File;
|
||||
use std::io::BufRead;
|
||||
use std::io::BufReader;
|
||||
use std::io::Write;
|
||||
use std::path::Path;
|
||||
@@ -10,7 +11,10 @@ const KEYS_PATH: &str = "keys";
|
||||
|
||||
impl AuthController {
|
||||
pub fn dump(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> Result<()> {
|
||||
let store = HeedAuthStore::new(&src)?;
|
||||
let mut store = HeedAuthStore::new(&src)?;
|
||||
|
||||
// do not attempt to close the database on drop!
|
||||
store.set_drop_on_close(false);
|
||||
|
||||
let keys_file_path = dst.as_ref().join(KEYS_PATH);
|
||||
|
||||
@@ -33,10 +37,9 @@ impl AuthController {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let mut reader = BufReader::new(File::open(&keys_file_path)?).lines();
|
||||
while let Some(key) = reader.next().transpose()? {
|
||||
let key = serde_json::from_str(&key)?;
|
||||
store.put_api_key(key)?;
|
||||
let reader = BufReader::new(File::open(&keys_file_path)?);
|
||||
for key in Deserializer::from_reader(reader).into_iter() {
|
||||
store.put_api_key(key?)?;
|
||||
}
|
||||
|
||||
Ok(())
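
The rewritten loader streams JSON values with serde_json's Deserializer instead of requiring exactly one key per line. A self-contained sketch of that pattern, not from the repository; the dump payload is hypothetical:

use serde_json::{Deserializer, Value};

fn read_keys(data: &str) -> serde_json::Result<Vec<Value>> {
    // into_iter() yields each JSON value in the stream in turn; it only
    // needs valid values back to back, not a value per line.
    Deserializer::from_reader(data.as_bytes())
        .into_iter::<Value>()
        .collect()
}

fn main() -> serde_json::Result<()> {
    let dump = "{\"uid\":\"k1\"}\n{\"uid\":\"k2\"}"; // hypothetical keys dump
    assert_eq!(read_keys(dump)?.len(), 2);
    Ok(())
}
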
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use std::error::Error;
|
||||
|
||||
use meilisearch_error::ErrorCode;
|
||||
use meilisearch_error::{internal_error, Code};
|
||||
use meilisearch_types::error::{Code, ErrorCode};
|
||||
use meilisearch_types::internal_error;
|
||||
use serde_json::Value;
|
||||
|
||||
pub type Result<T> = std::result::Result<T, AuthControllerError>;
|
||||
@@ -10,22 +10,32 @@ pub type Result<T> = std::result::Result<T, AuthControllerError>;
|
||||
pub enum AuthControllerError {
|
||||
#[error("`{0}` field is mandatory.")]
|
||||
MissingParameter(&'static str),
|
||||
#[error("actions field value `{0}` is invalid. It should be an array of string representing action names.")]
|
||||
#[error("`actions` field value `{0}` is invalid. It should be an array of string representing action names.")]
|
||||
InvalidApiKeyActions(Value),
|
||||
#[error("indexes field value `{0}` is invalid. It should be an array of string representing index names.")]
|
||||
#[error("`indexes` field value `{0}` is invalid. It should be an array of string representing index names.")]
|
||||
InvalidApiKeyIndexes(Value),
|
||||
#[error("expiresAt field value `{0}` is invalid. It should be in ISO-8601 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DDTHH:MM:SS'.")]
|
||||
#[error("`expiresAt` field value `{0}` is invalid. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.")]
|
||||
InvalidApiKeyExpiresAt(Value),
|
||||
#[error("description field value `{0}` is invalid. It should be a string or specified as a null value.")]
|
||||
#[error("`description` field value `{0}` is invalid. It should be a string or specified as a null value.")]
|
||||
InvalidApiKeyDescription(Value),
|
||||
#[error(
|
||||
"`name` field value `{0}` is invalid. It should be a string or specified as a null value."
|
||||
)]
|
||||
InvalidApiKeyName(Value),
|
||||
#[error("`uid` field value `{0}` is invalid. It should be a valid UUID v4 string or omitted.")]
|
||||
InvalidApiKeyUid(Value),
|
||||
#[error("API key `{0}` not found.")]
|
||||
ApiKeyNotFound(String),
|
||||
#[error("`uid` field value `{0}` is already an existing API key.")]
|
||||
ApiKeyAlreadyExists(String),
|
||||
#[error("The `{0}` field cannot be modified for the given resource.")]
|
||||
ImmutableField(String),
|
||||
#[error("Internal error: {0}")]
|
||||
Internal(Box<dyn Error + Send + Sync + 'static>),
|
||||
}
|
||||
|
||||
internal_error!(
|
||||
AuthControllerError: heed::Error,
|
||||
AuthControllerError: milli::heed::Error,
|
||||
std::io::Error,
|
||||
serde_json::Error,
|
||||
std::str::Utf8Error
|
||||
@@ -39,7 +49,11 @@ impl ErrorCode for AuthControllerError {
|
||||
Self::InvalidApiKeyIndexes(_) => Code::InvalidApiKeyIndexes,
|
||||
Self::InvalidApiKeyExpiresAt(_) => Code::InvalidApiKeyExpiresAt,
|
||||
Self::InvalidApiKeyDescription(_) => Code::InvalidApiKeyDescription,
|
||||
Self::InvalidApiKeyName(_) => Code::InvalidApiKeyName,
|
||||
Self::ApiKeyNotFound(_) => Code::ApiKeyNotFound,
|
||||
Self::InvalidApiKeyUid(_) => Code::InvalidApiKeyUid,
|
||||
Self::ApiKeyAlreadyExists(_) => Code::ApiKeyAlreadyExists,
|
||||
Self::ImmutableField(_) => Code::ImmutableField,
|
||||
Self::Internal(_) => Code::Internal,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,34 +1,56 @@
|
||||
use crate::action::Action;
|
||||
use crate::error::{AuthControllerError, Result};
|
||||
use crate::store::{KeyId, KEY_ID_LENGTH};
|
||||
use chrono::{DateTime, NaiveDate, NaiveDateTime, Utc};
|
||||
use rand::Rng;
|
||||
use crate::store::KeyId;
|
||||
|
||||
use meilisearch_types::index_uid::IndexUid;
|
||||
use meilisearch_types::star_or::StarOr;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::{from_value, Value};
|
||||
use time::format_description::well_known::Rfc3339;
|
||||
use time::macros::{format_description, time};
|
||||
use time::{Date, OffsetDateTime, PrimitiveDateTime};
|
||||
use uuid::Uuid;
|
||||
|
||||
#[derive(Debug, Deserialize, Serialize)]
|
||||
pub struct Key {
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub description: Option<String>,
|
||||
pub id: KeyId,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub name: Option<String>,
|
||||
pub uid: KeyId,
|
||||
pub actions: Vec<Action>,
|
||||
pub indexes: Vec<String>,
|
||||
pub expires_at: Option<DateTime<Utc>>,
|
||||
pub created_at: DateTime<Utc>,
|
||||
pub updated_at: DateTime<Utc>,
|
||||
pub indexes: Vec<StarOr<IndexUid>>,
|
||||
#[serde(with = "time::serde::rfc3339::option")]
|
||||
pub expires_at: Option<OffsetDateTime>,
|
||||
#[serde(with = "time::serde::rfc3339")]
|
||||
pub created_at: OffsetDateTime,
|
||||
#[serde(with = "time::serde::rfc3339")]
|
||||
pub updated_at: OffsetDateTime,
|
||||
}
|
||||
|
||||
impl Key {
|
||||
pub fn create_from_value(value: Value) -> Result<Self> {
|
||||
let description = value
|
||||
.get("description")
|
||||
.map(|des| {
|
||||
from_value(des.clone())
|
||||
.map_err(|_| AuthControllerError::InvalidApiKeyDescription(des.clone()))
|
||||
})
|
||||
.transpose()?;
|
||||
let name = match value.get("name") {
|
||||
None | Some(Value::Null) => None,
|
||||
Some(des) => from_value(des.clone())
|
||||
.map(Some)
|
||||
.map_err(|_| AuthControllerError::InvalidApiKeyName(des.clone()))?,
|
||||
};
|
||||
|
||||
let id = generate_id();
|
||||
let description = match value.get("description") {
|
||||
None | Some(Value::Null) => None,
|
||||
Some(des) => from_value(des.clone())
|
||||
.map(Some)
|
||||
.map_err(|_| AuthControllerError::InvalidApiKeyDescription(des.clone()))?,
|
||||
};
|
||||
|
||||
let uid = value.get("uid").map_or_else(
|
||||
|| Ok(Uuid::new_v4()),
|
||||
|uid| {
|
||||
from_value(uid.clone())
|
||||
.map_err(|_| AuthControllerError::InvalidApiKeyUid(uid.clone()))
|
||||
},
|
||||
)?;
|
||||
|
||||
let actions = value
|
||||
.get("actions")
|
||||
@@ -51,12 +73,13 @@ impl Key {
|
||||
.map(parse_expiration_date)
|
||||
.ok_or(AuthControllerError::MissingParameter("expiresAt"))??;
|
||||
|
||||
let created_at = Utc::now();
|
||||
let updated_at = Utc::now();
|
||||
let created_at = OffsetDateTime::now_utc();
|
||||
let updated_at = created_at;
|
||||
|
||||
Ok(Self {
|
||||
name,
|
||||
description,
|
||||
id,
|
||||
uid,
|
||||
actions,
|
||||
indexes,
|
||||
expires_at,
|
||||
@@ -72,83 +95,100 @@ impl Key {
|
||||
self.description = des?;
|
||||
}
|
||||
|
||||
if let Some(act) = value.get("actions") {
|
||||
let act = from_value(act.clone())
|
||||
.map_err(|_| AuthControllerError::InvalidApiKeyActions(act.clone()));
|
||||
self.actions = act?;
|
||||
if let Some(des) = value.get("name") {
|
||||
let des = from_value(des.clone())
|
||||
.map_err(|_| AuthControllerError::InvalidApiKeyName(des.clone()));
|
||||
self.name = des?;
|
||||
}
|
||||
|
||||
if let Some(ind) = value.get("indexes") {
|
||||
let ind = from_value(ind.clone())
|
||||
.map_err(|_| AuthControllerError::InvalidApiKeyIndexes(ind.clone()));
|
||||
self.indexes = ind?;
|
||||
if value.get("uid").is_some() {
|
||||
return Err(AuthControllerError::ImmutableField("uid".to_string()));
|
||||
}
|
||||
|
||||
if let Some(exp) = value.get("expiresAt") {
|
||||
self.expires_at = parse_expiration_date(exp)?;
|
||||
if value.get("actions").is_some() {
|
||||
return Err(AuthControllerError::ImmutableField("actions".to_string()));
|
||||
}
|
||||
|
||||
self.updated_at = Utc::now();
|
||||
if value.get("indexes").is_some() {
|
||||
return Err(AuthControllerError::ImmutableField("indexes".to_string()));
|
||||
}
|
||||
|
||||
if value.get("expiresAt").is_some() {
|
||||
return Err(AuthControllerError::ImmutableField("expiresAt".to_string()));
|
||||
}
|
||||
|
||||
if value.get("createdAt").is_some() {
|
||||
return Err(AuthControllerError::ImmutableField("createdAt".to_string()));
|
||||
}
|
||||
|
||||
if value.get("updatedAt").is_some() {
|
||||
return Err(AuthControllerError::ImmutableField("updatedAt".to_string()));
|
||||
}
|
||||
|
||||
self.updated_at = OffsetDateTime::now_utc();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn default_admin() -> Self {
|
||||
let now = OffsetDateTime::now_utc();
|
||||
let uid = Uuid::new_v4();
|
||||
Self {
|
||||
description: Some("Default Admin API Key (Use it for all other operations. Caution! Do not use it on a public frontend)".to_string()),
|
||||
id: generate_id(),
|
||||
name: Some("Default Admin API Key".to_string()),
|
||||
description: Some("Use it for anything that is not a search operation. Caution! Do not expose it on a public frontend".to_string()),
|
||||
uid,
|
||||
actions: vec![Action::All],
|
||||
indexes: vec!["*".to_string()],
|
||||
indexes: vec![StarOr::Star],
|
||||
expires_at: None,
|
||||
created_at: Utc::now(),
|
||||
updated_at: Utc::now(),
|
||||
created_at: now,
|
||||
updated_at: now,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn default_search() -> Self {
|
||||
let now = OffsetDateTime::now_utc();
|
||||
let uid = Uuid::new_v4();
|
||||
Self {
|
||||
description: Some(
|
||||
"Default Search API Key (Use it to search from the frontend)".to_string(),
|
||||
),
|
||||
id: generate_id(),
|
||||
name: Some("Default Search API Key".to_string()),
|
||||
description: Some("Use it to search from the frontend".to_string()),
|
||||
uid,
|
||||
actions: vec![Action::Search],
|
||||
indexes: vec!["*".to_string()],
|
||||
indexes: vec![StarOr::Star],
|
||||
expires_at: None,
|
||||
created_at: Utc::now(),
|
||||
updated_at: Utc::now(),
|
||||
created_at: now,
|
||||
updated_at: now,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Generate a printable key of 64 characters using thread_rng.
|
||||
fn generate_id() -> [u8; KEY_ID_LENGTH] {
|
||||
const CHARSET: &[u8] = b"abcdefghijklmnopqrstuvwxyz0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ";
|
||||
|
||||
let mut rng = rand::thread_rng();
|
||||
let mut bytes = [0; KEY_ID_LENGTH];
|
||||
for byte in bytes.iter_mut() {
|
||||
*byte = CHARSET[rng.gen_range(0..CHARSET.len())];
|
||||
}
|
||||
|
||||
bytes
|
||||
}
|
||||
|
||||
fn parse_expiration_date(value: &Value) -> Result<Option<DateTime<Utc>>> {
|
||||
fn parse_expiration_date(value: &Value) -> Result<Option<OffsetDateTime>> {
|
||||
match value {
|
||||
Value::String(string) => DateTime::parse_from_rfc3339(string)
|
||||
.map(|d| d.into())
|
||||
Value::String(string) => OffsetDateTime::parse(string, &Rfc3339)
|
||||
.or_else(|_| {
|
||||
NaiveDateTime::parse_from_str(string, "%Y-%m-%dT%H:%M:%S")
|
||||
.map(|naive| DateTime::from_utc(naive, Utc))
|
||||
PrimitiveDateTime::parse(
|
||||
string,
|
||||
format_description!(
|
||||
"[year repr:full base:calendar]-[month repr:numerical]-[day]T[hour]:[minute]:[second]"
|
||||
),
|
||||
).map(|datetime| datetime.assume_utc())
|
||||
})
|
||||
.or_else(|_| {
|
||||
NaiveDate::parse_from_str(string, "%Y-%m-%d")
|
||||
.map(|naive| DateTime::from_utc(naive.and_hms(0, 0, 0), Utc))
|
||||
PrimitiveDateTime::parse(
|
||||
string,
|
||||
format_description!(
|
||||
"[year repr:full base:calendar]-[month repr:numerical]-[day] [hour]:[minute]:[second]"
|
||||
),
|
||||
).map(|datetime| datetime.assume_utc())
|
||||
})
|
||||
.or_else(|_| {
|
||||
Date::parse(string, format_description!(
|
||||
"[year repr:full base:calendar]-[month repr:numerical]-[day]"
|
||||
)).map(|date| PrimitiveDateTime::new(date, time!(00:00)).assume_utc())
|
||||
})
|
||||
.map_err(|_| AuthControllerError::InvalidApiKeyExpiresAt(value.clone()))
|
||||
// check if the key is already expired.
|
||||
.and_then(|d| {
|
||||
if d > Utc::now() {
|
||||
if d > OffsetDateTime::now_utc() {
|
||||
Ok(d)
|
||||
} else {
|
||||
Err(AuthControllerError::InvalidApiKeyExpiresAt(value.clone()))
|
||||
|
||||
@@ -4,17 +4,22 @@ pub mod error;
|
||||
mod key;
|
||||
mod store;
|
||||
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::ops::Deref;
|
||||
use std::path::Path;
|
||||
use std::str::from_utf8;
|
||||
use std::sync::Arc;
|
||||
|
||||
use chrono::Utc;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
use sha2::{Digest, Sha256};
|
||||
use time::OffsetDateTime;
|
||||
use uuid::Uuid;
|
||||
|
||||
pub use action::{actions, Action};
|
||||
use error::{AuthControllerError, Result};
|
||||
pub use key::Key;
|
||||
use meilisearch_types::star_or::StarOr;
|
||||
use store::generate_key_as_hexa;
|
||||
pub use store::open_auth_store_env;
|
||||
use store::HeedAuthStore;
|
||||
|
||||
#[derive(Clone)]
|
||||
@@ -37,59 +42,91 @@ impl AuthController {
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn create_key(&self, value: Value) -> Result<Key> {
|
||||
pub fn create_key(&self, value: Value) -> Result<Key> {
|
||||
let key = Key::create_from_value(value)?;
|
||||
self.store.put_api_key(key)
|
||||
match self.store.get_api_key(key.uid)? {
|
||||
Some(_) => Err(AuthControllerError::ApiKeyAlreadyExists(
|
||||
key.uid.to_string(),
|
||||
)),
|
||||
None => self.store.put_api_key(key),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn update_key(&self, key: impl AsRef<str>, value: Value) -> Result<Key> {
|
||||
let mut key = self.get_key(key).await?;
|
||||
pub fn update_key(&self, uid: Uuid, value: Value) -> Result<Key> {
|
||||
let mut key = self.get_key(uid)?;
|
||||
key.update_from_value(value)?;
|
||||
self.store.put_api_key(key)
|
||||
}
|
||||
|
||||
pub async fn get_key(&self, key: impl AsRef<str>) -> Result<Key> {
|
||||
pub fn get_key(&self, uid: Uuid) -> Result<Key> {
|
||||
self.store
|
||||
.get_api_key(&key)?
|
||||
.ok_or_else(|| AuthControllerError::ApiKeyNotFound(key.as_ref().to_string()))
|
||||
.get_api_key(uid)?
|
||||
.ok_or_else(|| AuthControllerError::ApiKeyNotFound(uid.to_string()))
|
||||
}
|
||||
|
||||
pub fn get_key_filters(&self, key: impl AsRef<str>) -> Result<AuthFilter> {
|
||||
let mut filters = AuthFilter::default();
|
||||
if self
|
||||
.master_key
|
||||
.as_ref()
|
||||
.map_or(false, |master_key| master_key != key.as_ref())
|
||||
{
|
||||
let key = self
|
||||
pub fn get_optional_uid_from_encoded_key(&self, encoded_key: &[u8]) -> Result<Option<Uuid>> {
|
||||
match &self.master_key {
|
||||
Some(master_key) => self
|
||||
.store
|
||||
.get_api_key(&key)?
|
||||
.ok_or_else(|| AuthControllerError::ApiKeyNotFound(key.as_ref().to_string()))?;
|
||||
|
||||
if !key.indexes.iter().any(|i| i.as_str() == "*") {
|
||||
filters.indexes = Some(key.indexes);
|
||||
}
|
||||
|
||||
filters.allow_index_creation = key
|
||||
.actions
|
||||
.iter()
|
||||
.any(|&action| action == Action::IndexesAdd || action == Action::All);
|
||||
.get_uid_from_encoded_key(encoded_key, master_key.as_bytes()),
|
||||
None => Ok(None),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_uid_from_encoded_key(&self, encoded_key: &str) -> Result<Uuid> {
|
||||
self.get_optional_uid_from_encoded_key(encoded_key.as_bytes())?
|
||||
.ok_or_else(|| AuthControllerError::ApiKeyNotFound(encoded_key.to_string()))
|
||||
}
|
||||
|
||||
pub fn get_key_filters(
|
||||
&self,
|
||||
uid: Uuid,
|
||||
search_rules: Option<SearchRules>,
|
||||
) -> Result<AuthFilter> {
|
||||
let mut filters = AuthFilter::default();
|
||||
let key = self
|
||||
.store
|
||||
.get_api_key(uid)?
|
||||
.ok_or_else(|| AuthControllerError::ApiKeyNotFound(uid.to_string()))?;
|
||||
|
||||
if !key.indexes.iter().any(|i| i == &StarOr::Star) {
|
||||
filters.search_rules = match search_rules {
|
||||
// Intersect search_rules with parent key authorized indexes.
|
||||
Some(search_rules) => SearchRules::Map(
|
||||
key.indexes
|
||||
.into_iter()
|
||||
.filter_map(|index| {
|
||||
search_rules.get_index_search_rules(index.deref()).map(
|
||||
|index_search_rules| {
|
||||
(String::from(index), Some(index_search_rules))
|
||||
},
|
||||
)
|
||||
})
|
||||
.collect(),
|
||||
),
|
||||
None => SearchRules::Set(key.indexes.into_iter().map(String::from).collect()),
|
||||
};
|
||||
} else if let Some(search_rules) = search_rules {
|
||||
filters.search_rules = search_rules;
|
||||
}
|
||||
|
||||
filters.allow_index_creation = key
|
||||
.actions
|
||||
.iter()
|
||||
.any(|&action| action == Action::IndexesAdd || action == Action::All);
|
||||
|
||||
Ok(filters)
|
||||
}
|
||||
|
||||
pub async fn list_keys(&self) -> Result<Vec<Key>> {
|
||||
pub fn list_keys(&self) -> Result<Vec<Key>> {
|
||||
self.store.list_api_keys()
|
||||
}
|
||||
|
||||
pub async fn delete_key(&self, key: impl AsRef<str>) -> Result<()> {
|
||||
if self.store.delete_api_key(&key)? {
|
||||
pub fn delete_key(&self, uid: Uuid) -> Result<()> {
|
||||
if self.store.delete_api_key(uid)? {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(AuthControllerError::ApiKeyNotFound(
|
||||
key.as_ref().to_string(),
|
||||
))
|
||||
Err(AuthControllerError::ApiKeyNotFound(uid.to_string()))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -97,50 +134,120 @@ impl AuthController {
|
||||
self.master_key.as_ref()
|
||||
}
|
||||
|
||||
pub fn authenticate(&self, token: &[u8], action: Action, index: Option<&[u8]>) -> Result<bool> {
|
||||
if let Some(master_key) = &self.master_key {
|
||||
if let Some((id, exp)) = self
|
||||
.store
|
||||
// check if the key has access to all indexes.
|
||||
.get_expiration_date(token, action, None)?
|
||||
.or(match index {
|
||||
// else check if the key has access to the requested index.
|
||||
Some(index) => self.store.get_expiration_date(token, action, Some(index))?,
|
||||
// or to any index if no index has been requested.
|
||||
None => self.store.prefix_first_expiration_date(token, action)?,
|
||||
})
|
||||
{
|
||||
let id = from_utf8(&id)?;
|
||||
if exp.map_or(true, |exp| Utc::now() < exp)
|
||||
&& generate_key(master_key.as_bytes(), id).as_bytes() == token
|
||||
{
|
||||
return Ok(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
/// Generate a valid key from a key id using the current master key.
|
||||
/// Returns None if no master key has been set.
|
||||
pub fn generate_key(&self, uid: Uuid) -> Option<String> {
|
||||
self.master_key
|
||||
.as_ref()
|
||||
.map(|master_key| generate_key_as_hexa(uid, master_key.as_bytes()))
|
||||
}
|
||||
|
||||
Ok(false)
|
||||
/// Check if the provided key is authorized to make a specific action
|
||||
/// without checking if the key is valid.
|
||||
pub fn is_key_authorized(
|
||||
&self,
|
||||
uid: Uuid,
|
||||
action: Action,
|
||||
index: Option<&str>,
|
||||
) -> Result<bool> {
|
||||
match self
|
||||
.store
|
||||
// check if the key has access to all indexes.
|
||||
.get_expiration_date(uid, action, None)?
|
||||
.or(match index {
|
||||
// else check if the key has access to the requested index.
|
||||
Some(index) => {
|
||||
self.store
|
||||
.get_expiration_date(uid, action, Some(index.as_bytes()))?
|
||||
}
|
||||
// or to any index if no index has been requested.
|
||||
None => self.store.prefix_first_expiration_date(uid, action)?,
|
||||
}) {
|
||||
// check expiration date.
|
||||
Some(Some(exp)) => Ok(OffsetDateTime::now_utc() < exp),
|
||||
// no expiration date.
|
||||
Some(None) => Ok(true),
|
||||
// action or index forbidden.
|
||||
None => Ok(false),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct AuthFilter {
|
||||
pub indexes: Option<Vec<String>>,
|
||||
pub search_rules: SearchRules,
|
||||
pub allow_index_creation: bool,
|
||||
}
|
||||
|
||||
impl Default for AuthFilter {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
indexes: None,
|
||||
search_rules: SearchRules::default(),
|
||||
allow_index_creation: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn generate_key(master_key: &[u8], uid: &str) -> String {
|
||||
let key = [uid.as_bytes(), master_key].concat();
|
||||
let sha = Sha256::digest(&key);
|
||||
format!("{}{:x}", uid, sha)
|
||||
/// Transparent wrapper around a list of allowed indexes with the search rules to apply for each.
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
#[serde(untagged)]
|
||||
pub enum SearchRules {
|
||||
Set(HashSet<String>),
|
||||
Map(HashMap<String, Option<IndexSearchRules>>),
|
||||
}
|
||||
|
||||
impl Default for SearchRules {
|
||||
fn default() -> Self {
|
||||
Self::Set(Some("*".to_string()).into_iter().collect())
|
||||
}
|
||||
}
|
||||
|
||||
impl SearchRules {
|
||||
pub fn is_index_authorized(&self, index: &str) -> bool {
|
||||
match self {
|
||||
Self::Set(set) => set.contains("*") || set.contains(index),
|
||||
Self::Map(map) => map.contains_key("*") || map.contains_key(index),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_index_search_rules(&self, index: &str) -> Option<IndexSearchRules> {
|
||||
match self {
|
||||
Self::Set(set) => {
|
||||
if set.contains("*") || set.contains(index) {
|
||||
Some(IndexSearchRules::default())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
Self::Map(map) => map
|
||||
.get(index)
|
||||
.or_else(|| map.get("*"))
|
||||
.map(|isr| isr.clone().unwrap_or_default()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoIterator for SearchRules {
|
||||
type Item = (String, IndexSearchRules);
|
||||
type IntoIter = Box<dyn Iterator<Item = Self::Item>>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
match self {
|
||||
Self::Set(array) => {
|
||||
Box::new(array.into_iter().map(|i| (i, IndexSearchRules::default())))
|
||||
}
|
||||
Self::Map(map) => {
|
||||
Box::new(map.into_iter().map(|(i, isr)| (i, isr.unwrap_or_default())))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Contains the rules to apply on the top of the search query for a specific index.
|
||||
///
|
||||
/// filter: search filter to apply in addition to query filters.
|
||||
#[derive(Debug, Serialize, Deserialize, Default, Clone)]
|
||||
pub struct IndexSearchRules {
|
||||
pub filter: Option<serde_json::Value>,
|
||||
}
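
Because SearchRules is #[serde(untagged)], a tenant token can carry either a bare array of index names or a map from index name to per-index rules, and both deserialize into the same type. A usage sketch, not from the repository; the payloads are hypothetical and the types above are assumed in scope:

fn demo() -> serde_json::Result<()> {
    // A bare array deserializes to SearchRules::Set...
    let set: SearchRules = serde_json::from_str(r#"["products", "movies"]"#)?;
    assert!(set.is_index_authorized("movies"));
    assert!(!set.is_index_authorized("users"));

    // ...while an object deserializes to SearchRules::Map with per-index rules.
    let map: SearchRules =
        serde_json::from_str(r#"{"products": {"filter": "published = true"}}"#)?;
    assert!(map.get_index_search_rules("products").is_some());
    Ok(())
}
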
|
||||
|
||||
fn generate_default_keys(store: &HeedAuthStore) -> Result<()> {
|
||||
|
||||
@@ -1,42 +1,62 @@
|
||||
use enum_iterator::IntoEnumIterator;
|
||||
use std::borrow::Cow;
|
||||
use std::cmp::Reverse;
|
||||
use std::collections::HashSet;
|
||||
use std::convert::TryFrom;
|
||||
use std::convert::TryInto;
|
||||
use std::fs::create_dir_all;
|
||||
use std::ops::Deref;
|
||||
use std::path::Path;
|
||||
use std::str;
|
||||
use std::sync::Arc;
|
||||
|
||||
use chrono::{DateTime, Utc};
|
||||
use heed::types::{ByteSlice, DecodeIgnore, SerdeJson};
|
||||
use heed::{Database, Env, EnvOpenOptions, RwTxn};
|
||||
use enum_iterator::IntoEnumIterator;
|
||||
use hmac::{Hmac, Mac};
|
||||
use meilisearch_types::star_or::StarOr;
|
||||
use milli::heed::types::{ByteSlice, DecodeIgnore, SerdeJson};
|
||||
use milli::heed::{Database, Env, EnvOpenOptions, RwTxn};
|
||||
use sha2::Sha256;
|
||||
use time::OffsetDateTime;
|
||||
use uuid::fmt::Hyphenated;
|
||||
use uuid::Uuid;
|
||||
|
||||
use super::error::Result;
|
||||
use super::{Action, Key};
|
||||
|
||||
const AUTH_STORE_SIZE: usize = 1_073_741_824; //1GiB
|
||||
pub const KEY_ID_LENGTH: usize = 8;
|
||||
const AUTH_DB_PATH: &str = "auth";
|
||||
const KEY_DB_NAME: &str = "api-keys";
|
||||
const KEY_ID_ACTION_INDEX_EXPIRATION_DB_NAME: &str = "keyid-action-index-expiration";
|
||||
|
||||
pub type KeyId = [u8; KEY_ID_LENGTH];
|
||||
pub type KeyId = Uuid;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct HeedAuthStore {
|
||||
env: Env,
|
||||
env: Arc<Env>,
|
||||
keys: Database<ByteSlice, SerdeJson<Key>>,
|
||||
action_keyid_index_expiration: Database<KeyIdActionCodec, SerdeJson<Option<DateTime<Utc>>>>,
|
||||
action_keyid_index_expiration: Database<KeyIdActionCodec, SerdeJson<Option<OffsetDateTime>>>,
|
||||
should_close_on_drop: bool,
|
||||
}
|
||||
|
||||
impl Drop for HeedAuthStore {
|
||||
fn drop(&mut self) {
|
||||
if self.should_close_on_drop && Arc::strong_count(&self.env) == 1 {
|
||||
self.env.as_ref().clone().prepare_for_closing();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn open_auth_store_env(path: &Path) -> milli::heed::Result<milli::heed::Env> {
|
||||
let mut options = EnvOpenOptions::new();
|
||||
options.map_size(AUTH_STORE_SIZE); // 1GB
|
||||
options.max_dbs(2);
|
||||
options.open(path)
|
||||
}
|
||||
|
||||
impl HeedAuthStore {
|
||||
pub fn new(path: impl AsRef<Path>) -> Result<Self> {
|
||||
let path = path.as_ref().join(AUTH_DB_PATH);
|
||||
create_dir_all(&path)?;
|
||||
let mut options = EnvOpenOptions::new();
|
||||
options.map_size(AUTH_STORE_SIZE); // 1GB
|
||||
options.max_dbs(2);
|
||||
let env = options.open(path)?;
|
||||
let env = Arc::new(open_auth_store_env(path.as_ref())?);
|
||||
let keys = env.create_database(Some(KEY_DB_NAME))?;
|
||||
let action_keyid_index_expiration =
|
||||
env.create_database(Some(KEY_ID_ACTION_INDEX_EXPIRATION_DB_NAME))?;
|
||||
@@ -44,9 +64,14 @@ impl HeedAuthStore {
|
||||
env,
|
||||
keys,
|
||||
action_keyid_index_expiration,
|
||||
should_close_on_drop: true,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn set_drop_on_close(&mut self, v: bool) {
|
||||
self.should_close_on_drop = v;
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> Result<bool> {
|
||||
let rtxn = self.env.read_txn()?;
|
||||
|
||||
@@ -54,33 +79,70 @@ impl HeedAuthStore {
|
||||
}
|
||||
|
||||
pub fn put_api_key(&self, key: Key) -> Result<Key> {
|
||||
let uid = key.uid;
|
||||
let mut wtxn = self.env.write_txn()?;
|
||||
self.keys.put(&mut wtxn, &key.id, &key)?;
|
||||
|
||||
let id = key.id;
|
||||
self.keys.put(&mut wtxn, uid.as_bytes(), &key)?;
|
||||
|
||||
// delete key from inverted database before refilling it.
|
||||
self.delete_key_from_inverted_db(&mut wtxn, &id)?;
|
||||
self.delete_key_from_inverted_db(&mut wtxn, &uid)?;
|
||||
// create inverted database.
|
||||
let db = self.action_keyid_index_expiration;
|
||||
|
||||
let actions = if key.actions.contains(&Action::All) {
|
||||
// if key.actions contains All, we iterate over all actions.
|
||||
Action::into_enum_iter().collect()
|
||||
} else {
|
||||
key.actions.clone()
|
||||
};
|
||||
let mut actions = HashSet::new();
|
||||
for action in &key.actions {
|
||||
match action {
|
||||
Action::All => actions.extend(Action::into_enum_iter()),
|
||||
Action::DocumentsAll => {
|
||||
actions.extend(
|
||||
[
|
||||
Action::DocumentsGet,
|
||||
Action::DocumentsDelete,
|
||||
Action::DocumentsAdd,
|
||||
]
|
||||
.iter(),
|
||||
);
|
||||
}
|
||||
Action::IndexesAll => {
|
||||
actions.extend(
|
||||
[
|
||||
Action::IndexesAdd,
|
||||
Action::IndexesDelete,
|
||||
Action::IndexesGet,
|
||||
Action::IndexesUpdate,
|
||||
]
|
||||
.iter(),
|
||||
);
|
||||
}
|
||||
Action::SettingsAll => {
|
||||
actions.extend([Action::SettingsGet, Action::SettingsUpdate].iter());
|
||||
}
|
||||
Action::DumpsAll => {
|
||||
actions.insert(Action::DumpsCreate);
|
||||
}
|
||||
Action::TasksAll => {
|
||||
actions.insert(Action::TasksGet);
|
||||
}
|
||||
Action::StatsAll => {
|
||||
actions.insert(Action::StatsGet);
|
||||
}
|
||||
other => {
|
||||
actions.insert(*other);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let no_index_restriction = key.indexes.contains(&"*".to_owned());
|
||||
let no_index_restriction = key.indexes.contains(&StarOr::Star);
|
||||
for action in actions {
|
||||
if no_index_restriction {
|
||||
// If there is no index restriction we put None.
|
||||
db.put(&mut wtxn, &(&id, &action, None), &key.expires_at)?;
|
||||
db.put(&mut wtxn, &(&uid, &action, None), &key.expires_at)?;
|
||||
} else {
|
||||
// else we create a key for each index.
|
||||
for index in key.indexes.iter() {
|
||||
db.put(
|
||||
&mut wtxn,
|
||||
&(&id, &action, Some(index.as_bytes())),
|
||||
&(&uid, &action, Some(index.deref().as_bytes())),
|
||||
&key.expires_at,
|
||||
)?;
|
||||
}
|
||||
@@ -92,24 +154,42 @@ impl HeedAuthStore {
|
||||
Ok(key)
|
||||
}
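
In short, put_api_key expands every "group" action into its concrete members before filling the inverted index, so authorization lookups never have to resolve wildcards. A condensed sketch of that rule, not from the repository; only two groups are shown here, the match above is the full mapping:

fn expand(action: Action) -> Vec<Action> {
    match action {
        Action::DocumentsAll => vec![
            Action::DocumentsAdd,
            Action::DocumentsGet,
            Action::DocumentsDelete,
        ],
        Action::SettingsAll => vec![Action::SettingsGet, Action::SettingsUpdate],
        // All, IndexesAll, TasksAll, StatsAll and DumpsAll expand the same way.
        other => vec![other],
    }
}
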
|
||||
|
||||
pub fn get_api_key(&self, key: impl AsRef<str>) -> Result<Option<Key>> {
|
||||
pub fn get_api_key(&self, uid: Uuid) -> Result<Option<Key>> {
|
||||
let rtxn = self.env.read_txn()?;
|
||||
match try_split_array_at::<_, KEY_ID_LENGTH>(key.as_ref().as_bytes()) {
|
||||
Some((id, _)) => self.keys.get(&rtxn, id).map_err(|e| e.into()),
|
||||
None => Ok(None),
|
||||
}
|
||||
self.keys.get(&rtxn, uid.as_bytes()).map_err(|e| e.into())
|
||||
}
|
||||
|
||||
pub fn delete_api_key(&self, key: impl AsRef<str>) -> Result<bool> {
|
||||
pub fn get_uid_from_encoded_key(
|
||||
&self,
|
||||
encoded_key: &[u8],
|
||||
master_key: &[u8],
|
||||
) -> Result<Option<Uuid>> {
|
||||
let rtxn = self.env.read_txn()?;
|
||||
let uid = self
|
||||
.keys
|
||||
.remap_data_type::<DecodeIgnore>()
|
||||
.iter(&rtxn)?
|
||||
.filter_map(|res| match res {
|
||||
Ok((uid, _)) => {
|
||||
let (uid, _) = try_split_array_at(uid)?;
|
||||
let uid = Uuid::from_bytes(*uid);
|
||||
if generate_key_as_hexa(uid, master_key).as_bytes() == encoded_key {
|
||||
Some(uid)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
Err(_) => None,
|
||||
})
|
||||
.next();
|
||||
|
||||
Ok(uid)
|
||||
}
|
||||
|
||||
pub fn delete_api_key(&self, uid: Uuid) -> Result<bool> {
|
||||
let mut wtxn = self.env.write_txn()?;
|
||||
let existing = match try_split_array_at(key.as_ref().as_bytes()) {
|
||||
Some((id, _)) => {
|
||||
let existing = self.keys.delete(&mut wtxn, id)?;
|
||||
self.delete_key_from_inverted_db(&mut wtxn, id)?;
|
||||
existing
|
||||
}
|
||||
None => false,
|
||||
};
|
||||
let existing = self.keys.delete(&mut wtxn, uid.as_bytes())?;
|
||||
self.delete_key_from_inverted_db(&mut wtxn, &uid)?;
|
||||
wtxn.commit()?;
|
||||
|
||||
Ok(existing)
|
||||
@@ -128,48 +208,37 @@ impl HeedAuthStore {
|
||||
|
||||
pub fn get_expiration_date(
|
||||
&self,
|
||||
key: &[u8],
|
||||
uid: Uuid,
|
||||
action: Action,
|
||||
index: Option<&[u8]>,
|
||||
) -> Result<Option<(KeyId, Option<DateTime<Utc>>)>> {
|
||||
) -> Result<Option<Option<OffsetDateTime>>> {
|
||||
let rtxn = self.env.read_txn()?;
|
||||
match try_split_array_at::<_, KEY_ID_LENGTH>(key) {
|
||||
Some((id, _)) => {
|
||||
let tuple = (id, &action, index);
|
||||
Ok(self
|
||||
.action_keyid_index_expiration
|
||||
.get(&rtxn, &tuple)?
|
||||
.map(|expiration| (*id, expiration)))
|
||||
}
|
||||
None => Ok(None),
|
||||
}
|
||||
let tuple = (&uid, &action, index);
|
||||
Ok(self.action_keyid_index_expiration.get(&rtxn, &tuple)?)
|
||||
}
|
||||
|
||||
pub fn prefix_first_expiration_date(
|
||||
&self,
|
||||
key: &[u8],
|
||||
uid: Uuid,
|
||||
action: Action,
|
||||
) -> Result<Option<(KeyId, Option<DateTime<Utc>>)>> {
|
||||
) -> Result<Option<Option<OffsetDateTime>>> {
|
||||
let rtxn = self.env.read_txn()?;
|
||||
match try_split_array_at::<_, KEY_ID_LENGTH>(key) {
|
||||
Some((id, _)) => {
|
||||
let tuple = (id, &action, None);
|
||||
Ok(self
|
||||
.action_keyid_index_expiration
|
||||
.prefix_iter(&rtxn, &tuple)?
|
||||
.next()
|
||||
.transpose()?
|
||||
.map(|(_, expiration)| (*id, expiration)))
|
||||
}
|
||||
None => Ok(None),
|
||||
}
|
||||
let tuple = (&uid, &action, None);
|
||||
let exp = self
|
||||
.action_keyid_index_expiration
|
||||
.prefix_iter(&rtxn, &tuple)?
|
||||
.next()
|
||||
.transpose()?
|
||||
.map(|(_, expiration)| expiration);
|
||||
|
||||
Ok(exp)
|
||||
}
|
||||
|
||||
fn delete_key_from_inverted_db(&self, wtxn: &mut RwTxn, key: &KeyId) -> Result<()> {
|
||||
let mut iter = self
|
||||
.action_keyid_index_expiration
|
||||
.remap_types::<ByteSlice, DecodeIgnore>()
|
||||
.prefix_iter_mut(wtxn, key)?;
|
||||
.prefix_iter_mut(wtxn, key.as_bytes())?;
|
||||
while iter.next().transpose()?.is_some() {
|
||||
// safety: we don't keep references from inside the LMDB database.
|
||||
unsafe { iter.del_current()? };
|
||||
@@ -180,31 +249,32 @@ impl HeedAuthStore {
|
||||
}
|
||||
|
||||
/// Codec allowing to retrieve the expiration date of an action,
|
||||
/// optionnally on a spcific index, for a given key.
|
||||
/// optionally on a specific index, for a given key.
|
||||
pub struct KeyIdActionCodec;
|
||||
|
||||
impl<'a> heed::BytesDecode<'a> for KeyIdActionCodec {
|
||||
impl<'a> milli::heed::BytesDecode<'a> for KeyIdActionCodec {
|
||||
type DItem = (KeyId, Action, Option<&'a [u8]>);
|
||||
|
||||
fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
|
||||
let (key_id, action_bytes) = try_split_array_at(bytes)?;
|
||||
let (key_id_bytes, action_bytes) = try_split_array_at(bytes)?;
|
||||
let (action_bytes, index) = match try_split_array_at(action_bytes)? {
|
||||
(action, []) => (action, None),
|
||||
(action, index) => (action, Some(index)),
|
||||
};
|
||||
let key_id = Uuid::from_bytes(*key_id_bytes);
|
||||
let action = Action::from_repr(u8::from_be_bytes(*action_bytes))?;
|
||||
|
||||
Some((*key_id, action, index))
|
||||
Some((key_id, action, index))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> heed::BytesEncode<'a> for KeyIdActionCodec {
|
||||
impl<'a> milli::heed::BytesEncode<'a> for KeyIdActionCodec {
|
||||
type EItem = (&'a KeyId, &'a Action, Option<&'a [u8]>);
|
||||
|
||||
fn bytes_encode((key_id, action, index): &Self::EItem) -> Option<Cow<[u8]>> {
|
||||
let mut bytes = Vec::new();
|
||||
|
||||
bytes.extend_from_slice(*key_id);
|
||||
bytes.extend_from_slice(key_id.as_bytes());
|
||||
let action_bytes = u8::to_be_bytes(action.repr());
|
||||
bytes.extend_from_slice(&action_bytes);
|
||||
if let Some(index) = index {
|
||||
@@ -215,6 +285,19 @@ impl<'a> heed::BytesEncode<'a> for KeyIdActionCodec {
|
||||
}
|
||||
}
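
The codec writes a fixed-layout key: the 16 raw UUID bytes, one byte of action discriminant, then the optional index name. Putting the fixed-size fields first is what makes prefix iteration over (uid) or (uid, action) work. A sketch of that framing, not from the repository:

fn encode(uid: uuid::Uuid, action_repr: u8, index: Option<&[u8]>) -> Vec<u8> {
    let mut bytes = Vec::new();
    bytes.extend_from_slice(uid.as_bytes()); // 16 fixed bytes first...
    bytes.push(action_repr); // ...then 1 byte of action...
    if let Some(index) = index {
        bytes.extend_from_slice(index); // ...then the index name, if any.
    }
    bytes
}
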
|
||||
|
||||
pub fn generate_key_as_hexa(uid: Uuid, master_key: &[u8]) -> String {
|
||||
// format uid as hyphenated allowing user to generate their own keys.
|
||||
let mut uid_buffer = [0; Hyphenated::LENGTH];
|
||||
let uid = uid.hyphenated().encode_lower(&mut uid_buffer);
|
||||
|
||||
// new_from_slice function never fail.
|
||||
let mut mac = Hmac::<Sha256>::new_from_slice(master_key).unwrap();
|
||||
mac.update(uid.as_bytes());
|
||||
|
||||
let result = mac.finalize();
|
||||
format!("{:x}", result.into_bytes())
|
||||
}
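
Since an API key is just the lowercase-hex HMAC-SHA256 of the hyphenated uid under the master key, verifying a presented key is re-derive-and-compare, which is how get_uid_from_encoded_key scans the stored uids. A usage sketch, not from the repository; the master key value is hypothetical:

fn verify(presented: &str, uid: uuid::Uuid, master_key: &[u8]) -> bool {
    // Re-derive the key for this uid and compare with what the client sent.
    generate_key_as_hexa(uid, master_key) == presented
}

fn demo() {
    let master_key = b"MASTER_KEY"; // hypothetical
    let uid = uuid::Uuid::new_v4();
    let api_key = generate_key_as_hexa(uid, master_key);
    assert!(verify(&api_key, uid, master_key));
}
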
|
||||
|
||||
/// Divides one slice into two at an index, returns `None` if mid is out of bounds.
|
||||
pub fn try_split_at<T>(slice: &[T], mid: usize) -> Option<(&[T], &[T])> {
|
||||
if mid <= slice.len() {
|
||||
|
||||
@@ -1,93 +1,97 @@
|
||||
[package]
|
||||
authors = ["Quentin de Quelen <quentin@dequelen.me>", "Clément Renault <clement@meilisearch.com>"]
|
||||
description = "MeiliSearch HTTP server"
|
||||
edition = "2018"
|
||||
description = "Meilisearch HTTP server"
|
||||
edition = "2021"
|
||||
license = "MIT"
|
||||
name = "meilisearch-http"
|
||||
version = "0.25.2"
|
||||
version = "0.28.1"
|
||||
|
||||
[[bin]]
|
||||
name = "meilisearch"
|
||||
path = "src/main.rs"
|
||||
|
||||
[build-dependencies]
|
||||
actix-web-static-files = { git = "https://github.com/MarinPostma/actix-web-static-files.git", rev = "39d8006", optional = true }
|
||||
anyhow = { version = "1.0.43", optional = true }
|
||||
cargo_toml = { version = "0.9", optional = true }
|
||||
anyhow = { version = "1.0.56", optional = true }
|
||||
cargo_toml = { version = "0.11.4", optional = true }
|
||||
hex = { version = "0.4.3", optional = true }
|
||||
reqwest = { version = "0.11.4", features = ["blocking", "rustls-tls"], default-features = false, optional = true }
|
||||
sha-1 = { version = "0.9.8", optional = true }
|
||||
tempfile = { version = "3.2.0", optional = true }
|
||||
vergen = { version = "5.1.15", default-features = false, features = ["git"] }
|
||||
reqwest = { version = "0.11.9", features = ["blocking", "rustls-tls"], default-features = false, optional = true }
|
||||
sha-1 = { version = "0.10.0", optional = true }
|
||||
static-files = { version = "0.2.3", optional = true }
|
||||
tempfile = { version = "3.3.0", optional = true }
|
||||
vergen = { version = "7.0.0", default-features = false, features = ["git"] }
|
||||
zip = { version = "0.5.13", optional = true }
|
||||
|
||||
[dependencies]
|
||||
actix-cors = { git = "https://github.com/MarinPostma/actix-extras.git", rev = "963ac94d" }
|
||||
actix-web = { version = "4.0.0-beta.9", features = ["rustls"] }
|
||||
actix-web-static-files = { git = "https://github.com/MarinPostma/actix-web-static-files.git", rev = "39d8006", optional = true }
# TODO: specifying this dependency so semver doesn't bump to next beta
actix-tls = "=3.0.0-beta.5"
anyhow = { version = "1.0.43", features = ["backtrace"] }
arc-swap = "1.3.2"
async-stream = "0.3.2"
async-trait = "0.1.51"
actix-cors = "0.6.1"
actix-web = { version = "4.0.1", default-features = false, features = ["macros", "compress-brotli", "compress-gzip", "cookies", "rustls"] }
actix-web-static-files = { git = "https://github.com/kilork/actix-web-static-files.git", rev = "2d3b6160", optional = true }
anyhow = { version = "1.0.56", features = ["backtrace"] }
async-stream = "0.3.3"
async-trait = "0.1.52"
bstr = "0.2.17"
byte-unit = { version = "4.0.12", default-features = false, features = ["std"] }
byte-unit = { version = "4.0.14", default-features = false, features = ["std", "serde"] }
bytes = "1.1.0"
chrono = { version = "0.4.19", features = ["serde"] }
crossbeam-channel = "0.5.1"
clap = { version = "3.1.6", features = ["derive", "env"] }
crossbeam-channel = "0.5.2"
either = "1.6.1"
env_logger = "0.9.0"
flate2 = "1.0.21"
flate2 = "1.0.22"
fst = "0.4.7"
futures = "0.3.17"
futures-util = "0.3.17"
heed = { git = "https://github.com/Kerollmops/heed", tag = "v0.12.1" }
http = "0.2.4"
indexmap = { version = "1.7.0", features = ["serde-1"] }
itertools = "0.10.1"
futures = "0.3.21"
futures-util = "0.3.21"
http = "0.2.6"
indexmap = { version = "1.8.0", features = ["serde-1"] }
itertools = "0.10.3"
jsonwebtoken = "8.0.1"
log = "0.4.14"
meilisearch-auth = { path = "../meilisearch-auth" }
meilisearch-error = { path = "../meilisearch-error" }
meilisearch-types = { path = "../meilisearch-types" }
meilisearch-lib = { path = "../meilisearch-lib" }
mime = "0.3.16"
num_cpus = "1.13.0"
num_cpus = "1.13.1"
obkv = "0.2.0"
once_cell = "1.8.0"
parking_lot = "0.11.2"
pin-project = "1.0.8"
once_cell = "1.10.0"
parking_lot = "0.12.0"
pin-project-lite = "0.2.8"
platform-dirs = "0.3.0"
rand = "0.8.4"
rand = "0.8.5"
rayon = "1.5.1"
regex = "1.5.4"
rustls = "0.19.1"
segment = { version = "0.1.2", optional = true }
serde = { version = "1.0.130", features = ["derive"] }
serde_json = { version = "1.0.67", features = ["preserve_order"] }
sha2 = "0.9.6"
siphasher = "0.3.7"
slice-group-by = "0.2.6"
structopt = "0.3.25"
sysinfo = "0.20.2"
tar = "0.4.37"
tempfile = "3.2.0"
thiserror = "1.0.28"
tokio = { version = "1.11.0", features = ["full"] }
tokio-stream = "0.1.7"
uuid = { version = "0.8.2", features = ["serde"] }
regex = "1.5.5"
reqwest = { version = "0.11.4", features = ["rustls-tls", "json"], default-features = false }
rustls = "0.20.4"
rustls-pemfile = "0.3.0"
segment = { version = "0.2.0", optional = true }
serde = { version = "1.0.136", features = ["derive"] }
serde-cs = "0.2.3"
serde_json = { version = "1.0.79", features = ["preserve_order"] }
sha2 = "0.10.2"
siphasher = "0.3.10"
slice-group-by = "0.3.0"
static-files = { version = "0.2.3", optional = true }
sysinfo = "0.23.5"
tar = "0.4.38"
tempfile = "3.3.0"
thiserror = "1.0.30"
time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
tokio = { version = "1.17.0", features = ["full"] }
tokio-stream = "0.1.8"
uuid = { version = "1.1.2", features = ["serde", "v4"] }
walkdir = "2.3.2"

[dev-dependencies]
actix-rt = "2.2.0"
actix-rt = "2.7.0"
assert-json-diff = "2.0.1"
manifest-dir-macros = "0.1.14"
maplit = "1.0.2"
paste = "1.0.5"
serde_url_params = "0.2.1"
urlencoding = "2.1.0"
yaup = "0.2.0"

[features]
default = ["analytics", "mini-dashboard"]
analytics = ["segment"]
mini-dashboard = [
"actix-web-static-files",
"static-files",
"anyhow",
"cargo_toml",
"hex",
@@ -96,12 +100,10 @@ mini-dashboard = [
"tempfile",
"zip",
]
analytics = ["segment"]
default = ["analytics", "mini-dashboard"]

[target.'cfg(target_os = "linux")'.dependencies]
tikv-jemallocator = "0.4.1"
tikv-jemallocator = "0.4.3"

[package.metadata.mini-dashboard]
assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.1.7/build.zip"
sha1 = "e2feedf271917c4b7b88998eff5aaaea1d3925b9"
assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.1/build.zip"
sha1 = "05a02ff13c3982091884a3f81d28bf53e72607b2"

@@ -16,11 +16,11 @@ mod mini_dashboard {
use std::io::{Cursor, Read, Write};
use std::path::PathBuf;

use actix_web_static_files::resource_dir;
use anyhow::Context;
use cargo_toml::Manifest;
use reqwest::blocking::get;
use sha1::{Digest, Sha1};
use static_files::resource_dir;

pub fn setup_mini_dashboard() -> anyhow::Result<()> {
let cargo_manifest_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());

@@ -29,12 +29,12 @@ pub type SegmentAnalytics = segment_analytics::SegmentAnalytics;
#[cfg(all(not(debug_assertions), feature = "analytics"))]
pub type SearchAggregator = segment_analytics::SearchAggregator;

/// The MeiliSearch config dir:
/// `~/.config/MeiliSearch` on *NIX or *BSD.
/// The Meilisearch config dir:
/// `~/.config/Meilisearch` on *NIX or *BSD.
/// `~/Library/ApplicationSupport` on macOS.
/// `%APPDATA` (= `C:\Users%USERNAME%\AppData\Roaming`) on windows.
static MEILISEARCH_CONFIG_PATH: Lazy<Option<PathBuf>> =
Lazy::new(|| AppDirs::new(Some("MeiliSearch"), false).map(|appdir| appdir.config_dir));
Lazy::new(|| AppDirs::new(Some("Meilisearch"), false).map(|appdir| appdir.config_dir));

fn config_user_id_path(db_path: &Path) -> Option<PathBuf> {
db_path
@@ -44,13 +44,13 @@ fn config_user_id_path(db_path: &Path) -> Option<PathBuf> {
path.join("instance-uid")
.display()
.to_string()
.replace("/", "-")
.replace('/', "-")
})
.zip(MEILISEARCH_CONFIG_PATH.as_ref())
.map(|(filename, config_path)| config_path.join(filename.trim_start_matches('-')))
}

/// Look for the instance-uid in the `data.ms` or in `~/.config/MeiliSearch/path-to-db-instance-uid`
/// Look for the instance-uid in the `data.ms` or in `~/.config/Meilisearch/path-to-db-instance-uid`
fn find_user_id(db_path: &Path) -> Option<String> {
fs::read_to_string(db_path.join("instance-uid"))
.ok()
@@ -61,7 +61,7 @@ pub trait Analytics: Sync + Send {
/// The method used to publish most analytics that do not need to be batched every hour
fn publish(&self, event_name: String, send: Value, request: Option<&HttpRequest>);

/// This method should be called to aggergate a get search
/// This method should be called to aggregate a get search
fn get_search(&self, aggregate: SearchAggregator);

/// This method should be called to aggregate a post search

@@ -1,13 +1,17 @@
use std::collections::{BinaryHeap, HashMap, HashSet};
use std::fs;
use std::path::Path;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::time::{Duration, Instant};

use actix_web::http::header::USER_AGENT;
use actix_web::HttpRequest;
use http::header::CONTENT_TYPE;
use meilisearch_lib::index::{SearchQuery, SearchResult};
use meilisearch_auth::SearchRules;
use meilisearch_lib::index::{
SearchQuery, SearchResult, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG,
};
use meilisearch_lib::index_controller::Stats;
use meilisearch_lib::MeiliSearch;
use once_cell::sync::Lazy;
@@ -16,6 +20,7 @@ use segment::message::{Identify, Track, User};
use segment::{AutoBatcher, Batcher, HttpClient};
use serde_json::{json, Value};
use sysinfo::{DiskExt, System, SystemExt};
use time::OffsetDateTime;
use tokio::select;
use tokio::sync::mpsc::{self, Receiver, Sender};
use uuid::Uuid;
@@ -26,6 +31,8 @@ use crate::Opt;

use super::{config_user_id_path, MEILISEARCH_CONFIG_PATH};

const ANALYTICS_HEADER: &str = "X-Meilisearch-Client";

/// Write the instance-uid in the `data.ms` and in `~/.config/MeiliSearch/path-to-db-instance-uid`. Ignore the errors.
fn write_user_id(db_path: &Path, user_id: &str) {
let _ = fs::write(db_path.join("instance-uid"), user_id.as_bytes());
@@ -43,7 +50,8 @@ const SEGMENT_API_KEY: &str = "P3FWhhEsJiEDCuEHpmcN9DHcK4hVfBvb";
pub fn extract_user_agents(request: &HttpRequest) -> Vec<String> {
request
.headers()
.get(USER_AGENT)
.get(ANALYTICS_HEADER)
.or_else(|| request.headers().get(USER_AGENT))
.map(|header| header.to_str().ok())
.flatten()
.unwrap_or("unknown")
@@ -73,11 +81,23 @@ impl SegmentAnalytics {
let user_id = user_id.unwrap_or_else(|| Uuid::new_v4().to_string());
write_user_id(&opt.db_path, &user_id);

let client = HttpClient::default();
let client = reqwest::Client::builder()
.connect_timeout(Duration::from_secs(10))
.build();

// if reqwest throws an error we won't be able to send analytics
if client.is_err() {
return super::MockAnalytics::new(opt);
}

let client = HttpClient::new(
client.unwrap(),
"https://telemetry.meilisearch.com".to_string(),
);
let user = User::UserId { user_id };
let mut batcher = AutoBatcher::new(client, Batcher::new(None), SEGMENT_API_KEY.to_string());

// If MeiliSearch is Launched for the first time:
// If Meilisearch is Launched for the first time:
// 1. Send an event Launched associated to the user `total_launch`.
// 2. Batch an event Launched with the real instance-id and send it in one hour.
if first_time_run {
@@ -125,11 +145,7 @@ impl SegmentAnalytics {

impl super::Analytics for SegmentAnalytics {
fn publish(&self, event_name: String, mut send: Value, request: Option<&HttpRequest>) {
let user_agent = request
.map(|req| req.headers().get(USER_AGENT))
.flatten()
.map(|header| header.to_str().unwrap_or("unknown"))
.map(|s| s.split(';').map(str::trim).collect::<Vec<&str>>());
let user_agent = request.map(|req| extract_user_agents(req));

send["user-agent"] = json!(user_agent);
let event = Track {
@@ -210,10 +226,30 @@ impl Segment {
"server_provider": std::env::var("MEILI_SERVER_PROVIDER").ok(),
})
});
let infos = json!({
"env": opt.env.clone(),
"has_snapshot": opt.schedule_snapshot,
});
// The infos are all CLI options, except those containing sensitive information.
// We consider an option sensitive if it contains a path, an address, or a key.
let infos = {
// First we see if any sensitive fields were used.
let db_path = opt.db_path != PathBuf::from("./data.ms");
let import_dump = opt.import_dump.is_some();
let dumps_dir = opt.dumps_dir != PathBuf::from("dumps/");
let import_snapshot = opt.import_snapshot.is_some();
let snapshots_dir = opt.snapshot_dir != PathBuf::from("snapshots/");
let http_addr = opt.http_addr != "127.0.0.1:7700";

let mut infos = serde_json::to_value(opt).unwrap();

// Then we overwrite all sensitive fields with a boolean representing if
// the feature was used or not.
infos["db_path"] = json!(db_path);
infos["import_dump"] = json!(import_dump);
infos["dumps_dir"] = json!(dumps_dir);
infos["import_snapshot"] = json!(import_snapshot);
infos["snapshot_dir"] = json!(snapshots_dir);
infos["http_addr"] = json!(http_addr);

infos
};

let number_of_documents = stats
.indexes
@@ -259,7 +295,7 @@ impl Segment {
}

async fn tick(&mut self, meilisearch: MeiliSearch) {
if let Ok(stats) = meilisearch.get_all_stats(&None).await {
if let Ok(stats) = meilisearch.get_all_stats(&SearchRules::default()).await {
let _ = self
.batcher
.push(Identify {
@@ -301,6 +337,8 @@ impl Segment {

#[derive(Default)]
pub struct SearchAggregator {
timestamp: Option<OffsetDateTime>,

// context
user_agents: HashSet<String>,

@@ -331,11 +369,20 @@ pub struct SearchAggregator {
// pagination
max_limit: usize,
max_offset: usize,

// formatting
highlight_pre_tag: bool,
highlight_post_tag: bool,
crop_marker: bool,
show_matches_position: bool,
crop_length: bool,
}

impl SearchAggregator {
pub fn from_query(query: &SearchQuery, request: &HttpRequest) -> Self {
let mut ret = Self::default();
ret.timestamp = Some(OffsetDateTime::now_utc());

ret.total_received = 1;
ret.user_agents = extract_user_agents(request).into_iter().collect();

@@ -379,6 +426,12 @@ impl SearchAggregator {
ret.max_limit = query.limit;
ret.max_offset = query.offset.unwrap_or_default();

ret.highlight_pre_tag = query.highlight_pre_tag != DEFAULT_HIGHLIGHT_PRE_TAG();
ret.highlight_post_tag = query.highlight_post_tag != DEFAULT_HIGHLIGHT_POST_TAG();
ret.crop_marker = query.crop_marker != DEFAULT_CROP_MARKER();
ret.crop_length = query.crop_length != DEFAULT_CROP_LENGTH();
ret.show_matches_position = query.show_matches_position;

ret
}

@@ -389,6 +442,10 @@ impl SearchAggregator {

/// Aggregate one [SearchAggregator] into another.
pub fn aggregate(&mut self, mut other: Self) {
if self.timestamp.is_none() {
self.timestamp = other.timestamp;
}

// context
for user_agent in other.user_agents.into_iter() {
self.user_agents.insert(user_agent);
@@ -422,6 +479,12 @@ impl SearchAggregator {
// pagination
self.max_limit = self.max_limit.max(other.max_limit);
self.max_offset = self.max_offset.max(other.max_offset);

self.highlight_pre_tag |= other.highlight_pre_tag;
self.highlight_post_tag |= other.highlight_post_tag;
self.crop_marker |= other.crop_marker;
self.show_matches_position |= other.show_matches_position;
self.crop_length |= other.crop_length;
}

pub fn into_event(self, user: &User, event_name: &str) -> Option<Track> {
@@ -432,7 +495,7 @@ impl SearchAggregator {
let percentile_99th = 0.99 * (self.total_succeeded as f64 - 1.) + 1.;
// we get all the values in a sorted manner
let time_spent = self.time_spent.into_sorted_vec();
// We are only intersted by the slowest value of the 99th fastest results
// We are only interested by the slowest value of the 99th fastest results
let time_spent = time_spent.get(percentile_99th as usize);

let properties = json!({
@@ -459,9 +522,17 @@ impl SearchAggregator {
"max_limit": self.max_limit,
"max_offset": self.max_offset,
},
"formatting": {
"highlight_pre_tag": self.highlight_pre_tag,
"highlight_post_tag": self.highlight_post_tag,
"crop_marker": self.crop_marker,
"show_matches_position": self.show_matches_position,
"crop_length": self.crop_length,
},
});

Some(Track {
timestamp: self.timestamp,
user: user.clone(),
event: event_name.to_string(),
properties,
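The `percentile_99th` arithmetic a few lines up is a linear index into the sorted samples: for 200 succeeded requests it gives 0.99 * 199 + 1 = 198.01, truncated to index 198. A self-contained sketch of the same computation:

    fn percentile_99_index(total_succeeded: usize) -> usize {
        // Same formula as above; the `as usize` cast truncates.
        (0.99 * (total_succeeded as f64 - 1.) + 1.) as usize
    }

    assert_eq!(percentile_99_index(200), 198);
    // With a single sample the index is out of bounds, which is why the
    // code reaches for `.get(...)` and tolerates a `None`.
    assert_eq!(percentile_99_index(1), 1);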
@@ -473,6 +544,8 @@ impl SearchAggregator {

#[derive(Default)]
pub struct DocumentsAggregator {
timestamp: Option<OffsetDateTime>,

// set to true when at least one request was received
updated: bool,

@@ -491,6 +564,7 @@ impl DocumentsAggregator {
request: &HttpRequest,
) -> Self {
let mut ret = Self::default();
ret.timestamp = Some(OffsetDateTime::now_utc());

ret.updated = true;
ret.user_agents = extract_user_agents(request).into_iter().collect();
@@ -500,8 +574,8 @@ impl DocumentsAggregator {
let content_type = request
.headers()
.get(CONTENT_TYPE)
.map(|s| s.to_str().unwrap_or("unkown"))
.unwrap()
.and_then(|s| s.to_str().ok())
.unwrap_or("unknown")
.to_string();
ret.content_types.insert(content_type);
ret.index_creation = index_creation;
@@ -511,15 +585,19 @@ impl DocumentsAggregator {

/// Aggregate one [DocumentsAggregator] into another.
pub fn aggregate(&mut self, other: Self) {
if self.timestamp.is_none() {
self.timestamp = other.timestamp;
}

self.updated |= other.updated;
// we can't create a union because there is no `into_union` method
for user_agent in other.user_agents.into_iter() {
for user_agent in other.user_agents {
self.user_agents.insert(user_agent);
}
for primary_key in other.primary_keys.into_iter() {
for primary_key in other.primary_keys {
self.primary_keys.insert(primary_key);
}
for content_type in other.content_types.into_iter() {
for content_type in other.content_types {
self.content_types.insert(content_type);
}
self.index_creation |= other.index_creation;
@@ -537,6 +615,7 @@ impl DocumentsAggregator {
});

Some(Track {
timestamp: self.timestamp,
user: user.clone(),
event: event_name.to_string(),
properties,

@@ -1,6 +1,6 @@
use actix_web as aweb;
use aweb::error::{JsonPayloadError, QueryPayloadError};
use meilisearch_error::{Code, ErrorCode, ResponseError};
use meilisearch_types::error::{Code, ErrorCode, ResponseError};

#[derive(Debug, thiserror::Error)]
pub enum MeilisearchHttpError {

@@ -1,11 +1,11 @@
use meilisearch_error::{Code, ErrorCode};
use meilisearch_types::error::{Code, ErrorCode};

#[derive(Debug, thiserror::Error)]
pub enum AuthenticationError {
#[error("The Authorization header is missing. It must use the bearer authorization method.")]
MissingAuthorizationHeader,
#[error("The provided API key is invalid.")]
InvalidToken(String),
InvalidToken,
// Triggered on configuration error.
#[error("An internal error has occurred. `Irretrievable state`.")]
IrretrievableState,
@@ -15,7 +15,7 @@ impl ErrorCode for AuthenticationError {
fn error_code(&self) -> Code {
match self {
AuthenticationError::MissingAuthorizationHeader => Code::MissingAuthorizationHeader,
AuthenticationError::InvalidToken(_) => Code::InvalidToken,
AuthenticationError::InvalidToken => Code::InvalidToken,
AuthenticationError::IrretrievableState => Code::Internal,
}
}

@@ -2,28 +2,80 @@ mod error;

use std::marker::PhantomData;
use std::ops::Deref;
use std::pin::Pin;

use actix_web::FromRequest;
use futures::future::err;
use futures::future::{ok, Ready};
use meilisearch_error::ResponseError;

use error::AuthenticationError;
use futures::future::err;
use futures::Future;
use meilisearch_auth::{AuthController, AuthFilter};
use meilisearch_types::error::{Code, ResponseError};

pub struct GuardedData<T, D> {
pub struct GuardedData<P, D> {
data: D,
filters: AuthFilter,
_marker: PhantomData<T>,
_marker: PhantomData<P>,
}

impl<T, D> GuardedData<T, D> {
impl<P, D> GuardedData<P, D> {
pub fn filters(&self) -> &AuthFilter {
&self.filters
}

async fn auth_bearer(
auth: AuthController,
token: String,
index: Option<String>,
data: Option<D>,
) -> Result<Self, ResponseError>
where
P: Policy + 'static,
{
match Self::authenticate(auth, token, index).await? {
Some(filters) => match data {
Some(data) => Ok(Self {
data,
filters,
_marker: PhantomData,
}),
None => Err(AuthenticationError::IrretrievableState.into()),
},
None => Err(AuthenticationError::InvalidToken.into()),
}
}

async fn auth_token(auth: AuthController, data: Option<D>) -> Result<Self, ResponseError>
where
P: Policy + 'static,
{
match Self::authenticate(auth, String::new(), None).await? {
Some(filters) => match data {
Some(data) => Ok(Self {
data,
filters,
_marker: PhantomData,
}),
None => Err(AuthenticationError::IrretrievableState.into()),
},
None => Err(AuthenticationError::MissingAuthorizationHeader.into()),
}
}

async fn authenticate(
auth: AuthController,
token: String,
index: Option<String>,
) -> Result<Option<AuthFilter>, ResponseError>
where
P: Policy + 'static,
{
tokio::task::spawn_blocking(move || P::authenticate(auth, token.as_ref(), index.as_deref()))
.await
.map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))
}
}

impl<T, D> Deref for GuardedData<T, D> {
impl<P, D> Deref for GuardedData<P, D> {
type Target = D;

fn deref(&self) -> &Self::Target {
@@ -32,11 +84,9 @@ impl<T, D> Deref for GuardedData<T, D> {
}

impl<P: Policy + 'static, D: 'static + Clone> FromRequest for GuardedData<P, D> {
type Config = ();

type Error = ResponseError;

type Future = Ready<Result<Self, Self::Error>>;
type Future = Pin<Box<dyn Future<Output = Result<Self, Self::Error>>>>;

fn from_request(
req: &actix_web::HttpRequest,
@@ -52,37 +102,23 @@ impl<P: Policy + 'static, D: 'static + Clone> FromRequest for GuardedData<P, D>
Some("Bearer") => {
// TODO: find a less hardcoded way?
let index = req.match_info().get("index_uid");
let token = type_token.next().unwrap_or("unknown");
match P::authenticate(auth, token, index) {
Some(filters) => match req.app_data::<D>().cloned() {
Some(data) => ok(Self {
data,
filters,
_marker: PhantomData,
}),
None => err(AuthenticationError::IrretrievableState.into()),
},
None => {
let token = token.to_string();
err(AuthenticationError::InvalidToken(token).into())
}
match type_token.next() {
Some(token) => Box::pin(Self::auth_bearer(
auth,
token.to_string(),
index.map(String::from),
req.app_data::<D>().cloned(),
)),
None => Box::pin(err(AuthenticationError::InvalidToken.into())),
}
}
_otherwise => err(AuthenticationError::MissingAuthorizationHeader.into()),
},
None => match P::authenticate(auth, "", None) {
Some(filters) => match req.app_data::<D>().cloned() {
Some(data) => ok(Self {
data,
filters,
_marker: PhantomData,
}),
None => err(AuthenticationError::IrretrievableState.into()),
},
None => err(AuthenticationError::MissingAuthorizationHeader.into()),
_otherwise => {
Box::pin(err(AuthenticationError::MissingAuthorizationHeader.into()))
}
},
None => Box::pin(Self::auth_token(auth, req.app_data::<D>().cloned())),
},
None => err(AuthenticationError::IrretrievableState.into()),
None => Box::pin(err(AuthenticationError::IrretrievableState.into())),
}
}
}
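The core of this hunk is the switch from `Ready<…>` to a boxed future, which lets `from_request` await the `spawn_blocking` call. A minimal, self-contained sketch of the same pattern with a toy extractor (not the real `GuardedData`):

    use std::future::Future;
    use std::pin::Pin;

    use actix_web::{dev::Payload, FromRequest, HttpRequest};

    struct ClientName(String);

    impl FromRequest for ClientName {
        type Error = actix_web::Error;
        // The boxed future allows async work before extraction completes.
        type Future = Pin<Box<dyn Future<Output = Result<Self, Self::Error>>>>;

        fn from_request(req: &HttpRequest, _payload: &mut Payload) -> Self::Future {
            let name = req
                .headers()
                .get("X-Client-Name") // hypothetical header, for illustration
                .and_then(|v| v.to_str().ok())
                .unwrap_or("unknown")
                .to_string();
            Box::pin(async move { Ok(ClientName(name)) })
        }
    }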
@@ -92,27 +128,39 @@ pub trait Policy {
}

pub mod policies {
use jsonwebtoken::{decode, Algorithm, DecodingKey, Validation};
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use uuid::Uuid;

use crate::extractors::authentication::Policy;
use meilisearch_auth::{Action, AuthController, AuthFilter};
use meilisearch_auth::{Action, AuthController, AuthFilter, SearchRules};
// reexport actions in policies in order to be used in routes configuration.
pub use meilisearch_auth::actions;

pub struct MasterPolicy;
fn tenant_token_validation() -> Validation {
let mut validation = Validation::default();
validation.validate_exp = false;
validation.required_spec_claims.remove("exp");
validation.algorithms = vec![Algorithm::HS256, Algorithm::HS384, Algorithm::HS512];
validation
}

impl Policy for MasterPolicy {
fn authenticate(
auth: AuthController,
token: &str,
_index: Option<&str>,
) -> Option<AuthFilter> {
if let Some(master_key) = auth.get_master_key() {
if master_key == token {
return Some(AuthFilter::default());
}
}
/// Extracts the key id used to sign the payload, without performing any validation.
fn extract_key_id(token: &str) -> Option<Uuid> {
let mut validation = tenant_token_validation();
validation.insecure_disable_signature_validation();
let dummy_key = DecodingKey::from_secret(b"secret");
let token_data = decode::<Claims>(token, &dummy_key, &validation).ok()?;

None
}
// get token fields without validating it.
let Claims { api_key_uid, .. } = token_data.claims;
Some(api_key_uid)
}

fn is_keys_action(action: u8) -> bool {
use actions::*;
matches!(action, KEYS_GET | KEYS_CREATE | KEYS_UPDATE | KEYS_DELETE)
}
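`extract_key_id` leans on jsonwebtoken's explicit escape hatch to read the claims before the signing key is known. A standalone sketch of that first, unverified decode (the claim shape mirrors the `Claims` struct defined later in this hunk; nothing read this way should be trusted until the second, fully validated decode):

    use jsonwebtoken::{decode, DecodingKey, Validation};
    use serde::Deserialize;
    use uuid::Uuid;

    #[derive(Deserialize)]
    #[serde(rename_all = "camelCase")]
    struct PeekClaims {
        api_key_uid: Uuid,
    }

    fn peek_key_uid(token: &str) -> Option<Uuid> {
        let mut validation = Validation::default();
        validation.validate_exp = false;
        validation.required_spec_claims.remove("exp");
        // Signature checking is disabled, so the secret below is never used.
        validation.insecure_disable_signature_validation();
        let data =
            decode::<PeekClaims>(token, &DecodingKey::from_secret(b"unused"), &validation).ok()?;
        Some(data.claims.api_key_uid)
    }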

pub struct ActionPolicy<const A: u8>;
@@ -124,19 +172,83 @@ pub mod policies {
index: Option<&str>,
) -> Option<AuthFilter> {
// authenticate if token is the master key.
if auth.get_master_key().map_or(true, |mk| mk == token) {
// master key can only have access to keys routes.
// if master key is None only keys routes are inaccessible.
if auth
.get_master_key()
.map_or_else(|| !is_keys_action(A), |mk| mk == token)
{
return Some(AuthFilter::default());
}

// authenticate if token is allowed.
if let Some(action) = Action::from_repr(A) {
let index = index.map(|i| i.as_bytes());
if let Ok(true) = auth.authenticate(token.as_bytes(), action, index) {
return auth.get_key_filters(token).ok();
// Tenant token
if let Some(filters) = ActionPolicy::<A>::authenticate_tenant_token(&auth, token, index)
{
return Some(filters);
} else if let Some(action) = Action::from_repr(A) {
// API key
if let Ok(Some(uid)) = auth.get_optional_uid_from_encoded_key(token.as_bytes()) {
if let Ok(true) = auth.is_key_authorized(uid, action, index) {
return auth.get_key_filters(uid, None).ok();
}
}
}

None
}
}

impl<const A: u8> ActionPolicy<A> {
fn authenticate_tenant_token(
auth: &AuthController,
token: &str,
index: Option<&str>,
) -> Option<AuthFilter> {
// Only search action can be accessed by a tenant token.
if A != actions::SEARCH {
return None;
}

let uid = extract_key_id(token)?;
// check if parent key is authorized to do the action.
if auth.is_key_authorized(uid, Action::Search, index).ok()? {
// Check if tenant token is valid.
let key = auth.generate_key(uid)?;
let data = decode::<Claims>(
token,
&DecodingKey::from_secret(key.as_bytes()),
&tenant_token_validation(),
)
.ok()?;

// Check index access if an index restriction is provided.
if let Some(index) = index {
if !data.claims.search_rules.is_index_authorized(index) {
return None;
}
}

// Check if token is expired.
if let Some(exp) = data.claims.exp {
if OffsetDateTime::now_utc().unix_timestamp() > exp {
return None;
}
}

return auth
.get_key_filters(uid, Some(data.claims.search_rules))
.ok();
}

None
}
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct Claims {
search_rules: SearchRules,
exp: Option<i64>,
api_key_uid: Uuid,
}
}

@@ -1,3 +1,4 @@
pub mod payload;
#[macro_use]
pub mod authentication;
pub mod sequential_extractor;

@@ -28,8 +28,6 @@ impl Default for PayloadConfig {
}

impl FromRequest for Payload {
type Config = PayloadConfig;

type Error = PayloadError;

type Future = Ready<Result<Payload, Self::Error>>;
@@ -39,7 +37,7 @@ impl FromRequest for Payload {
let limit = req
.app_data::<PayloadConfig>()
.map(|c| c.limit)
.unwrap_or(Self::Config::default().limit);
.unwrap_or(PayloadConfig::default().limit);
ready(Ok(Payload {
payload: payload.take(),
limit,

meilisearch-http/src/extractors/sequential_extractor.rs (new file, 148 lines)
@@ -0,0 +1,148 @@
#![allow(non_snake_case)]
use std::{future::Future, pin::Pin, task::Poll};

use actix_web::{dev::Payload, FromRequest, Handler, HttpRequest};
use pin_project_lite::pin_project;

/// `SeqHandler` is an actix `Handler` that enforces that extractors errors are returned in the
/// same order as they are defined in the wrapped handler. This is needed because, by default, actix
/// resolves the extractors concurrently, whereas we always need the authentication extractor to
/// throw first.
#[derive(Clone)]
pub struct SeqHandler<H>(pub H);

pub struct SeqFromRequest<T>(T);

/// This macro implements `FromRequest` for arbitrary arity handler, except for one, which is
/// useless anyway.
macro_rules! gen_seq {
($ty:ident; $($T:ident)+) => {
pin_project! {
pub struct $ty<$($T: FromRequest), +> {
$(
#[pin]
$T: ExtractFuture<$T::Future, $T, $T::Error>,
)+
}
}

impl<$($T: FromRequest), +> Future for $ty<$($T),+> {
type Output = Result<SeqFromRequest<($($T),+)>, actix_web::Error>;

fn poll(self: Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> Poll<Self::Output> {
let mut this = self.project();

let mut count_fut = 0;
let mut count_finished = 0;

$(
count_fut += 1;
match this.$T.as_mut().project() {
ExtractProj::Future { fut } => match fut.poll(cx) {
Poll::Ready(Ok(output)) => {
count_finished += 1;
let _ = this
.$T
.as_mut()
.project_replace(ExtractFuture::Done { output });
}
Poll::Ready(Err(error)) => {
count_finished += 1;
let _ = this
.$T
.as_mut()
.project_replace(ExtractFuture::Error { error });
}
Poll::Pending => (),
},
ExtractProj::Done { .. } => count_finished += 1,
ExtractProj::Error { .. } => {
// short circuit if all previous are finished and we had an error.
if count_finished == count_fut {
match this.$T.project_replace(ExtractFuture::Empty) {
ExtractReplaceProj::Error { error } => {
return Poll::Ready(Err(error.into()))
}
_ => unreachable!("Invalid future state"),
}
} else {
count_finished += 1;
}
}
ExtractProj::Empty => unreachable!("From request polled after being finished. {}", stringify!($T)),
}
)+

if count_fut == count_finished {
let result = (
$(
match this.$T.project_replace(ExtractFuture::Empty) {
ExtractReplaceProj::Done { output } => output,
ExtractReplaceProj::Error { error } => return Poll::Ready(Err(error.into())),
_ => unreachable!("Invalid future state"),
},
)+
);

Poll::Ready(Ok(SeqFromRequest(result)))
} else {
Poll::Pending
}
}
}

impl<$($T: FromRequest,)+> FromRequest for SeqFromRequest<($($T,)+)> {
type Error = actix_web::Error;

type Future = $ty<$($T),+>;

fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
$ty {
$(
$T: ExtractFuture::Future {
fut: $T::from_request(req, payload),
},
)+
}
}
}

impl<Han, $($T: FromRequest),+> Handler<SeqFromRequest<($($T),+)>> for SeqHandler<Han>
where
Han: Handler<($($T),+)>,
{
type Output = Han::Output;
type Future = Han::Future;

fn call(&self, args: SeqFromRequest<($($T),+)>) -> Self::Future {
self.0.call(args.0)
}
}
};
}

// Not working for a single argument, but then, it is not really necessary.
// gen_seq! { SeqFromRequestFut1; A }
gen_seq! { SeqFromRequestFut2; A B }
gen_seq! { SeqFromRequestFut3; A B C }
gen_seq! { SeqFromRequestFut4; A B C D }
gen_seq! { SeqFromRequestFut5; A B C D E }
gen_seq! { SeqFromRequestFut6; A B C D E F }

pin_project! {
#[project = ExtractProj]
#[project_replace = ExtractReplaceProj]
enum ExtractFuture<Fut, Res, Err> {
Future {
#[pin]
fut: Fut,
},
Done {
output: Res,
},
Error {
error: Err,
},
Empty,
}
}
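Usage is a one-word wrapper at route-registration time, as the `routes/api_key.rs` hunk further down shows. A minimal sketch with two extractors (the smallest arity `gen_seq!` generates):

    use actix_web::{web, HttpRequest, HttpResponse};

    async fn create_thing(_req: HttpRequest, body: web::Json<serde_json::Value>) -> HttpResponse {
        HttpResponse::Created().json(body.into_inner())
    }

    // Wrapping the handler makes actix poll the two extractors in order,
    // so the first one's error always surfaces before the payload's.
    let resource = web::resource("/things").route(web::post().to(SeqHandler(create_thing)));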
@@ -1,10 +1,11 @@
use meilisearch_lib::heed::Env;
use walkdir::WalkDir;

pub trait EnvSizer {
fn size(&self) -> u64;
}

impl EnvSizer for heed::Env {
impl EnvSizer for Env {
fn size(&self) -> u64 {
WalkDir::new(self.path())
.into_iter()

@@ -2,14 +2,14 @@
#[macro_use]
pub mod error;
pub mod analytics;
mod task;
pub mod task;
#[macro_use]
pub mod extractors;
pub mod helpers;
pub mod option;
pub mod routes;

use std::sync::Arc;
use std::sync::{atomic::AtomicBool, Arc};
use std::time::Duration;

use crate::error::MeilisearchHttpError;
@@ -25,16 +25,29 @@ use extractors::payload::PayloadConfig;
use meilisearch_auth::AuthController;
use meilisearch_lib::MeiliSearch;

pub static AUTOBATCHING_ENABLED: AtomicBool = AtomicBool::new(false);

pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<MeiliSearch> {
let mut meilisearch = MeiliSearch::builder();

// enable autobatching?
AUTOBATCHING_ENABLED.store(
opt.scheduler_options.enable_auto_batching,
std::sync::atomic::Ordering::Relaxed,
);

meilisearch
.set_max_index_size(opt.max_index_size.get_bytes() as usize)
.set_max_task_store_size(opt.max_task_db_size.get_bytes() as usize)
// snapshot
.set_ignore_missing_snapshot(opt.ignore_missing_snapshot)
.set_ignore_snapshot_if_db_exists(opt.ignore_snapshot_if_db_exists)
.set_dump_dst(opt.dumps_dir.clone())
.set_snapshot_interval(Duration::from_secs(opt.snapshot_interval_sec))
.set_snapshot_dir(opt.snapshot_dir.clone());
.set_snapshot_dir(opt.snapshot_dir.clone())
// dump
.set_ignore_missing_dump(opt.ignore_missing_dump)
.set_ignore_dump_if_db_exists(opt.ignore_dump_if_db_exists)
.set_dump_dst(opt.dumps_dir.clone());

if let Some(ref path) = opt.import_snapshot {
meilisearch.set_import_snapshot(path.clone());
@@ -48,7 +61,11 @@ pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<MeiliSearch> {
meilisearch.set_schedule_snapshot();
}

meilisearch.build(opt.db_path.clone(), opt.indexer_options.clone())
meilisearch.build(
opt.db_path.clone(),
opt.indexer_options.clone(),
opt.scheduler_options.clone(),
)
}
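`AUTOBATCHING_ENABLED` is a process-wide toggle written once at startup, which is why `Ordering::Relaxed` suffices. A sketch of how a reader elsewhere in the crate might consult it (illustrative, not a quote from this diff):

    use std::sync::atomic::Ordering;

    fn autobatching_enabled() -> bool {
        // A plain atomic load; no cross-thread ordering is needed because
        // the flag is set once before the HTTP server starts serving.
        crate::AUTOBATCHING_ENABLED.load(Ordering::Relaxed)
    }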

pub fn configure_data(
@@ -90,7 +107,7 @@ pub fn configure_data(
#[cfg(feature = "mini-dashboard")]
pub fn dashboard(config: &mut web::ServiceConfig, enable_frontend: bool) {
use actix_web::HttpResponse;
use actix_web_static_files::Resource;
use static_files::Resource;

mod generated {
include!(concat!(env!("OUT_DIR"), "/generated.rs"));
@@ -105,13 +122,13 @@ pub fn dashboard(config: &mut web::ServiceConfig, enable_frontend: bool) {
} = resource;
// Redirect index.html to /
if path == "index.html" {
config.service(web::resource("/").route(
web::get().to(move || HttpResponse::Ok().content_type(mime_type).body(data)),
));
config.service(web::resource("/").route(web::get().to(move || async move {
HttpResponse::Ok().content_type(mime_type).body(data)
})));
} else {
config.service(web::resource(path).route(
web::get().to(move || HttpResponse::Ok().content_type(mime_type).body(data)),
));
config.service(web::resource(path).route(web::get().to(move || async move {
HttpResponse::Ok().content_type(mime_type).body(data)
})));
}
}
} else {
@@ -131,10 +148,10 @@ macro_rules! create_app {
use actix_web::middleware::TrailingSlash;
use actix_web::App;
use actix_web::{middleware, web};
use meilisearch_error::ResponseError;
use meilisearch_http::error::MeilisearchHttpError;
use meilisearch_http::routes;
use meilisearch_http::{configure_data, dashboard};
use meilisearch_types::error::ResponseError;

App::new()
.configure(|s| configure_data(s, $data.clone(), $auth.clone(), &$opt, $analytics))

@@ -1,13 +1,14 @@
use std::env;
use std::sync::Arc;

use actix_web::http::KeepAlive;
use actix_web::HttpServer;
use clap::Parser;
use meilisearch_auth::AuthController;
use meilisearch_http::analytics;
use meilisearch_http::analytics::Analytics;
use meilisearch_http::{create_app, setup_meilisearch, Opt};
use meilisearch_lib::MeiliSearch;
use structopt::StructOpt;

#[cfg(target_os = "linux")]
#[global_allocator]
@@ -29,7 +30,7 @@ fn setup(opt: &Opt) -> anyhow::Result<()> {

#[actix_web::main]
async fn main() -> anyhow::Result<()> {
let opt = Opt::from_args();
let opt = Opt::parse();

setup(&opt)?;

@@ -50,7 +51,7 @@ async fn main() -> anyhow::Result<()> {
let auth_controller = AuthController::new(&opt.db_path, &opt.master_key)?;

#[cfg(all(not(debug_assertions), feature = "analytics"))]
let (analytics, user) = if opt.analytics() {
let (analytics, user) = if !opt.no_analytics {
analytics::SegmentAnalytics::new(&opt, &meilisearch).await
} else {
analytics::MockAnalytics::new(&opt)
@@ -83,7 +84,8 @@ async fn run_http(
)
})
// Disabling signals allows the server to terminate immediately when a user enters CTRL-C
.disable_signals();
.disable_signals()
.keep_alive(KeepAlive::Os);

if let Some(config) = opt.get_ssl_config()? {
http_server
@@ -101,14 +103,14 @@ pub fn print_launch_resume(opt: &Opt, user: &str) {
let commit_date = option_env!("VERGEN_GIT_COMMIT_TIMESTAMP").unwrap_or("unknown");

let ascii_name = r#"
888b d888 d8b 888 d8b .d8888b. 888
8888b d8888 Y8P 888 Y8P d88P Y88b 888
88888b.d88888 888 Y88b. 888
888Y88888P888 .d88b. 888 888 888 "Y888b. .d88b. 8888b. 888d888 .d8888b 88888b.
888 Y888P 888 d8P Y8b 888 888 888 "Y88b. d8P Y8b "88b 888P" d88P" 888 "88b
888 Y8P 888 88888888 888 888 888 "888 88888888 .d888888 888 888 888 888
888 " 888 Y8b. 888 888 888 Y88b d88P Y8b. 888 888 888 Y88b. 888 888
888 888 "Y8888 888 888 888 "Y8888P" "Y8888 "Y888888 888 "Y8888P 888 888
888b d888 d8b 888 d8b 888
8888b d8888 Y8P 888 Y8P 888
88888b.d88888 888 888
888Y88888P888 .d88b. 888 888 888 .d8888b .d88b. 8888b. 888d888 .d8888b 88888b.
888 Y888P 888 d8P Y8b 888 888 888 88K d8P Y8b "88b 888P" d88P" 888 "88b
888 Y8P 888 88888888 888 888 888 "Y8888b. 88888888 .d888888 888 888 888 888
888 " 888 Y8b. 888 888 888 X88 Y8b. 888 888 888 Y88b. 888 888
888 888 "Y8888 888 888 888 88888P' "Y8888 "Y888888 888 "Y8888P 888 888
"#;

eprintln!("{}", ascii_name);
@@ -125,10 +127,10 @@ pub fn print_launch_resume(opt: &Opt, user: &str) {

#[cfg(all(not(debug_assertions), feature = "analytics"))]
{
if opt.analytics() {
if !opt.no_analytics {
eprintln!(
"
Thank you for using MeiliSearch!
Thank you for using Meilisearch!

We collect anonymized analytics to improve our product and your experience. To learn more, including how to turn off analytics, visit our dedicated documentation page: https://docs.meilisearch.com/learn/what_is_meilisearch/telemetry.html

@@ -146,7 +148,7 @@ Anonymous telemetry:\t\"Enabled\""
eprintln!();

if opt.master_key.is_some() {
eprintln!("A Master Key has been set. Requests to MeiliSearch won't be authorized unless you provide an authentication key.");
eprintln!("A Master Key has been set. Requests to Meilisearch won't be authorized unless you provide an authentication key.");
} else {
eprintln!("No master key found; The server will accept unidentified requests. \
If you need some protection in development mode, please export a key: export MEILI_MASTER_KEY=xxx");

@@ -4,144 +4,169 @@ use std::path::PathBuf;
use std::sync::Arc;

use byte_unit::Byte;
use meilisearch_lib::options::IndexerOpts;
use rustls::internal::pemfile::{certs, pkcs8_private_keys, rsa_private_keys};
use clap::Parser;
use meilisearch_lib::options::{IndexerOpts, SchedulerConfig};
use rustls::{
AllowAnyAnonymousOrAuthenticatedClient, AllowAnyAuthenticatedClient, NoClientAuth,
server::{
AllowAnyAnonymousOrAuthenticatedClient, AllowAnyAuthenticatedClient,
ServerSessionMemoryCache,
},
RootCertStore,
};
use structopt::StructOpt;
use rustls_pemfile::{certs, pkcs8_private_keys, rsa_private_keys};
use serde::Serialize;

const POSSIBLE_ENV: [&str; 2] = ["development", "production"];

#[derive(Debug, Clone, StructOpt)]
#[derive(Debug, Clone, Parser, Serialize)]
#[clap(version)]
pub struct Opt {
/// The destination where the database must be created.
#[structopt(long, env = "MEILI_DB_PATH", default_value = "./data.ms")]
#[clap(long, env = "MEILI_DB_PATH", default_value = "./data.ms")]
pub db_path: PathBuf,

/// The address on which the http server will listen.
#[structopt(long, env = "MEILI_HTTP_ADDR", default_value = "127.0.0.1:7700")]
#[clap(long, env = "MEILI_HTTP_ADDR", default_value = "127.0.0.1:7700")]
pub http_addr: String,

/// The master key allowing you to do everything on the server.
#[structopt(long, env = "MEILI_MASTER_KEY")]
#[serde(skip)]
#[clap(long, env = "MEILI_MASTER_KEY")]
pub master_key: Option<String>,

/// This environment variable must be set to `production` if you are running in production.
/// If the server is running in development mode more logs will be displayed,
/// and the master key can be avoided which implies that there is no security on the updates routes.
/// This is useful to debug when integrating the engine with another service.
#[structopt(long, env = "MEILI_ENV", default_value = "development", possible_values = &POSSIBLE_ENV)]
#[clap(long, env = "MEILI_ENV", default_value = "development", possible_values = &POSSIBLE_ENV)]
pub env: String,

/// Do not send analytics to Meili.
#[cfg(all(not(debug_assertions), feature = "analytics"))]
#[structopt(long, env = "MEILI_NO_ANALYTICS")]
pub no_analytics: Option<Option<bool>>,
#[serde(skip)] // we can't send true
#[clap(long, env = "MEILI_NO_ANALYTICS")]
pub no_analytics: bool,

/// The maximum size, in bytes, of the main lmdb database directory
#[structopt(long, env = "MEILI_MAX_INDEX_SIZE", default_value = "100 GiB")]
#[clap(long, env = "MEILI_MAX_INDEX_SIZE", default_value = "100 GiB")]
pub max_index_size: Byte,

/// The maximum size, in bytes, of the update lmdb database directory
#[structopt(long, env = "MEILI_MAX_TASK_DB_SIZE", default_value = "100 GiB")]
#[clap(long, env = "MEILI_MAX_TASK_DB_SIZE", default_value = "100 GiB")]
pub max_task_db_size: Byte,

/// The maximum size, in bytes, of accepted JSON payloads
#[structopt(long, env = "MEILI_HTTP_PAYLOAD_SIZE_LIMIT", default_value = "100 MB")]
#[clap(long, env = "MEILI_HTTP_PAYLOAD_SIZE_LIMIT", default_value = "100 MB")]
pub http_payload_size_limit: Byte,

/// Read server certificates from CERTFILE.
/// This should contain PEM-format certificates
/// in the right order (the first certificate should
/// certify KEYFILE, the last should be a root CA).
#[structopt(long, env = "MEILI_SSL_CERT_PATH", parse(from_os_str))]
#[serde(skip)]
#[clap(long, env = "MEILI_SSL_CERT_PATH", parse(from_os_str))]
pub ssl_cert_path: Option<PathBuf>,

/// Read private key from KEYFILE. This should be a RSA
/// private key or PKCS8-encoded private key, in PEM format.
#[structopt(long, env = "MEILI_SSL_KEY_PATH", parse(from_os_str))]
#[serde(skip)]
#[clap(long, env = "MEILI_SSL_KEY_PATH", parse(from_os_str))]
pub ssl_key_path: Option<PathBuf>,

/// Enable client authentication, and accept certificates
/// signed by those roots provided in CERTFILE.
#[structopt(long, env = "MEILI_SSL_AUTH_PATH", parse(from_os_str))]
#[clap(long, env = "MEILI_SSL_AUTH_PATH", parse(from_os_str))]
#[serde(skip)]
pub ssl_auth_path: Option<PathBuf>,

/// Read DER-encoded OCSP response from OCSPFILE and staple to certificate.
/// Optional
#[structopt(long, env = "MEILI_SSL_OCSP_PATH", parse(from_os_str))]
#[serde(skip)]
#[clap(long, env = "MEILI_SSL_OCSP_PATH", parse(from_os_str))]
pub ssl_ocsp_path: Option<PathBuf>,

/// Send a fatal alert if the client does not complete client authentication.
#[structopt(long, env = "MEILI_SSL_REQUIRE_AUTH")]
#[serde(skip)]
#[clap(long, env = "MEILI_SSL_REQUIRE_AUTH")]
pub ssl_require_auth: bool,

/// SSL support session resumption
#[structopt(long, env = "MEILI_SSL_RESUMPTION")]
#[serde(skip)]
#[clap(long, env = "MEILI_SSL_RESUMPTION")]
pub ssl_resumption: bool,

/// SSL support tickets.
#[structopt(long, env = "MEILI_SSL_TICKETS")]
#[serde(skip)]
#[clap(long, env = "MEILI_SSL_TICKETS")]
pub ssl_tickets: bool,

/// Defines the path of the snapshot file to import.
/// This option will, by default, stop the process if a database already exists or if no snapshot exists at
/// the given path. If this option is not specified no snapshot is imported.
#[structopt(long)]
#[clap(long)]
pub import_snapshot: Option<PathBuf>,

/// The engine will ignore a missing snapshot and not return an error in such case.
#[structopt(long, requires = "import-snapshot")]
#[clap(long, requires = "import-snapshot")]
pub ignore_missing_snapshot: bool,

/// The engine will skip snapshot importation and not return an error in such case.
#[structopt(long, requires = "import-snapshot")]
#[clap(long, requires = "import-snapshot")]
pub ignore_snapshot_if_db_exists: bool,

/// Defines the directory path where meilisearch will create a snapshot each snapshot_time_gap.
#[structopt(long, env = "MEILI_SNAPSHOT_DIR", default_value = "snapshots/")]
|
||||
#[clap(long, env = "MEILI_SNAPSHOT_DIR", default_value = "snapshots/")]
|
||||
pub snapshot_dir: PathBuf,
|
||||
|
||||
/// Activate snapshot scheduling.
|
||||
#[structopt(long, env = "MEILI_SCHEDULE_SNAPSHOT")]
|
||||
#[clap(long, env = "MEILI_SCHEDULE_SNAPSHOT")]
|
||||
pub schedule_snapshot: bool,
|
||||
|
||||
/// Defines time interval, in seconds, between each snapshot creation.
|
||||
#[structopt(long, env = "MEILI_SNAPSHOT_INTERVAL_SEC", default_value = "86400")] // 24h
|
||||
#[clap(long, env = "MEILI_SNAPSHOT_INTERVAL_SEC", default_value = "86400")] // 24h
|
||||
pub snapshot_interval_sec: u64,
|
||||
|
||||
/// Folder where dumps are created when the dump route is called.
|
||||
#[structopt(long, env = "MEILI_DUMPS_DIR", default_value = "dumps/")]
|
||||
pub dumps_dir: PathBuf,
|
||||
|
||||
/// Import a dump from the specified path, must be a `.dump` file.
|
||||
#[structopt(long, conflicts_with = "import-snapshot")]
|
||||
#[clap(long, conflicts_with = "import-snapshot")]
|
||||
pub import_dump: Option<PathBuf>,
|
||||
|
||||
/// If the dump doesn't exist, load or create the database specified by `db-path` instead.
#[clap(long, requires = "import-dump")]
pub ignore_missing_dump: bool,

/// Ignore the dump if a database already exists, and load that database instead.
#[clap(long, requires = "import-dump")]
pub ignore_dump_if_db_exists: bool,

/// Folder where dumps are created when the dump route is called.
#[clap(long, env = "MEILI_DUMPS_DIR", default_value = "dumps/")]
pub dumps_dir: PathBuf,

/// Set the log level
#[structopt(long, env = "MEILI_LOG_LEVEL", default_value = "info")]
#[clap(long, env = "MEILI_LOG_LEVEL", default_value = "info")]
pub log_level: String,

#[structopt(skip)]
#[serde(flatten)]
#[clap(flatten)]
pub indexer_options: IndexerOpts,

#[serde(flatten)]
#[clap(flatten)]
pub scheduler_options: SchedulerConfig,
}

impl Opt {
/// Whether analytics should be enabled or not.
#[cfg(all(not(debug_assertions), feature = "analytics"))]
pub fn analytics(&self) -> bool {
match self.no_analytics {
None => true,
Some(None) => false,
Some(Some(disabled)) => !disabled,
}
!self.no_analytics
}
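The structopt-to-clap migration above keeps the derive surface almost identical; with clap 3's `derive` and `env` features the boolean flag parses from the CLI or the environment. A reduced, self-contained sketch:

    use clap::Parser;

    #[derive(Debug, Parser)]
    struct MiniOpt {
        /// Do not send analytics to Meili.
        #[clap(long, env = "MEILI_NO_ANALYTICS")]
        no_analytics: bool,
    }

    // `--no-analytics` flips the boolean; leaving it off keeps the default `false`.
    let opt = MiniOpt::parse_from(["meilisearch", "--no-analytics"]);
    assert!(opt.no_analytics);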

pub fn get_ssl_config(&self) -> anyhow::Result<Option<rustls::ServerConfig>> {
if let (Some(cert_path), Some(key_path)) = (&self.ssl_cert_path, &self.ssl_key_path) {
let client_auth = match &self.ssl_auth_path {
let config = rustls::ServerConfig::builder().with_safe_defaults();

let config = match &self.ssl_auth_path {
Some(auth_path) => {
let roots = load_certs(auth_path.to_path_buf())?;
let mut client_auth_roots = RootCertStore::empty();
@@ -149,30 +174,32 @@ impl Opt {
client_auth_roots.add(&root).unwrap();
}
if self.ssl_require_auth {
AllowAnyAuthenticatedClient::new(client_auth_roots)
let verifier = AllowAnyAuthenticatedClient::new(client_auth_roots);
config.with_client_cert_verifier(verifier)
} else {
AllowAnyAnonymousOrAuthenticatedClient::new(client_auth_roots)
let verifier =
AllowAnyAnonymousOrAuthenticatedClient::new(client_auth_roots);
config.with_client_cert_verifier(verifier)
}
}
None => NoClientAuth::new(),
None => config.with_no_client_auth(),
};

let mut config = rustls::ServerConfig::new(client_auth);
config.key_log = Arc::new(rustls::KeyLogFile::new());

let certs = load_certs(cert_path.to_path_buf())?;
let privkey = load_private_key(key_path.to_path_buf())?;
let ocsp = load_ocsp(&self.ssl_ocsp_path)?;
config
.set_single_cert_with_ocsp_and_sct(certs, privkey, ocsp, vec![])
let mut config = config
.with_single_cert_with_ocsp_and_sct(certs, privkey, ocsp, vec![])
.map_err(|_| anyhow::anyhow!("bad certificates/private key"))?;

config.key_log = Arc::new(rustls::KeyLogFile::new());

if self.ssl_resumption {
config.set_persistence(rustls::ServerSessionMemoryCache::new(256));
config.session_storage = ServerSessionMemoryCache::new(256);
}

if self.ssl_tickets {
config.ticketer = rustls::Ticketer::new();
config.ticketer = rustls::Ticketer::new().unwrap();
}

Ok(Some(config))
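For contrast with the branching above, the minimal happy path of the rustls 0.20 builder chain (no client auth, no OCSP) looks like this; a sketch under the crate versions pinned in the Cargo.toml hunk:

    use std::sync::Arc;

    use rustls::{Certificate, PrivateKey, ServerConfig};

    fn tls_config(certs: Vec<Certificate>, key: PrivateKey) -> anyhow::Result<ServerConfig> {
        // The 0.20 builder replaces `ServerConfig::new(client_auth)`:
        // protocol defaults first, then client auth, then the certificate chain.
        let mut config = ServerConfig::builder()
            .with_safe_defaults()
            .with_no_client_auth()
            .with_single_cert(certs, key)?;
        config.key_log = Arc::new(rustls::KeyLogFile::new());
        Ok(config)
    }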
@@ -186,7 +213,9 @@ fn load_certs(filename: PathBuf) -> anyhow::Result<Vec<rustls::Certificate>> {
let certfile =
fs::File::open(filename).map_err(|_| anyhow::anyhow!("cannot open certificate file"))?;
let mut reader = BufReader::new(certfile);
certs(&mut reader).map_err(|_| anyhow::anyhow!("cannot read certificate file"))
certs(&mut reader)
.map(|certs| certs.into_iter().map(rustls::Certificate).collect())
.map_err(|_| anyhow::anyhow!("cannot read certificate file"))
}

fn load_private_key(filename: PathBuf) -> anyhow::Result<rustls::PrivateKey> {
@@ -211,10 +240,10 @@ fn load_private_key(filename: PathBuf) -> anyhow::Result<rustls::PrivateKey> {

// prefer to load pkcs8 keys
if !pkcs8_keys.is_empty() {
Ok(pkcs8_keys[0].clone())
Ok(rustls::PrivateKey(pkcs8_keys[0].clone()))
} else {
assert!(!rsa_keys.is_empty());
Ok(rsa_keys[0].clone())
Ok(rustls::PrivateKey(rsa_keys[0].clone()))
}
}

@@ -230,3 +259,13 @@ fn load_ocsp(filename: &Option<PathBuf>) -> anyhow::Result<Vec<u8>> {

Ok(ret)
}

#[cfg(test)]
mod test {
use super::*;

#[test]
fn test_valid_opt() {
assert!(Opt::try_parse_from(Some("")).is_ok());
}
}

@@ -1,134 +1,160 @@
use std::str;

use actix_web::{web, HttpRequest, HttpResponse};
use chrono::SecondsFormat;

use meilisearch_auth::{generate_key, Action, AuthController, Key};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use time::OffsetDateTime;
use uuid::Uuid;

use crate::extractors::authentication::{policies::*, GuardedData};
use meilisearch_error::ResponseError;
use meilisearch_auth::{error::AuthControllerError, Action, AuthController, Key};
use meilisearch_types::error::{Code, ResponseError};

use crate::extractors::{
authentication::{policies::*, GuardedData},
sequential_extractor::SeqHandler,
};
use crate::routes::Pagination;

pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(
web::resource("")
.route(web::post().to(create_api_key))
.route(web::get().to(list_api_keys)),
.route(web::post().to(SeqHandler(create_api_key)))
.route(web::get().to(SeqHandler(list_api_keys))),
)
.service(
web::resource("/{api_key}")
.route(web::get().to(get_api_key))
.route(web::patch().to(patch_api_key))
.route(web::delete().to(delete_api_key)),
web::resource("/{key}")
.route(web::get().to(SeqHandler(get_api_key)))
.route(web::patch().to(SeqHandler(patch_api_key)))
.route(web::delete().to(SeqHandler(delete_api_key))),
);
}

pub async fn create_api_key(
auth_controller: GuardedData<MasterPolicy, AuthController>,
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_CREATE }>, AuthController>,
body: web::Json<Value>,
_req: HttpRequest,
) -> Result<HttpResponse, ResponseError> {
let key = auth_controller.create_key(body.into_inner()).await?;
let res = KeyView::from_key(key, auth_controller.get_master_key());
let v = body.into_inner();
let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
let key = auth_controller.create_key(v)?;
Ok(KeyView::from_key(key, &auth_controller))
})
.await
.map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;

Ok(HttpResponse::Created().json(res))
}
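The double `??` above unwraps two stacked results: the outer `JoinError` from `spawn_blocking` (panic or cancellation) and the inner `AuthControllerError`. A stripped-down sketch of the same layering with plain types:

    async fn run_blocking() -> Result<u32, String> {
        // First `?`: the task failed to run at all. Second `?`: the closure's own error.
        let value = tokio::task::spawn_blocking(|| -> Result<u32, String> { Ok(42) })
            .await
            .map_err(|e| e.to_string())??;
        Ok(value)
    }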

pub async fn list_api_keys(
auth_controller: GuardedData<MasterPolicy, AuthController>,
_req: HttpRequest,
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_GET }>, AuthController>,
paginate: web::Query<Pagination>,
) -> Result<HttpResponse, ResponseError> {
let keys = auth_controller.list_keys().await?;
let res: Vec<_> = keys
.into_iter()
.map(|k| KeyView::from_key(k, auth_controller.get_master_key()))
.collect();
let page_view = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
let keys = auth_controller.list_keys()?;
let page_view = paginate.auto_paginate_sized(
keys.into_iter()
.map(|k| KeyView::from_key(k, &auth_controller)),
);

Ok(HttpResponse::Ok().json(KeyListView::from(res)))
Ok(page_view)
})
.await
.map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;

Ok(HttpResponse::Ok().json(page_view))
}

pub async fn get_api_key(
auth_controller: GuardedData<MasterPolicy, AuthController>,
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_GET }>, AuthController>,
path: web::Path<AuthParam>,
) -> Result<HttpResponse, ResponseError> {
// keep 8 first characters that are the ID of the API key.
let key = auth_controller.get_key(&path.api_key).await?;
let res = KeyView::from_key(key, auth_controller.get_master_key());
let key = path.into_inner().key;

let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
let uid =
Uuid::parse_str(&key).or_else(|_| auth_controller.get_uid_from_encoded_key(&key))?;
let key = auth_controller.get_key(uid)?;

Ok(KeyView::from_key(key, &auth_controller))
})
.await
.map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;

Ok(HttpResponse::Ok().json(res))
}

pub async fn patch_api_key(
auth_controller: GuardedData<MasterPolicy, AuthController>,
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_UPDATE }>, AuthController>,
body: web::Json<Value>,
path: web::Path<AuthParam>,
) -> Result<HttpResponse, ResponseError> {
let key = auth_controller
// keep 8 first characters that are the ID of the API key.
.update_key(&path.api_key, body.into_inner())
.await?;
let res = KeyView::from_key(key, auth_controller.get_master_key());
let key = path.into_inner().key;
let body = body.into_inner();
let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
let uid =
Uuid::parse_str(&key).or_else(|_| auth_controller.get_uid_from_encoded_key(&key))?;
let key = auth_controller.update_key(uid, body)?;

Ok(KeyView::from_key(key, &auth_controller))
})
.await
.map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;

Ok(HttpResponse::Ok().json(res))
}

pub async fn delete_api_key(
auth_controller: GuardedData<MasterPolicy, AuthController>,
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_DELETE }>, AuthController>,
path: web::Path<AuthParam>,
) -> Result<HttpResponse, ResponseError> {
// keep 8 first characters that are the ID of the API key.
auth_controller.delete_key(&path.api_key).await?;
let key = path.into_inner().key;
tokio::task::spawn_blocking(move || {
let uid =
Uuid::parse_str(&key).or_else(|_| auth_controller.get_uid_from_encoded_key(&key))?;
auth_controller.delete_key(uid)
})
.await
.map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;

Ok(HttpResponse::NoContent().finish())
}

#[derive(Deserialize)]
pub struct AuthParam {
api_key: String,
key: String,
}

#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct KeyView {
name: Option<String>,
description: Option<String>,
key: String,
uid: Uuid,
actions: Vec<Action>,
indexes: Vec<String>,
expires_at: Option<String>,
created_at: String,
updated_at: String,
#[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
expires_at: Option<OffsetDateTime>,
#[serde(serialize_with = "time::serde::rfc3339::serialize")]
created_at: OffsetDateTime,
#[serde(serialize_with = "time::serde::rfc3339::serialize")]
updated_at: OffsetDateTime,
}
|
||||
|
||||
impl KeyView {
|
||||
fn from_key(key: Key, master_key: Option<&String>) -> Self {
|
||||
let key_id = str::from_utf8(&key.id).unwrap();
|
||||
let generated_key = match master_key {
|
||||
Some(master_key) => generate_key(master_key.as_bytes(), key_id),
|
||||
None => generate_key(&[], key_id),
|
||||
};
|
||||
fn from_key(key: Key, auth: &AuthController) -> Self {
|
||||
let generated_key = auth.generate_key(key.uid).unwrap_or_default();
|
||||
|
||||
KeyView {
|
||||
name: key.name,
|
||||
description: key.description,
|
||||
key: generated_key,
|
||||
uid: key.uid,
|
||||
actions: key.actions,
|
||||
indexes: key.indexes,
|
||||
expires_at: key
|
||||
.expires_at
|
||||
.map(|dt| dt.to_rfc3339_opts(SecondsFormat::Secs, true)),
|
||||
created_at: key.created_at.to_rfc3339_opts(SecondsFormat::Secs, true),
|
||||
updated_at: key.updated_at.to_rfc3339_opts(SecondsFormat::Secs, true),
|
||||
indexes: key.indexes.into_iter().map(String::from).collect(),
|
||||
expires_at: key.expires_at,
|
||||
created_at: key.created_at,
|
||||
updated_at: key.updated_at,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct KeyListView {
|
||||
results: Vec<KeyView>,
|
||||
}
|
||||
|
||||
impl From<Vec<KeyView>> for KeyListView {
|
||||
fn from(results: Vec<KeyView>) -> Self {
|
||||
Self { results }
|
||||
}
|
||||
}
|
||||
|
||||
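The rewritten key handlers above move every `AuthController` call into `tokio::task::spawn_blocking`, since the auth store does synchronous disk I/O, and then unwrap two layers of `Result` with `??`. Below is a minimal sketch of that pattern; `StoreError`, `ApiError`, and `blocking_lookup` are hypothetical stand-ins rather than Meilisearch types, and it assumes tokio with the `rt-multi-thread` and `macros` features.

use tokio::task;

// Stand-ins for AuthControllerError / ResponseError, just to show the
// control flow of `spawn_blocking(...).await.map_err(...)??`.
#[derive(Debug)]
struct StoreError(String);
#[derive(Debug)]
struct ApiError(String);

impl From<StoreError> for ApiError {
    fn from(e: StoreError) -> Self {
        ApiError(e.0)
    }
}

fn blocking_lookup(id: u32) -> Result<String, StoreError> {
    // Imagine a synchronous LMDB/heed read here.
    if id == 0 {
        Err(StoreError("unknown key".into()))
    } else {
        Ok(format!("key-{id}"))
    }
}

async fn lookup(id: u32) -> Result<String, ApiError> {
    // The first `?` unwraps the JoinError (the task panicked or was
    // cancelled); the second unwraps the closure's own error.
    let key = task::spawn_blocking(move || blocking_lookup(id))
        .await
        .map_err(|e| ApiError(e.to_string()))??;
    Ok(key)
}

#[tokio::main]
async fn main() {
    assert_eq!(lookup(7).await.unwrap(), "key-7");
    assert!(lookup(0).await.is_err());
}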
@@ -1,16 +1,16 @@
 use actix_web::{web, HttpRequest, HttpResponse};
 use log::debug;
-use meilisearch_error::ResponseError;
 use meilisearch_lib::MeiliSearch;
-use serde::{Deserialize, Serialize};
+use meilisearch_types::error::ResponseError;
 use serde_json::json;

 use crate::analytics::Analytics;
 use crate::extractors::authentication::{policies::*, GuardedData};
+use crate::extractors::sequential_extractor::SeqHandler;
 use crate::task::SummarizedTaskView;

 pub fn configure(cfg: &mut web::ServiceConfig) {
-    cfg.service(web::resource("").route(web::post().to(create_dump)))
-        .service(web::resource("/{dump_uid}/status").route(web::get().to(get_dump_status)));
+    cfg.service(web::resource("").route(web::post().to(SeqHandler(create_dump))));
 }

 pub async fn create_dump(
@@ -20,29 +20,8 @@ pub async fn create_dump(
 ) -> Result<HttpResponse, ResponseError> {
     analytics.publish("Dump Created".to_string(), json!({}), Some(&req));

-    let res = meilisearch.create_dump().await?;
+    let res: SummarizedTaskView = meilisearch.register_dump_task().await?.into();

     debug!("returns: {:?}", res);
     Ok(HttpResponse::Accepted().json(res))
 }
-
-#[derive(Debug, Serialize)]
-#[serde(rename_all = "camelCase")]
-struct DumpStatusResponse {
-    status: String,
-}
-
-#[derive(Deserialize)]
-struct DumpParam {
-    dump_uid: String,
-}
-
-async fn get_dump_status(
-    meilisearch: GuardedData<ActionPolicy<{ actions::DUMPS_GET }>, MeiliSearch>,
-    path: web::Path<DumpParam>,
-) -> Result<HttpResponse, ResponseError> {
-    let res = meilisearch.dump_info(path.dump_uid.clone()).await?;
-
-    debug!("returns: {:?}", res);
-    Ok(HttpResponse::Ok().json(res))
-}
@@ -6,13 +6,15 @@ use actix_web::{web, HttpRequest, HttpResponse};
 use bstr::ByteSlice;
 use futures::{Stream, StreamExt};
 use log::debug;
-use meilisearch_error::ResponseError;
 use meilisearch_lib::index_controller::{DocumentAdditionFormat, Update};
 use meilisearch_lib::milli::update::IndexDocumentsMethod;
 use meilisearch_lib::MeiliSearch;
+use meilisearch_types::error::ResponseError;
+use meilisearch_types::star_or::StarOr;
 use mime::Mime;
 use once_cell::sync::Lazy;
 use serde::Deserialize;
+use serde_cs::vec::CS;
 use serde_json::Value;
 use tokio::sync::mpsc;

@@ -20,11 +22,10 @@ use crate::analytics::Analytics;
 use crate::error::MeilisearchHttpError;
 use crate::extractors::authentication::{policies::*, GuardedData};
 use crate::extractors::payload::Payload;
+use crate::extractors::sequential_extractor::SeqHandler;
+use crate::routes::{fold_star_or, PaginationView};
 use crate::task::SummarizedTaskView;

-const DEFAULT_RETRIEVE_DOCUMENTS_OFFSET: usize = 0;
-const DEFAULT_RETRIEVE_DOCUMENTS_LIMIT: usize = 20;

 static ACCEPTED_CONTENT_TYPE: Lazy<Vec<String>> = Lazy::new(|| {
     vec![
         "application/json".to_string(),
@@ -45,7 +46,7 @@ fn payload_to_stream(mut payload: Payload) -> impl Stream<Item = Result<Bytes, P
 }

 /// Extracts the mime type from the content type and return
-/// a meilisearch error if anyhthing bad happen.
+/// a meilisearch error if anything bad happen.
 fn extract_mime_type(req: &HttpRequest) -> Result<Option<Mime>, MeilisearchHttpError> {
     match req.mime_type() {
         Ok(Some(mime)) => Ok(Some(mime)),
@@ -71,28 +72,38 @@ pub struct DocumentParam {
 pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(
         web::resource("")
-            .route(web::get().to(get_all_documents))
-            .route(web::post().to(add_documents))
-            .route(web::put().to(update_documents))
-            .route(web::delete().to(clear_all_documents)),
+            .route(web::get().to(SeqHandler(get_all_documents)))
+            .route(web::post().to(SeqHandler(add_documents)))
+            .route(web::put().to(SeqHandler(update_documents)))
+            .route(web::delete().to(SeqHandler(clear_all_documents))),
     )
     // this route needs to be before the /documents/{document_id} to match properly
-    .service(web::resource("/delete-batch").route(web::post().to(delete_documents)))
+    .service(web::resource("/delete-batch").route(web::post().to(SeqHandler(delete_documents))))
    .service(
         web::resource("/{document_id}")
-            .route(web::get().to(get_document))
-            .route(web::delete().to(delete_document)),
+            .route(web::get().to(SeqHandler(get_document)))
+            .route(web::delete().to(SeqHandler(delete_document))),
     );
 }

+#[derive(Deserialize, Debug)]
+#[serde(rename_all = "camelCase", deny_unknown_fields)]
+pub struct GetDocument {
+    fields: Option<CS<StarOr<String>>>,
+}

 pub async fn get_document(
     meilisearch: GuardedData<ActionPolicy<{ actions::DOCUMENTS_GET }>, MeiliSearch>,
     path: web::Path<DocumentParam>,
+    params: web::Query<GetDocument>,
 ) -> Result<HttpResponse, ResponseError> {
     let index = path.index_uid.clone();
     let id = path.document_id.clone();
+    let GetDocument { fields } = params.into_inner();
+    let attributes_to_retrieve = fields.and_then(fold_star_or);

     let document = meilisearch
-        .document(index, id, None as Option<Vec<String>>)
+        .document(index, id, attributes_to_retrieve)
         .await?;
     debug!("returns: {:?}", document);
     Ok(HttpResponse::Ok().json(document))
@@ -115,9 +126,11 @@ pub async fn delete_document(
 #[derive(Deserialize, Debug)]
 #[serde(rename_all = "camelCase", deny_unknown_fields)]
 pub struct BrowseQuery {
-    offset: Option<usize>,
-    limit: Option<usize>,
-    attributes_to_retrieve: Option<String>,
+    #[serde(default)]
+    offset: usize,
+    #[serde(default = "crate::routes::PAGINATION_DEFAULT_LIMIT")]
+    limit: usize,
+    fields: Option<CS<StarOr<String>>>,
 }

 pub async fn get_all_documents(
@@ -126,27 +139,21 @@ pub async fn get_all_documents(
     params: web::Query<BrowseQuery>,
 ) -> Result<HttpResponse, ResponseError> {
     debug!("called with params: {:?}", params);
-    let attributes_to_retrieve = params.attributes_to_retrieve.as_ref().and_then(|attrs| {
-        let mut names = Vec::new();
-        for name in attrs.split(',').map(String::from) {
-            if name == "*" {
-                return None;
-            }
-            names.push(name);
-        }
-        Some(names)
-    });
+    let BrowseQuery {
+        limit,
+        offset,
+        fields,
+    } = params.into_inner();
+    let attributes_to_retrieve = fields.and_then(fold_star_or);

-    let documents = meilisearch
-        .documents(
-            path.into_inner(),
-            params.offset.unwrap_or(DEFAULT_RETRIEVE_DOCUMENTS_OFFSET),
-            params.limit.unwrap_or(DEFAULT_RETRIEVE_DOCUMENTS_LIMIT),
-            attributes_to_retrieve,
-        )
+    let (total, documents) = meilisearch
+        .documents(path.into_inner(), offset, limit, attributes_to_retrieve)
         .await?;
-    debug!("returns: {:?}", documents);
-    Ok(HttpResponse::Ok().json(documents))
+
+    let ret = PaginationView::new(offset, limit, total as usize, documents);
+
+    debug!("returns: {:?}", ret);
+    Ok(HttpResponse::Ok().json(ret))
 }

 #[derive(Deserialize, Debug)]
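The new `fields` parameter is deserialized as `CS<StarOr<String>>` (a comma-separated list whose items are either `*` or a value) and collapsed with `fold_star_or`, so `?fields=title,overview` selects attributes while any `*` means "no restriction". A self-contained sketch of just the folding semantics, with a local `StarOr` stand-in instead of the `meilisearch_types`/`serde_cs` types:

#[derive(Debug)]
enum StarOr<T> {
    Star,
    Other(T),
}

fn fold_star_or<T>(values: Vec<StarOr<T>>) -> Option<Vec<T>> {
    // Collecting `Option<T>` into `Option<Vec<T>>` short-circuits to `None`
    // as soon as a `Star` is encountered.
    values
        .into_iter()
        .map(|v| match v {
            StarOr::Star => None,
            StarOr::Other(val) => Some(val),
        })
        .collect()
}

fn main() {
    // ?fields=title,overview -> retrieve only those attributes
    let selected = fold_star_or(vec![
        StarOr::Other("title".to_string()),
        StarOr::Other("overview".to_string()),
    ]);
    assert_eq!(selected, Some(vec!["title".into(), "overview".into()]));

    // ?fields=title,* -> `*` wins, meaning "all attributes"
    let all: Option<Vec<String>> = fold_star_or(vec![
        StarOr::Other("title".to_string()),
        StarOr::Star,
    ]);
    assert_eq!(all, None);
}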
@@ -1,57 +1,60 @@
 use actix_web::{web, HttpRequest, HttpResponse};
-use chrono::{DateTime, Utc};
 use log::debug;
-use meilisearch_error::ResponseError;
 use meilisearch_lib::index_controller::Update;
 use meilisearch_lib::MeiliSearch;
+use meilisearch_types::error::ResponseError;
 use serde::{Deserialize, Serialize};
 use serde_json::json;
+use time::OffsetDateTime;

 use crate::analytics::Analytics;
 use crate::extractors::authentication::{policies::*, GuardedData};
+use crate::extractors::sequential_extractor::SeqHandler;
 use crate::task::SummarizedTaskView;

+use super::Pagination;

 pub mod documents;
 pub mod search;
 pub mod settings;
 pub mod tasks;

 pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(
         web::resource("")
             .route(web::get().to(list_indexes))
-            .route(web::post().to(create_index)),
+            .route(web::post().to(SeqHandler(create_index))),
     )
     .service(
         web::scope("/{index_uid}")
             .service(
                 web::resource("")
-                    .route(web::get().to(get_index))
-                    .route(web::put().to(update_index))
-                    .route(web::delete().to(delete_index)),
+                    .route(web::get().to(SeqHandler(get_index)))
+                    .route(web::patch().to(SeqHandler(update_index)))
+                    .route(web::delete().to(SeqHandler(delete_index))),
             )
-            .service(web::resource("/stats").route(web::get().to(get_index_stats)))
+            .service(web::resource("/stats").route(web::get().to(SeqHandler(get_index_stats))))
             .service(web::scope("/documents").configure(documents::configure))
             .service(web::scope("/search").configure(search::configure))
             .service(web::scope("/tasks").configure(tasks::configure))
             .service(web::scope("/settings").configure(settings::configure)),
     );
 }

 pub async fn list_indexes(
     data: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, MeiliSearch>,
+    paginate: web::Query<Pagination>,
 ) -> Result<HttpResponse, ResponseError> {
-    let filters = data.filters();
-    let mut indexes = data.list_indexes().await?;
-    if let Some(indexes_filter) = filters.indexes.as_ref() {
-        indexes = indexes
-            .into_iter()
-            .filter(|i| indexes_filter.contains(&i.uid))
-            .collect();
-    }
+    let search_rules = &data.filters().search_rules;
+    let indexes: Vec<_> = data.list_indexes().await?;
+    let nb_indexes = indexes.len();
+    let iter = indexes
+        .into_iter()
+        .filter(|i| search_rules.is_index_authorized(&i.uid));
+    let ret = paginate
+        .into_inner()
+        .auto_paginate_unsized(nb_indexes, iter);

-    debug!("returns: {:?}", indexes);
-    Ok(HttpResponse::Ok().json(indexes))
+    debug!("returns: {:?}", ret);
+    Ok(HttpResponse::Ok().json(ret))
 }

 #[derive(Debug, Deserialize)]
@@ -96,9 +99,12 @@ pub struct UpdateIndexRequest {
 pub struct UpdateIndexResponse {
     name: String,
     uid: String,
-    created_at: DateTime<Utc>,
-    updated_at: DateTime<Utc>,
-    primary_key: Option<String>,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    created_at: OffsetDateTime,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    updated_at: OffsetDateTime,
+    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
+    primary_key: OffsetDateTime,
 }

 pub async fn get_index(
@@ -1,19 +1,25 @@
 use actix_web::{web, HttpRequest, HttpResponse};
 use log::debug;
-use meilisearch_error::ResponseError;
-use meilisearch_lib::index::{default_crop_length, SearchQuery, DEFAULT_SEARCH_LIMIT};
+use meilisearch_auth::IndexSearchRules;
+use meilisearch_lib::index::{
+    SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG,
+    DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT,
+};
 use meilisearch_lib::MeiliSearch;
+use meilisearch_types::error::ResponseError;
 use serde::Deserialize;
+use serde_cs::vec::CS;
 use serde_json::Value;

 use crate::analytics::{Analytics, SearchAggregator};
 use crate::extractors::authentication::{policies::*, GuardedData};
+use crate::extractors::sequential_extractor::SeqHandler;

 pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(
         web::resource("")
-            .route(web::get().to(search_with_url_query))
-            .route(web::post().to(search_with_post)),
+            .route(web::get().to(SeqHandler(search_with_url_query)))
+            .route(web::post().to(SeqHandler(search_with_post))),
     );
 }

@@ -23,36 +29,26 @@ pub struct SearchQueryGet {
     q: Option<String>,
     offset: Option<usize>,
     limit: Option<usize>,
-    attributes_to_retrieve: Option<String>,
-    attributes_to_crop: Option<String>,
-    #[serde(default = "default_crop_length")]
+    attributes_to_retrieve: Option<CS<String>>,
+    attributes_to_crop: Option<CS<String>>,
+    #[serde(default = "DEFAULT_CROP_LENGTH")]
     crop_length: usize,
-    attributes_to_highlight: Option<String>,
+    attributes_to_highlight: Option<CS<String>>,
     filter: Option<String>,
     sort: Option<String>,
     #[serde(default = "Default::default")]
-    matches: bool,
-    facets_distribution: Option<String>,
+    show_matches_position: bool,
+    facets: Option<CS<String>>,
+    #[serde(default = "DEFAULT_HIGHLIGHT_PRE_TAG")]
+    highlight_pre_tag: String,
+    #[serde(default = "DEFAULT_HIGHLIGHT_POST_TAG")]
+    highlight_post_tag: String,
+    #[serde(default = "DEFAULT_CROP_MARKER")]
+    crop_marker: String,
 }

 impl From<SearchQueryGet> for SearchQuery {
     fn from(other: SearchQueryGet) -> Self {
-        let attributes_to_retrieve = other
-            .attributes_to_retrieve
-            .map(|attrs| attrs.split(',').map(String::from).collect());
-
-        let attributes_to_crop = other
-            .attributes_to_crop
-            .map(|attrs| attrs.split(',').map(String::from).collect());
-
-        let attributes_to_highlight = other
-            .attributes_to_highlight
-            .map(|attrs| attrs.split(',').map(String::from).collect());
-
-        let facets_distribution = other
-            .facets_distribution
-            .map(|attrs| attrs.split(',').map(String::from).collect());

         let filter = match other.filter {
             Some(f) => match serde_json::from_str(&f) {
                 Ok(v) => Some(v),
@@ -61,20 +57,45 @@ impl From<SearchQueryGet> for SearchQuery {
             None => None,
         };

-        let sort = other.sort.map(|attr| fix_sort_query_parameters(&attr));

         Self {
             q: other.q,
             offset: other.offset,
-            limit: other.limit.unwrap_or(DEFAULT_SEARCH_LIMIT),
-            attributes_to_retrieve,
-            attributes_to_crop,
+            limit: other.limit.unwrap_or_else(DEFAULT_SEARCH_LIMIT),
+            attributes_to_retrieve: other
+                .attributes_to_retrieve
+                .map(|o| o.into_iter().collect()),
+            attributes_to_crop: other.attributes_to_crop.map(|o| o.into_iter().collect()),
             crop_length: other.crop_length,
-            attributes_to_highlight,
+            attributes_to_highlight: other
+                .attributes_to_highlight
+                .map(|o| o.into_iter().collect()),
             filter,
-            sort,
-            matches: other.matches,
-            facets_distribution,
+            sort: other.sort.map(|attr| fix_sort_query_parameters(&attr)),
+            show_matches_position: other.show_matches_position,
+            facets: other.facets.map(|o| o.into_iter().collect()),
+            highlight_pre_tag: other.highlight_pre_tag,
+            highlight_post_tag: other.highlight_post_tag,
+            crop_marker: other.crop_marker,
         }
     }
 }

+/// Incorporate search rules in search query
+fn add_search_rules(query: &mut SearchQuery, rules: IndexSearchRules) {
+    query.filter = match (query.filter.take(), rules.filter) {
+        (None, rules_filter) => rules_filter,
+        (filter, None) => filter,
+        (Some(filter), Some(rules_filter)) => {
+            let filter = match filter {
+                Value::Array(filter) => filter,
+                filter => vec![filter],
+            };
+            let rules_filter = match rules_filter {
+                Value::Array(rules_filter) => rules_filter,
+                rules_filter => vec![rules_filter],
+            };

+            Some(Value::Array([filter, rules_filter].concat()))
+        }
+    }
+}
@@ -90,10 +111,9 @@ fn fix_sort_query_parameters(sort_query: &str) -> Vec<String> {
             sort_parameters.push(current_sort.to_string());
             merge = true;
         } else if merge && !sort_parameters.is_empty() {
-            sort_parameters
-                .last_mut()
-                .unwrap()
-                .push_str(&format!(",{}", current_sort));
+            let s = sort_parameters.last_mut().unwrap();
+            s.push(',');
+            s.push_str(current_sort);
             if current_sort.ends_with("):desc") || current_sort.ends_with("):asc") {
                 merge = false;
             }
@@ -113,11 +133,21 @@ pub async fn search_with_url_query(
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     debug!("called with params: {:?}", params);
-    let query: SearchQuery = params.into_inner().into();
+    let mut query: SearchQuery = params.into_inner().into();

+    let index_uid = path.into_inner();
+    // Tenant token search_rules.
+    if let Some(search_rules) = meilisearch
+        .filters()
+        .search_rules
+        .get_index_search_rules(&index_uid)
+    {
+        add_search_rules(&mut query, search_rules);
+    }

     let mut aggregate = SearchAggregator::from_query(&query, &req);

-    let search_result = meilisearch.search(path.into_inner(), query).await;
+    let search_result = meilisearch.search(index_uid, query).await;
     if let Ok(ref search_result) = search_result {
         aggregate.succeed(search_result);
     }
@@ -125,10 +155,6 @@ pub async fn search_with_url_query(

     let search_result = search_result?;

-    // Tests that the nb_hits is always set to false
-    #[cfg(test)]
-    assert!(!search_result.exhaustive_nb_hits);

     debug!("returns: {:?}", search_result);
     Ok(HttpResponse::Ok().json(search_result))
 }
@@ -140,12 +166,22 @@ pub async fn search_with_post(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    let query = params.into_inner();
+    let mut query = params.into_inner();
     debug!("search called with params: {:?}", query);

+    let index_uid = path.into_inner();
+    // Tenant token search_rules.
+    if let Some(search_rules) = meilisearch
+        .filters()
+        .search_rules
+        .get_index_search_rules(&index_uid)
+    {
+        add_search_rules(&mut query, search_rules);
+    }

     let mut aggregate = SearchAggregator::from_query(&query, &req);

-    let search_result = meilisearch.search(path.into_inner(), query).await;
+    let search_result = meilisearch.search(index_uid, query).await;
     if let Ok(ref search_result) = search_result {
         aggregate.succeed(search_result);
     }
@@ -153,10 +189,6 @@ pub async fn search_with_post(

     let search_result = search_result?;

-    // Tests that the nb_hits is always set to false
-    #[cfg(test)]
-    assert!(!search_result.exhaustive_nb_hits);

     debug!("returns: {:?}", search_result);
     Ok(HttpResponse::Ok().json(search_result))
 }
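`add_search_rules` above ANDs a tenant token's filter with the caller's filter by concatenating both into one outer array, which Meilisearch interprets as a conjunction. A sketch of that merge using only `serde_json` (assumed as a dependency), mirroring the match in the diff:

use serde_json::{json, Value};

// An outer array of filters is a conjunction, so both the user filter and
// the tenant-token filter are kept.
fn merge_filters(query_filter: Option<Value>, rules_filter: Option<Value>) -> Option<Value> {
    match (query_filter, rules_filter) {
        (None, rules) => rules,
        (filter, None) => filter,
        (Some(filter), Some(rules)) => {
            let filter = match filter {
                Value::Array(f) => f,
                f => vec![f],
            };
            let rules = match rules {
                Value::Array(r) => r,
                r => vec![r],
            };
            Some(Value::Array([filter, rules].concat()))
        }
    }
}

fn main() {
    let merged = merge_filters(
        Some(json!("genre = horror")),
        Some(json!("user_id = 42")),
    );
    // Both constraints must now hold for a document to match.
    assert_eq!(merged, Some(json!(["genre = horror", "user_id = 42"])));
}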
@@ -1,10 +1,10 @@
 use log::debug;

 use actix_web::{web, HttpRequest, HttpResponse};
-use meilisearch_error::ResponseError;
 use meilisearch_lib::index::{Settings, Unchecked};
 use meilisearch_lib::index_controller::Update;
 use meilisearch_lib::MeiliSearch;
+use meilisearch_types::error::ResponseError;
 use serde_json::json;

 use crate::analytics::Analytics;
@@ -13,7 +13,7 @@ use crate::task::SummarizedTaskView;

 #[macro_export]
 macro_rules! make_setting_route {
-    ($route:literal, $type:ty, $attr:ident, $camelcase_attr:literal, $analytics_var:ident, $analytics:expr) => {
+    ($route:literal, $update_verb:ident, $type:ty, $attr:ident, $camelcase_attr:literal, $analytics_var:ident, $analytics:expr) => {
         pub mod $attr {
             use actix_web::{web, HttpRequest, HttpResponse, Resource};
             use log::debug;
@@ -21,10 +21,11 @@ macro_rules! make_setting_route {
             use meilisearch_lib::milli::update::Setting;
             use meilisearch_lib::{index::Settings, index_controller::Update, MeiliSearch};

-            use crate::analytics::Analytics;
-            use crate::extractors::authentication::{policies::*, GuardedData};
-            use crate::task::SummarizedTaskView;
-            use meilisearch_error::ResponseError;
+            use meilisearch_types::error::ResponseError;
+            use $crate::analytics::Analytics;
+            use $crate::extractors::authentication::{policies::*, GuardedData};
+            use $crate::extractors::sequential_extractor::SeqHandler;
+            use $crate::task::SummarizedTaskView;

             pub async fn delete(
                 meilisearch: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, MeiliSearch>,
@@ -98,19 +99,28 @@ macro_rules! make_setting_route {

             pub fn resources() -> Resource {
                 Resource::new($route)
-                    .route(web::get().to(get))
-                    .route(web::post().to(update))
-                    .route(web::delete().to(delete))
+                    .route(web::get().to(SeqHandler(get)))
+                    .route(web::$update_verb().to(SeqHandler(update)))
+                    .route(web::delete().to(SeqHandler(delete)))
             }
         }
     };
-    ($route:literal, $type:ty, $attr:ident, $camelcase_attr:literal) => {
-        make_setting_route!($route, $type, $attr, $camelcase_attr, _analytics, |_, _| {});
+    ($route:literal, $update_verb:ident, $type:ty, $attr:ident, $camelcase_attr:literal) => {
+        make_setting_route!(
+            $route,
+            $update_verb,
+            $type,
+            $attr,
+            $camelcase_attr,
+            _analytics,
+            |_, _| {}
+        );
     };
 }

 make_setting_route!(
     "/filterable-attributes",
+    put,
     std::collections::BTreeSet<String>,
     filterable_attributes,
     "filterableAttributes",
@@ -133,6 +143,7 @@ make_setting_route!(

 make_setting_route!(
     "/sortable-attributes",
+    put,
     std::collections::BTreeSet<String>,
     sortable_attributes,
     "sortableAttributes",
@@ -144,8 +155,8 @@ make_setting_route!(
             "SortableAttributes Updated".to_string(),
             json!({
                 "sortable_attributes": {
-                    "total": setting.as_ref().map(|sort| sort.len()).unwrap_or(0),
-                    "has_geo": setting.as_ref().map(|sort| sort.contains("_geo")).unwrap_or(false),
+                    "total": setting.as_ref().map(|sort| sort.len()),
+                    "has_geo": setting.as_ref().map(|sort| sort.contains("_geo")),
                 },
             }),
             Some(req),
@@ -155,13 +166,57 @@ make_setting_route!(

 make_setting_route!(
     "/displayed-attributes",
+    put,
     Vec<String>,
     displayed_attributes,
     "displayedAttributes"
 );

+make_setting_route!(
+    "/typo-tolerance",
+    patch,
+    meilisearch_lib::index::updates::TypoSettings,
+    typo_tolerance,
+    "typoTolerance",
+    analytics,
+    |setting: &Option<meilisearch_lib::index::updates::TypoSettings>, req: &HttpRequest| {
+        use serde_json::json;

+        analytics.publish(
+            "TypoTolerance Updated".to_string(),
+            json!({
+                "typo_tolerance": {
+                    "enabled": setting.as_ref().map(|s| !matches!(s.enabled, Setting::Set(false))),
+                    "disable_on_attributes": setting
+                        .as_ref()
+                        .and_then(|s| s.disable_on_attributes.as_ref().set().map(|m| !m.is_empty())),
+                    "disable_on_words": setting
+                        .as_ref()
+                        .and_then(|s| s.disable_on_words.as_ref().set().map(|m| !m.is_empty())),
+                    "min_word_size_for_one_typo": setting
+                        .as_ref()
+                        .and_then(|s| s.min_word_size_for_typos
+                            .as_ref()
+                            .set()
+                            .map(|s| s.one_typo.set()))
+                        .flatten(),
+                    "min_word_size_for_two_typos": setting
+                        .as_ref()
+                        .and_then(|s| s.min_word_size_for_typos
+                            .as_ref()
+                            .set()
+                            .map(|s| s.two_typos.set()))
+                        .flatten(),
+                },
+            }),
+            Some(req),
+        );
+    }
+);

 make_setting_route!(
     "/searchable-attributes",
+    put,
     Vec<String>,
     searchable_attributes,
     "searchableAttributes",
@@ -173,7 +228,7 @@ make_setting_route!(
             "SearchableAttributes Updated".to_string(),
             json!({
                 "searchable_attributes": {
-                    "total": setting.as_ref().map(|searchable| searchable.len()).unwrap_or(0),
+                    "total": setting.as_ref().map(|searchable| searchable.len()),
                 },
             }),
             Some(req),
@@ -183,6 +238,7 @@ make_setting_route!(

 make_setting_route!(
     "/stop-words",
+    put,
     std::collections::BTreeSet<String>,
     stop_words,
     "stopWords"
@@ -190,6 +246,7 @@ make_setting_route!(

 make_setting_route!(
     "/synonyms",
+    put,
     std::collections::BTreeMap<String, Vec<String>>,
     synonyms,
     "synonyms"
@@ -197,6 +254,7 @@ make_setting_route!(

 make_setting_route!(
     "/distinct-attribute",
+    put,
     String,
     distinct_attribute,
     "distinctAttribute"
@@ -204,6 +262,7 @@ make_setting_route!(

 make_setting_route!(
     "/ranking-rules",
+    put,
     Vec<String>,
     ranking_rules,
     "rankingRules",
@@ -223,14 +282,59 @@ make_setting_route!(
     }
 );

+make_setting_route!(
+    "/faceting",
+    patch,
+    meilisearch_lib::index::updates::FacetingSettings,
+    faceting,
+    "faceting",
+    analytics,
+    |setting: &Option<meilisearch_lib::index::updates::FacetingSettings>, req: &HttpRequest| {
+        use serde_json::json;

+        analytics.publish(
+            "Faceting Updated".to_string(),
+            json!({
+                "faceting": {
+                    "max_values_per_facet": setting.as_ref().and_then(|s| s.max_values_per_facet.set()),
+                },
+            }),
+            Some(req),
+        );
+    }
+);

+make_setting_route!(
+    "/pagination",
+    patch,
+    meilisearch_lib::index::updates::PaginationSettings,
+    pagination,
+    "pagination",
+    analytics,
+    |setting: &Option<meilisearch_lib::index::updates::PaginationSettings>, req: &HttpRequest| {
+        use serde_json::json;

+        analytics.publish(
+            "Pagination Updated".to_string(),
+            json!({
+                "pagination": {
+                    "max_total_hits": setting.as_ref().and_then(|s| s.max_total_hits.set()),
+                },
+            }),
+            Some(req),
+        );
+    }
+);

 macro_rules! generate_configure {
     ($($mod:ident),*) => {
         pub fn configure(cfg: &mut web::ServiceConfig) {
+            use crate::extractors::sequential_extractor::SeqHandler;
             cfg.service(
                 web::resource("")
-                    .route(web::post().to(update_all))
-                    .route(web::get().to(get_all))
-                    .route(web::delete().to(delete_all)))
+                    .route(web::patch().to(SeqHandler(update_all)))
+                    .route(web::get().to(SeqHandler(get_all)))
+                    .route(web::delete().to(SeqHandler(delete_all))))
                $(.service($mod::resources()))*;
         }
     };
@@ -244,7 +348,10 @@ generate_configure!(
     distinct_attribute,
     stop_words,
     synonyms,
-    ranking_rules
+    ranking_rules,
+    typo_tolerance,
+    pagination,
+    faceting
 );

 pub async fn update_all(
@@ -263,15 +370,58 @@ pub async fn update_all(
             "sort_position": settings.ranking_rules.as_ref().set().map(|sort| sort.iter().position(|s| s == "sort")),
             },
             "searchable_attributes": {
-                "total": settings.searchable_attributes.as_ref().set().map(|searchable| searchable.len()).unwrap_or(0),
+                "total": settings.searchable_attributes.as_ref().set().map(|searchable| searchable.len()),
             },
             "sortable_attributes": {
-                "total": settings.sortable_attributes.as_ref().set().map(|sort| sort.len()).unwrap_or(0),
-                "has_geo": settings.sortable_attributes.as_ref().set().map(|sort| sort.iter().any(|s| s == "_geo")).unwrap_or(false),
+                "total": settings.sortable_attributes.as_ref().set().map(|sort| sort.len()),
+                "has_geo": settings.sortable_attributes.as_ref().set().map(|sort| sort.iter().any(|s| s == "_geo")),
             },
             "filterable_attributes": {
-                "total": settings.filterable_attributes.as_ref().set().map(|filter| filter.len()).unwrap_or(0),
-                "has_geo": settings.filterable_attributes.as_ref().set().map(|filter| filter.iter().any(|s| s == "_geo")).unwrap_or(false),
+                "total": settings.filterable_attributes.as_ref().set().map(|filter| filter.len()),
+                "has_geo": settings.filterable_attributes.as_ref().set().map(|filter| filter.iter().any(|s| s == "_geo")),
             },
+            "typo_tolerance": {
+                "enabled": settings.typo_tolerance
+                    .as_ref()
+                    .set()
+                    .and_then(|s| s.enabled.as_ref().set())
+                    .copied(),
+                "disable_on_attributes": settings.typo_tolerance
+                    .as_ref()
+                    .set()
+                    .and_then(|s| s.disable_on_attributes.as_ref().set().map(|m| !m.is_empty())),
+                "disable_on_words": settings.typo_tolerance
+                    .as_ref()
+                    .set()
+                    .and_then(|s| s.disable_on_words.as_ref().set().map(|m| !m.is_empty())),
+                "min_word_size_for_one_typo": settings.typo_tolerance
+                    .as_ref()
+                    .set()
+                    .and_then(|s| s.min_word_size_for_typos
+                        .as_ref()
+                        .set()
+                        .map(|s| s.one_typo.set()))
+                    .flatten(),
+                "min_word_size_for_two_typos": settings.typo_tolerance
+                    .as_ref()
+                    .set()
+                    .and_then(|s| s.min_word_size_for_typos
+                        .as_ref()
+                        .set()
+                        .map(|s| s.two_typos.set()))
+                    .flatten(),
+            },
+            "faceting": {
+                "max_values_per_facet": settings.faceting
+                    .as_ref()
+                    .set()
+                    .and_then(|s| s.max_values_per_facet.as_ref().set()),
+            },
+            "pagination": {
+                "max_total_hits": settings.pagination
+                    .as_ref()
+                    .set()
+                    .and_then(|s| s.max_total_hits.as_ref().set()),
+            },
         }),
         Some(&req),
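The extra `$update_verb:ident` argument lets each settings route choose its update method, so object-shaped settings such as `typoTolerance`, `faceting`, and `pagination` register `patch` while the list-shaped settings keep `put`. A toy macro showing how an identifier argument selects the function an expansion calls; `put` and `patch` here are local stand-ins for `web::put()`/`web::patch()`, not actix-web APIs:

fn put() -> &'static str {
    "PUT"
}
fn patch() -> &'static str {
    "PATCH"
}

macro_rules! make_route {
    // `$update_verb:ident` is pasted as a call, exactly like
    // `web::$update_verb()` in the real macro.
    ($route:literal, $update_verb:ident) => {
        ($route, $update_verb())
    };
}

fn main() {
    assert_eq!(make_route!("/stop-words", put), ("/stop-words", "PUT"));
    assert_eq!(make_route!("/typo-tolerance", patch), ("/typo-tolerance", "PATCH"));
}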
@@ -1,76 +0,0 @@
-use actix_web::{web, HttpRequest, HttpResponse};
-use chrono::{DateTime, Utc};
-use log::debug;
-use meilisearch_error::ResponseError;
-use meilisearch_lib::MeiliSearch;
-use serde::{Deserialize, Serialize};
-use serde_json::json;
-
-use crate::analytics::Analytics;
-use crate::extractors::authentication::{policies::*, GuardedData};
-use crate::task::{TaskListView, TaskView};
-
-pub fn configure(cfg: &mut web::ServiceConfig) {
-    cfg.service(web::resource("").route(web::get().to(get_all_tasks_status)))
-        .service(web::resource("{task_id}").route(web::get().to(get_task_status)));
-}
-
-#[derive(Debug, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct UpdateIndexResponse {
-    name: String,
-    uid: String,
-    created_at: DateTime<Utc>,
-    updated_at: DateTime<Utc>,
-    primary_key: Option<String>,
-}
-
-#[derive(Deserialize)]
-pub struct UpdateParam {
-    index_uid: String,
-    task_id: u64,
-}
-
-pub async fn get_task_status(
-    meilisearch: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, MeiliSearch>,
-    index_uid: web::Path<UpdateParam>,
-    req: HttpRequest,
-    analytics: web::Data<dyn Analytics>,
-) -> Result<HttpResponse, ResponseError> {
-    analytics.publish(
-        "Index Tasks Seen".to_string(),
-        json!({ "per_task_uid": true }),
-        Some(&req),
-    );
-
-    let UpdateParam { index_uid, task_id } = index_uid.into_inner();
-
-    let task: TaskView = meilisearch.get_index_task(index_uid, task_id).await?.into();
-
-    debug!("returns: {:?}", task);
-    Ok(HttpResponse::Ok().json(task))
-}
-
-pub async fn get_all_tasks_status(
-    meilisearch: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, MeiliSearch>,
-    index_uid: web::Path<String>,
-    req: HttpRequest,
-    analytics: web::Data<dyn Analytics>,
-) -> Result<HttpResponse, ResponseError> {
-    analytics.publish(
-        "Index Tasks Seen".to_string(),
-        json!({ "per_task_uid": false }),
-        Some(&req),
-    );
-
-    let tasks: TaskListView = meilisearch
-        .list_index_task(index_uid.into_inner(), None, None)
-        .await?
-        .into_iter()
-        .map(TaskView::from)
-        .collect::<Vec<_>>()
-        .into();
-
-    debug!("returns: {:?}", tasks);
-    Ok(HttpResponse::Ok().json(tasks))
-}
@@ -1,11 +1,13 @@
 use actix_web::{web, HttpResponse};
-use chrono::{DateTime, Utc};
 use log::debug;
 use serde::{Deserialize, Serialize};

-use meilisearch_error::ResponseError;
+use time::OffsetDateTime;

 use meilisearch_lib::index::{Settings, Unchecked};
 use meilisearch_lib::MeiliSearch;
+use meilisearch_types::error::ResponseError;
+use meilisearch_types::star_or::StarOr;

 use crate::extractors::authentication::{policies::*, GuardedData};

@@ -24,6 +26,101 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
         .service(web::scope("/indexes").configure(indexes::configure));
 }

+/// Extracts the raw values from the `StarOr` types and
+/// return None if a `StarOr::Star` is encountered.
+pub fn fold_star_or<T, O>(content: impl IntoIterator<Item = StarOr<T>>) -> Option<O>
+where
+    O: FromIterator<T>,
+{
+    content
+        .into_iter()
+        .map(|value| match value {
+            StarOr::Star => None,
+            StarOr::Other(val) => Some(val),
+        })
+        .collect()
+}

+const PAGINATION_DEFAULT_LIMIT: fn() -> usize = || 20;

+#[derive(Debug, Clone, Copy, Deserialize)]
+#[serde(rename_all = "camelCase", deny_unknown_fields)]
+pub struct Pagination {
+    #[serde(default)]
+    pub offset: usize,
+    #[serde(default = "PAGINATION_DEFAULT_LIMIT")]
+    pub limit: usize,
+}

+#[derive(Debug, Clone, Serialize)]
+pub struct PaginationView<T> {
+    pub results: Vec<T>,
+    pub offset: usize,
+    pub limit: usize,
+    pub total: usize,
+}

+impl Pagination {
+    /// Given the full data to paginate, returns the selected section.
+    pub fn auto_paginate_sized<T>(
+        self,
+        content: impl IntoIterator<Item = T> + ExactSizeIterator,
+    ) -> PaginationView<T>
+    where
+        T: Serialize,
+    {
+        let total = content.len();
+        let content: Vec<_> = content
+            .into_iter()
+            .skip(self.offset)
+            .take(self.limit)
+            .collect();
+        self.format_with(total, content)
+    }

+    /// Given an iterator and the total number of elements, returns the selected section.
+    pub fn auto_paginate_unsized<T>(
+        self,
+        total: usize,
+        content: impl IntoIterator<Item = T>,
+    ) -> PaginationView<T>
+    where
+        T: Serialize,
+    {
+        let content: Vec<_> = content
+            .into_iter()
+            .skip(self.offset)
+            .take(self.limit)
+            .collect();
+        self.format_with(total, content)
+    }

+    /// Given the data already paginated + the total number of elements, it stores
+    /// everything in a [PaginationResult].
+    pub fn format_with<T>(self, total: usize, results: Vec<T>) -> PaginationView<T>
+    where
+        T: Serialize,
+    {
+        PaginationView {
+            results,
+            offset: self.offset,
+            limit: self.limit,
+            total,
+        }
+    }
+}

+impl<T> PaginationView<T> {
+    pub fn new(offset: usize, limit: usize, total: usize, results: Vec<T>) -> Self {
+        Self {
+            offset,
+            limit,
+            results,
+            total,
+        }
+    }
+}

 #[derive(Debug, Clone, Serialize, Deserialize)]
 #[allow(clippy::large_enum_variant)]
 #[serde(tag = "name")]
@@ -54,8 +151,10 @@ pub struct ProcessedUpdateResult {
     #[serde(rename = "type")]
     pub update_type: UpdateType,
     pub duration: f64, // in seconds
-    pub enqueued_at: DateTime<Utc>,
-    pub processed_at: DateTime<Utc>,
+    #[serde(with = "time::serde::rfc3339")]
+    pub enqueued_at: OffsetDateTime,
+    #[serde(with = "time::serde::rfc3339")]
+    pub processed_at: OffsetDateTime,
 }

 #[derive(Debug, Clone, Serialize, Deserialize)]
@@ -66,8 +165,10 @@ pub struct FailedUpdateResult {
     pub update_type: UpdateType,
     pub error: ResponseError,
     pub duration: f64, // in seconds
-    pub enqueued_at: DateTime<Utc>,
-    pub processed_at: DateTime<Utc>,
+    #[serde(with = "time::serde::rfc3339")]
+    pub enqueued_at: OffsetDateTime,
+    #[serde(with = "time::serde::rfc3339")]
+    pub processed_at: OffsetDateTime,
 }

 #[derive(Debug, Clone, Serialize, Deserialize)]
@@ -76,9 +177,13 @@ pub struct EnqueuedUpdateResult {
     pub update_id: u64,
     #[serde(rename = "type")]
     pub update_type: UpdateType,
-    pub enqueued_at: DateTime<Utc>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub started_processing_at: Option<DateTime<Utc>>,
+    #[serde(with = "time::serde::rfc3339")]
+    pub enqueued_at: OffsetDateTime,
+    #[serde(
+        skip_serializing_if = "Option::is_none",
+        with = "time::serde::rfc3339::option"
+    )]
+    pub started_processing_at: Option<OffsetDateTime>,
 }

 #[derive(Debug, Clone, Serialize, Deserialize)]
@@ -117,19 +222,18 @@ impl IndexUpdateResponse {
 /// Always return a 200 with:
 /// ```json
 /// {
-///     "status": "MeiliSearch is running"
+///     "status": "Meilisearch is running"
 /// }
 /// ```
 pub async fn running() -> HttpResponse {
-    HttpResponse::Ok().json(serde_json::json!({ "status": "MeiliSearch is running" }))
+    HttpResponse::Ok().json(serde_json::json!({ "status": "Meilisearch is running" }))
 }

 async fn get_stats(
     meilisearch: GuardedData<ActionPolicy<{ actions::STATS_GET }>, MeiliSearch>,
 ) -> Result<HttpResponse, ResponseError> {
-    let filters = meilisearch.filters();
-
-    let response = meilisearch.get_all_stats(&filters.indexes).await?;
+    let search_rules = &meilisearch.filters().search_rules;
+    let response = meilisearch.get_all_stats(search_rules).await?;

     debug!("returns: {:?}", response);
     Ok(HttpResponse::Ok().json(response))
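A usage sketch for the pagination helpers defined above, with the `Serialize` bounds dropped so it stands alone; it shows the `PaginationView` produced for `offset=2, limit=2` over five results:

struct Pagination {
    offset: usize,
    limit: usize,
}

#[derive(Debug)]
struct PaginationView<T> {
    results: Vec<T>,
    offset: usize,
    limit: usize,
    total: usize,
}

impl Pagination {
    // Same skip/take windowing as the real `auto_paginate_sized`; the total
    // is read from the iterator before it is consumed.
    fn auto_paginate_sized<T>(
        self,
        content: impl IntoIterator<Item = T> + ExactSizeIterator,
    ) -> PaginationView<T> {
        let total = content.len();
        let results: Vec<_> = content
            .into_iter()
            .skip(self.offset)
            .take(self.limit)
            .collect();
        PaginationView { results, offset: self.offset, limit: self.limit, total }
    }
}

fn main() {
    let keys = vec!["a", "b", "c", "d", "e"];
    let page = Pagination { offset: 2, limit: 2 }.auto_paginate_sized(keys.into_iter());
    assert_eq!(page.results, vec!["c", "d"]);
    assert_eq!(page.total, 5); // total counts all items, not just the page
    assert_eq!((page.offset, page.limit), (2, 2));
}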
@@ -1,45 +1,172 @@
 use actix_web::{web, HttpRequest, HttpResponse};
-use meilisearch_error::ResponseError;
-use meilisearch_lib::tasks::task::TaskId;
+use meilisearch_lib::tasks::task::{TaskContent, TaskEvent, TaskId};
 use meilisearch_lib::tasks::TaskFilter;
 use meilisearch_lib::MeiliSearch;
+use meilisearch_types::error::ResponseError;
+use meilisearch_types::index_uid::IndexUid;
+use meilisearch_types::star_or::StarOr;
+use serde::Deserialize;
+use serde_cs::vec::CS;
 use serde_json::json;

 use crate::analytics::Analytics;
 use crate::extractors::authentication::{policies::*, GuardedData};
-use crate::task::{TaskListView, TaskView};
+use crate::extractors::sequential_extractor::SeqHandler;
+use crate::task::{TaskListView, TaskStatus, TaskType, TaskView};

+use super::fold_star_or;

+const DEFAULT_LIMIT: fn() -> usize = || 20;

 pub fn configure(cfg: &mut web::ServiceConfig) {
-    cfg.service(web::resource("").route(web::get().to(get_tasks)))
-        .service(web::resource("/{task_id}").route(web::get().to(get_task)));
+    cfg.service(web::resource("").route(web::get().to(SeqHandler(get_tasks))))
+        .service(web::resource("/{task_id}").route(web::get().to(SeqHandler(get_task))));
 }

+#[derive(Deserialize, Debug)]
+#[serde(rename_all = "camelCase", deny_unknown_fields)]
+pub struct TasksFilterQuery {
+    #[serde(rename = "type")]
+    type_: Option<CS<StarOr<TaskType>>>,
+    status: Option<CS<StarOr<TaskStatus>>>,
+    index_uid: Option<CS<StarOr<IndexUid>>>,
+    #[serde(default = "DEFAULT_LIMIT")]
+    limit: usize,
+    from: Option<TaskId>,
+}

+#[rustfmt::skip]
+fn task_type_matches_content(type_: &TaskType, content: &TaskContent) -> bool {
+    matches!((type_, content),
+          (TaskType::IndexCreation, TaskContent::IndexCreation { .. })
+        | (TaskType::IndexUpdate, TaskContent::IndexUpdate { .. })
+        | (TaskType::IndexDeletion, TaskContent::IndexDeletion { .. })
+        | (TaskType::DocumentAdditionOrUpdate, TaskContent::DocumentAddition { .. })
+        | (TaskType::DocumentDeletion, TaskContent::DocumentDeletion{ .. })
+        | (TaskType::SettingsUpdate, TaskContent::SettingsUpdate { .. })
+    )
+}

+#[rustfmt::skip]
+fn task_status_matches_events(status: &TaskStatus, events: &[TaskEvent]) -> bool {
+    events.last().map_or(false, |event| {
+        matches!((status, event),
+              (TaskStatus::Enqueued, TaskEvent::Created(_))
+            | (TaskStatus::Processing, TaskEvent::Processing(_) | TaskEvent::Batched { .. })
+            | (TaskStatus::Succeeded, TaskEvent::Succeeded { .. })
+            | (TaskStatus::Failed, TaskEvent::Failed { .. }),
+        )
+    })
+}

 async fn get_tasks(
     meilisearch: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, MeiliSearch>,
+    params: web::Query<TasksFilterQuery>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
+    let TasksFilterQuery {
+        type_,
+        status,
+        index_uid,
+        limit,
+        from,
+    } = params.into_inner();

+    let search_rules = &meilisearch.filters().search_rules;

+    // We first transform a potential indexUid=* into a "not specified indexUid filter"
+    // for every one of the filters: type, status, and indexUid.
+    let type_: Option<Vec<_>> = type_.and_then(fold_star_or);
+    let status: Option<Vec<_>> = status.and_then(fold_star_or);
+    let index_uid: Option<Vec<_>> = index_uid.and_then(fold_star_or);

     analytics.publish(
         "Tasks Seen".to_string(),
-        json!({ "per_task_uid": false }),
+        json!({
+            "filtered_by_index_uid": index_uid.as_ref().map_or(false, |v| !v.is_empty()),
+            "filtered_by_type": type_.as_ref().map_or(false, |v| !v.is_empty()),
+            "filtered_by_status": status.as_ref().map_or(false, |v| !v.is_empty()),
+        }),
         Some(&req),
     );

-    let filters = meilisearch.filters().indexes.as_ref().map(|indexes| {
-        let mut filters = TaskFilter::default();
-        for index in indexes {
-            filters.filter_index(index.to_string());
-        }
-        filters
-    });
+    // Then we filter on potential indexes and make sure that the search filter
+    // restrictions are also applied.
+    let indexes_filters = match index_uid {
+        Some(indexes) => {
+            let mut filters = TaskFilter::default();
+            for name in indexes {
+                if search_rules.is_index_authorized(&name) {
+                    filters.filter_index(name.to_string());
+                }
+            }
+            Some(filters)
+        }
+        None => {
+            if search_rules.is_index_authorized("*") {
+                None
+            } else {
+                let mut filters = TaskFilter::default();
+                for (index, _policy) in search_rules.clone() {
+                    filters.filter_index(index);
+                }
+                Some(filters)
+            }
+        }
+    };

+    // Then we complete the task filter with other potential status and types filters.
+    let filters = if type_.is_some() || status.is_some() {
+        let mut filters = indexes_filters.unwrap_or_default();
+        filters.filter_fn(move |task| {
+            let matches_type = match &type_ {
+                Some(types) => types
+                    .iter()
+                    .any(|t| task_type_matches_content(t, &task.content)),
+                None => true,
+            };

+            let matches_status = match &status {
+                Some(statuses) => statuses
+                    .iter()
+                    .any(|t| task_status_matches_events(t, &task.events)),
+                None => true,
+            };

+            matches_type && matches_status
+        });
+        Some(filters)
+    } else {
+        indexes_filters
+    };

-    let tasks: TaskListView = meilisearch
-        .list_tasks(filters, None, None)
-        .await?
-        .into_iter()
-        .map(TaskView::from)
-        .collect::<Vec<_>>()
-        .into();
+    // We +1 just to know if there is more after this "page" or not.
+    let limit = limit.saturating_add(1);

+    let mut tasks_results: Vec<_> = meilisearch
+        .list_tasks(filters, Some(limit), from)
+        .await?
+        .into_iter()
+        .map(TaskView::from)
+        .collect();

+    // If we were able to fetch the number +1 tasks we asked
+    // it means that there is more to come.
+    let next = if tasks_results.len() == limit {
+        tasks_results.pop().map(|t| t.uid)
+    } else {
+        None
+    };

+    let from = tasks_results.first().map(|t| t.uid);

+    let tasks = TaskListView {
+        results: tasks_results,
+        limit: limit.saturating_sub(1),
+        from,
+        next,
+    };

     Ok(HttpResponse::Ok().json(tasks))
 }
@@ -56,13 +183,16 @@ async fn get_task(
         Some(&req),
     );

-    let filters = meilisearch.filters().indexes.as_ref().map(|indexes| {
+    let search_rules = &meilisearch.filters().search_rules;
+    let filters = if search_rules.is_index_authorized("*") {
+        None
+    } else {
         let mut filters = TaskFilter::default();
-        for index in indexes {
-            filters.filter_index(index.to_string());
+        for (index, _policy) in search_rules.clone() {
+            filters.filter_index(index);
         }
-        filters
-    });
+        Some(filters)
+    };

     let task: TaskView = meilisearch
         .get_task(task_id.into_inner(), filters)
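The `limit.saturating_add(1)` trick above over-fetches by one row to decide whether a `next` cursor exists. Reduced to its core, with plain `u64` uids standing in for tasks:

// `rows` is assumed to already be fetched with `limit + 1` as the query limit.
fn paginate(mut rows: Vec<u64>, limit: usize) -> (Vec<u64>, Option<u64>) {
    let next = if rows.len() == limit + 1 {
        rows.pop() // drop the sentinel row, keep its uid as the cursor
    } else {
        None
    };
    (rows, next)
}

fn main() {
    // 3 rows fetched for limit = 2 -> a next page exists, cursor = 7
    assert_eq!(paginate(vec![9, 8, 7], 2), (vec![9, 8], Some(7)));
    // 2 rows fetched for limit = 2 -> this was the last page
    assert_eq!(paginate(vec![9, 8], 2), (vec![9, 8], None));
}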
@@ -1,56 +1,137 @@
|
||||
use chrono::{DateTime, Duration, Utc};
|
||||
use meilisearch_error::ResponseError;
|
||||
use std::error::Error;
|
||||
use std::fmt::{self, Write};
|
||||
use std::str::FromStr;
|
||||
use std::write;
|
||||
|
||||
use meilisearch_lib::index::{Settings, Unchecked};
|
||||
use meilisearch_lib::milli::update::IndexDocumentsMethod;
|
||||
use meilisearch_lib::tasks::batch::BatchId;
|
||||
use meilisearch_lib::tasks::task::{
|
||||
DocumentDeletion, Task, TaskContent, TaskEvent, TaskId, TaskResult,
|
||||
};
|
||||
use serde::{Serialize, Serializer};
|
||||
use meilisearch_types::error::ResponseError;
|
||||
use serde::{Deserialize, Serialize, Serializer};
|
||||
use time::{Duration, OffsetDateTime};
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
use crate::AUTOBATCHING_ENABLED;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
enum TaskType {
|
||||
pub enum TaskType {
|
||||
IndexCreation,
|
||||
IndexUpdate,
|
||||
IndexDeletion,
|
||||
DocumentAddition,
|
||||
DocumentPartial,
|
||||
DocumentAdditionOrUpdate,
|
||||
DocumentDeletion,
|
||||
SettingsUpdate,
|
||||
ClearAll,
|
||||
DumpCreation,
|
||||
}
|
||||
|
||||
impl From<TaskContent> for TaskType {
|
||||
fn from(other: TaskContent) -> Self {
|
||||
match other {
|
||||
TaskContent::DocumentAddition {
|
||||
merge_strategy: IndexDocumentsMethod::ReplaceDocuments,
|
||||
..
|
||||
} => TaskType::DocumentAddition,
|
||||
TaskContent::DocumentAddition {
|
||||
merge_strategy: IndexDocumentsMethod::UpdateDocuments,
|
||||
..
|
||||
} => TaskType::DocumentPartial,
|
||||
TaskContent::DocumentDeletion(DocumentDeletion::Clear) => TaskType::ClearAll,
|
||||
TaskContent::DocumentDeletion(DocumentDeletion::Ids(_)) => TaskType::DocumentDeletion,
|
||||
TaskContent::SettingsUpdate { .. } => TaskType::SettingsUpdate,
|
||||
TaskContent::IndexDeletion => TaskType::IndexDeletion,
|
||||
TaskContent::IndexCreation { .. } => TaskType::IndexCreation,
|
||||
TaskContent::IndexUpdate { .. } => TaskType::IndexUpdate,
|
||||
_ => unreachable!("unexpected task type"),
|
||||
TaskContent::IndexDeletion { .. } => TaskType::IndexDeletion,
|
||||
TaskContent::DocumentAddition { .. } => TaskType::DocumentAdditionOrUpdate,
|
||||
TaskContent::DocumentDeletion { .. } => TaskType::DocumentDeletion,
|
||||
TaskContent::SettingsUpdate { .. } => TaskType::SettingsUpdate,
|
||||
TaskContent::Dump { .. } => TaskType::DumpCreation,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
#[derive(Debug)]
|
||||
pub struct TaskTypeError {
|
||||
invalid_type: String,
|
||||
}
|
||||
|
||||
impl fmt::Display for TaskTypeError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"invalid task type `{}`, expecting one of: \
|
||||
indexCreation, indexUpdate, indexDeletion, documentAdditionOrUpdate, \
|
||||
documentDeletion, settingsUpdate, dumpCreation",
|
||||
self.invalid_type
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl Error for TaskTypeError {}
|
||||
|
||||
impl FromStr for TaskType {
|
||||
type Err = TaskTypeError;
|
||||
|
||||
fn from_str(type_: &str) -> Result<Self, TaskTypeError> {
|
||||
if type_.eq_ignore_ascii_case("indexCreation") {
|
||||
Ok(TaskType::IndexCreation)
|
||||
} else if type_.eq_ignore_ascii_case("indexUpdate") {
|
||||
Ok(TaskType::IndexUpdate)
|
||||
} else if type_.eq_ignore_ascii_case("indexDeletion") {
|
||||
Ok(TaskType::IndexDeletion)
|
||||
} else if type_.eq_ignore_ascii_case("documentAdditionOrUpdate") {
|
||||
Ok(TaskType::DocumentAdditionOrUpdate)
|
||||
} else if type_.eq_ignore_ascii_case("documentDeletion") {
|
||||
Ok(TaskType::DocumentDeletion)
|
||||
} else if type_.eq_ignore_ascii_case("settingsUpdate") {
|
||||
Ok(TaskType::SettingsUpdate)
|
||||
} else if type_.eq_ignore_ascii_case("dumpCreation") {
|
||||
Ok(TaskType::DumpCreation)
|
||||
} else {
|
||||
Err(TaskTypeError {
|
||||
invalid_type: type_.to_string(),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
enum TaskStatus {
|
||||
pub enum TaskStatus {
|
||||
Enqueued,
|
||||
Processing,
|
||||
Succeeded,
|
||||
Failed,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct TaskStatusError {
|
||||
invalid_status: String,
|
||||
}
|
||||
|
||||
impl fmt::Display for TaskStatusError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"invalid task status `{}`, expecting one of: \
|
||||
enqueued, processing, succeeded, or failed",
|
||||
self.invalid_status,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl Error for TaskStatusError {}
|
||||
|
||||
impl FromStr for TaskStatus {
|
||||
type Err = TaskStatusError;
|
||||
|
||||
fn from_str(status: &str) -> Result<Self, TaskStatusError> {
|
||||
if status.eq_ignore_ascii_case("enqueued") {
|
||||
Ok(TaskStatus::Enqueued)
|
||||
} else if status.eq_ignore_ascii_case("processing") {
|
||||
Ok(TaskStatus::Processing)
|
||||
} else if status.eq_ignore_ascii_case("succeeded") {
|
||||
Ok(TaskStatus::Succeeded)
|
||||
} else if status.eq_ignore_ascii_case("failed") {
|
||||
Ok(TaskStatus::Failed)
|
||||
} else {
|
||||
Err(TaskStatusError {
|
||||
invalid_status: status.to_string(),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
#[serde(untagged)]
#[allow(clippy::large_enum_variant)]
@@ -74,16 +155,56 @@ enum TaskDetails {
    },
    #[serde(rename_all = "camelCase")]
    ClearAll { deleted_documents: Option<u64> },
    #[serde(rename_all = "camelCase")]
    Dump { dump_uid: String },
}

/// Serialize a `time::Duration` as a best effort ISO 8601 while waiting for
/// https://github.com/time-rs/time/issues/378.
/// This code is a port of the old code of time that was removed in 0.2.
fn serialize_duration<S: Serializer>(
    duration: &Option<Duration>,
    serializer: S,
) -> Result<S::Ok, S::Error> {
    match duration {
        Some(duration) => {
            let duration_str = duration.to_string();
            serializer.serialize_str(&duration_str)
            // technically speaking, negative duration is not valid ISO 8601
            if duration.is_negative() {
                return serializer.serialize_none();
            }

            const SECS_PER_DAY: i64 = Duration::DAY.whole_seconds();
            let secs = duration.whole_seconds();
            let days = secs / SECS_PER_DAY;
            let secs = secs - days * SECS_PER_DAY;
            let hasdate = days != 0;
            let nanos = duration.subsec_nanoseconds();
            let hastime = (secs != 0 || nanos != 0) || !hasdate;

            // all the following unwrap can't fail
            let mut res = String::new();
            write!(&mut res, "P").unwrap();

            if hasdate {
                write!(&mut res, "{}D", days).unwrap();
            }

            const NANOS_PER_MILLI: i32 = Duration::MILLISECOND.subsec_nanoseconds();
            const NANOS_PER_MICRO: i32 = Duration::MICROSECOND.subsec_nanoseconds();

            if hastime {
                if nanos == 0 {
                    write!(&mut res, "T{}S", secs).unwrap();
                } else if nanos % NANOS_PER_MILLI == 0 {
                    write!(&mut res, "T{}.{:03}S", secs, nanos / NANOS_PER_MILLI).unwrap();
                } else if nanos % NANOS_PER_MICRO == 0 {
                    write!(&mut res, "T{}.{:06}S", secs, nanos / NANOS_PER_MICRO).unwrap();
                } else {
                    write!(&mut res, "T{}.{:09}S", secs, nanos).unwrap();
                }
            }

            serializer.serialize_str(&res)
        }
        None => serializer.serialize_none(),
    }
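A few concrete input/output pairs implied by the branches above (an illustrative sketch, not part of the diff; DurationWrapper is a hypothetical struct used only to apply serialize_duration through serde):

#[derive(Serialize)]
struct DurationWrapper {
    #[serde(serialize_with = "serialize_duration")]
    duration: Option<Duration>,
}

// Some(Duration::DAY + Duration::seconds(2)) -> {"duration":"P1DT2S"}
// Some(Duration::milliseconds(1500))         -> {"duration":"PT1.500S"}  (millisecond branch)
// Some(Duration::ZERO)                       -> {"duration":"PT0S"}      (hastime forced when no date part)
// Some(-Duration::SECOND) or None            -> {"duration":null}        (negative is not valid ISO 8601)
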
@@ -92,8 +213,8 @@ fn serialize_duration<S: Serializer>(
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct TaskView {
    uid: TaskId,
    index_uid: String,
    pub uid: TaskId,
    index_uid: Option<String>,
    status: TaskStatus,
    #[serde(rename = "type")]
    task_type: TaskType,
@@ -103,53 +224,56 @@ pub struct TaskView {
    error: Option<ResponseError>,
    #[serde(serialize_with = "serialize_duration")]
    duration: Option<Duration>,
    enqueued_at: DateTime<Utc>,
    started_at: Option<DateTime<Utc>>,
    finished_at: Option<DateTime<Utc>>,
    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
    enqueued_at: OffsetDateTime,
    #[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
    started_at: Option<OffsetDateTime>,
    #[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
    finished_at: Option<OffsetDateTime>,
    #[serde(skip_serializing_if = "Option::is_none")]
    batch_uid: Option<Option<BatchId>>,
}

impl From<Task> for TaskView {
    fn from(task: Task) -> Self {
        let index_uid = task.index_uid().map(String::from);
        let Task {
            id,
            index_uid,
            content,
            events,
        } = task;

        let (task_type, mut details) = match content {
            TaskContent::DocumentAddition {
                merge_strategy,
                documents_count,
                ..
                documents_count, ..
            } => {
                let details = TaskDetails::DocumentAddition {
                    received_documents: documents_count,
                    indexed_documents: None,
                };

                let task_type = match merge_strategy {
                    IndexDocumentsMethod::UpdateDocuments => TaskType::DocumentPartial,
                    IndexDocumentsMethod::ReplaceDocuments => TaskType::DocumentAddition,
                    _ => unreachable!("Unexpected document merge strategy."),
                };

                (task_type, Some(details))
                (TaskType::DocumentAdditionOrUpdate, Some(details))
            }
            TaskContent::DocumentDeletion(DocumentDeletion::Ids(ids)) => (
            TaskContent::DocumentDeletion {
                deletion: DocumentDeletion::Ids(ids),
                ..
            } => (
                TaskType::DocumentDeletion,
                Some(TaskDetails::DocumentDeletion {
                    received_document_ids: ids.len(),
                    deleted_documents: None,
                }),
            ),
            TaskContent::DocumentDeletion(DocumentDeletion::Clear) => (
                TaskType::ClearAll,
            TaskContent::DocumentDeletion {
                deletion: DocumentDeletion::Clear,
                ..
            } => (
                TaskType::DocumentDeletion,
                Some(TaskDetails::ClearAll {
                    deleted_documents: None,
                }),
            ),
            TaskContent::IndexDeletion => (
            TaskContent::IndexDeletion { .. } => (
                TaskType::IndexDeletion,
                Some(TaskDetails::ClearAll {
                    deleted_documents: None,
@@ -159,14 +283,18 @@ impl From<Task> for TaskView {
                TaskType::SettingsUpdate,
                Some(TaskDetails::Settings { settings }),
            ),
            TaskContent::IndexCreation { primary_key } => (
            TaskContent::IndexCreation { primary_key, .. } => (
                TaskType::IndexCreation,
                Some(TaskDetails::IndexInfo { primary_key }),
            ),
            TaskContent::IndexUpdate { primary_key } => (
            TaskContent::IndexUpdate { primary_key, .. } => (
                TaskType::IndexUpdate,
                Some(TaskDetails::IndexInfo { primary_key }),
            ),
            TaskContent::Dump { uid } => (
                TaskType::DumpCreation,
                Some(TaskDetails::Dump { dump_uid: uid }),
            ),
        };

        // A task always has at least one event: "Created"
@@ -174,7 +302,7 @@ impl From<Task> for TaskView {
            TaskEvent::Created(_) => (TaskStatus::Enqueued, None, None),
            TaskEvent::Batched { .. } => (TaskStatus::Enqueued, None, None),
            TaskEvent::Processing(_) => (TaskStatus::Processing, None, None),
            TaskEvent::Succeded { timestamp, result } => {
            TaskEvent::Succeeded { timestamp, result } => {
                match (result, &mut details) {
                    (
                        TaskResult::DocumentAddition {
@@ -252,9 +380,19 @@ impl From<Task> for TaskView {

        let duration = finished_at.zip(started_at).map(|(tf, ts)| (tf - ts));

        let batch_uid = if AUTOBATCHING_ENABLED.load(std::sync::atomic::Ordering::Relaxed) {
            let id = events.iter().find_map(|e| match e {
                TaskEvent::Batched { batch_id, .. } => Some(*batch_id),
                _ => None,
            });
            Some(id)
        } else {
            None
        };

        Self {
            uid: id,
            index_uid: index_uid.into_inner(),
            index_uid,
            status,
            task_type,
            details,
@@ -263,30 +401,29 @@ impl From<Task> for TaskView {
            enqueued_at,
            started_at,
            finished_at,
            batch_uid,
        }
    }
}

#[derive(Debug, Serialize)]
pub struct TaskListView {
    results: Vec<TaskView>,
}

impl From<Vec<TaskView>> for TaskListView {
    fn from(results: Vec<TaskView>) -> Self {
        Self { results }
    }
    pub results: Vec<TaskView>,
    pub limit: usize,
    pub from: Option<TaskId>,
    pub next: Option<TaskId>,
}

#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SummarizedTaskView {
    uid: TaskId,
    index_uid: String,
    task_uid: TaskId,
    index_uid: Option<String>,
    status: TaskStatus,
    #[serde(rename = "type")]
    task_type: TaskType,
    enqueued_at: DateTime<Utc>,
    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
    enqueued_at: OffsetDateTime,
}

impl From<Task> for SummarizedTaskView {
@@ -303,8 +440,8 @@ impl From<Task> for SummarizedTaskView {
        };

        Self {
            uid: other.id,
            index_uid: other.index_uid.to_string(),
            task_uid: other.id,
            index_uid: other.index_uid().map(String::from),
            status: TaskStatus::Enqueued,
            task_type: other.content.into(),
            enqueued_at,

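Put together, a succeeded task rendered through the updated TaskView serializes roughly like this (illustrative values only, derived from the field names and serde attributes above; batchUid is omitted when autobatching is disabled):

{
  "uid": 1,
  "indexUid": "movies",
  "status": "succeeded",
  "type": "documentAdditionOrUpdate",
  "details": { "receivedDocuments": 100, "indexedDocuments": 100 },
  "error": null,
  "duration": "PT0.042S",
  "enqueuedAt": "2022-01-01T00:00:00Z",
  "startedAt": "2022-01-01T00:00:01Z",
  "finishedAt": "2022-01-01T00:00:02Z"
}
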
BIN  meilisearch-http/tests/assets/v1_v0.20.0_movies.dump  (new file, binary not shown)
BIN  meilisearch-http/tests/assets/v2_v0.21.1_movies.dump  (new file, binary not shown)
BIN  meilisearch-http/tests/assets/v3_v0.24.0_movies.dump  (new file, binary not shown)
BIN  meilisearch-http/tests/assets/v4_v0.25.2_movies.dump  (new file, binary not shown)
BIN  meilisearch-http/tests/assets/v5_v0.28.0_test_dump.dump  (new file, binary not shown)
File diff suppressed because it is too large.
@@ -1,56 +1,67 @@
use crate::common::Server;
use chrono::{Duration, Utc};
use maplit::hashmap;
use ::time::format_description::well_known::Rfc3339;
use maplit::{hashmap, hashset};
use once_cell::sync::Lazy;
use serde_json::{json, Value};
use std::collections::{HashMap, HashSet};
use time::{Duration, OffsetDateTime};

static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), &'static str>> =
pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'static str>>> =
    Lazy::new(|| {
        hashmap! {
            ("POST", "/indexes/products/search") => "search",
            ("GET", "/indexes/products/search") => "search",
            ("POST", "/indexes/products/documents") => "documents.add",
            ("GET", "/indexes/products/documents") => "documents.get",
            ("GET", "/indexes/products/documents/0") => "documents.get",
            ("DELETE", "/indexes/products/documents/0") => "documents.delete",
            ("GET", "/tasks") => "tasks.get",
            ("GET", "/indexes/products/tasks") => "tasks.get",
            ("GET", "/indexes/products/tasks/0") => "tasks.get",
            ("PUT", "/indexes/products/") => "indexes.update",
            ("GET", "/indexes/products/") => "indexes.get",
            ("DELETE", "/indexes/products/") => "indexes.delete",
            ("POST", "/indexes") => "indexes.create",
            ("GET", "/indexes") => "indexes.get",
            ("GET", "/indexes/products/settings") => "settings.get",
            ("GET", "/indexes/products/settings/displayed-attributes") => "settings.get",
            ("GET", "/indexes/products/settings/distinct-attribute") => "settings.get",
            ("GET", "/indexes/products/settings/filterable-attributes") => "settings.get",
            ("GET", "/indexes/products/settings/ranking-rules") => "settings.get",
            ("GET", "/indexes/products/settings/searchable-attributes") => "settings.get",
            ("GET", "/indexes/products/settings/sortable-attributes") => "settings.get",
            ("GET", "/indexes/products/settings/stop-words") => "settings.get",
            ("GET", "/indexes/products/settings/synonyms") => "settings.get",
            ("DELETE", "/indexes/products/settings") => "settings.update",
            ("POST", "/indexes/products/settings") => "settings.update",
            ("POST", "/indexes/products/settings/displayed-attributes") => "settings.update",
            ("POST", "/indexes/products/settings/distinct-attribute") => "settings.update",
            ("POST", "/indexes/products/settings/filterable-attributes") => "settings.update",
            ("POST", "/indexes/products/settings/ranking-rules") => "settings.update",
            ("POST", "/indexes/products/settings/searchable-attributes") => "settings.update",
            ("POST", "/indexes/products/settings/sortable-attributes") => "settings.update",
            ("POST", "/indexes/products/settings/stop-words") => "settings.update",
            ("POST", "/indexes/products/settings/synonyms") => "settings.update",
            ("GET", "/indexes/products/stats") => "stats.get",
            ("GET", "/stats") => "stats.get",
            ("POST", "/dumps") => "dumps.create",
            ("GET", "/dumps/0/status") => "dumps.get",
            ("GET", "/version") => "version",
            ("POST", "/indexes/products/search") => hashset!{"search", "*"},
            ("GET", "/indexes/products/search") => hashset!{"search", "*"},
            ("POST", "/indexes/products/documents") => hashset!{"documents.add", "documents.*", "*"},
            ("GET", "/indexes/products/documents") => hashset!{"documents.get", "documents.*", "*"},
            ("GET", "/indexes/products/documents/0") => hashset!{"documents.get", "documents.*", "*"},
            ("DELETE", "/indexes/products/documents/0") => hashset!{"documents.delete", "documents.*", "*"},
            ("GET", "/tasks") => hashset!{"tasks.get", "tasks.*", "*"},
            ("GET", "/tasks?indexUid=products") => hashset!{"tasks.get", "tasks.*", "*"},
            ("GET", "/tasks/0") => hashset!{"tasks.get", "tasks.*", "*"},
            ("PATCH", "/indexes/products/") => hashset!{"indexes.update", "indexes.*", "*"},
            ("GET", "/indexes/products/") => hashset!{"indexes.get", "indexes.*", "*"},
            ("DELETE", "/indexes/products/") => hashset!{"indexes.delete", "indexes.*", "*"},
            ("POST", "/indexes") => hashset!{"indexes.create", "indexes.*", "*"},
            ("GET", "/indexes") => hashset!{"indexes.get", "indexes.*", "*"},
            ("GET", "/indexes/products/settings") => hashset!{"settings.get", "settings.*", "*"},
            ("GET", "/indexes/products/settings/displayed-attributes") => hashset!{"settings.get", "settings.*", "*"},
            ("GET", "/indexes/products/settings/distinct-attribute") => hashset!{"settings.get", "settings.*", "*"},
            ("GET", "/indexes/products/settings/filterable-attributes") => hashset!{"settings.get", "settings.*", "*"},
            ("GET", "/indexes/products/settings/ranking-rules") => hashset!{"settings.get", "settings.*", "*"},
            ("GET", "/indexes/products/settings/searchable-attributes") => hashset!{"settings.get", "settings.*", "*"},
            ("GET", "/indexes/products/settings/sortable-attributes") => hashset!{"settings.get", "settings.*", "*"},
            ("GET", "/indexes/products/settings/stop-words") => hashset!{"settings.get", "settings.*", "*"},
            ("GET", "/indexes/products/settings/synonyms") => hashset!{"settings.get", "settings.*", "*"},
            ("DELETE", "/indexes/products/settings") => hashset!{"settings.update", "settings.*", "*"},
            ("PATCH", "/indexes/products/settings") => hashset!{"settings.update", "settings.*", "*"},
            ("PATCH", "/indexes/products/settings/typo-tolerance") => hashset!{"settings.update", "settings.*", "*"},
            ("PUT", "/indexes/products/settings/displayed-attributes") => hashset!{"settings.update", "settings.*", "*"},
            ("PUT", "/indexes/products/settings/distinct-attribute") => hashset!{"settings.update", "settings.*", "*"},
            ("PUT", "/indexes/products/settings/filterable-attributes") => hashset!{"settings.update", "settings.*", "*"},
            ("PUT", "/indexes/products/settings/ranking-rules") => hashset!{"settings.update", "settings.*", "*"},
            ("PUT", "/indexes/products/settings/searchable-attributes") => hashset!{"settings.update", "settings.*", "*"},
            ("PUT", "/indexes/products/settings/sortable-attributes") => hashset!{"settings.update", "settings.*", "*"},
            ("PUT", "/indexes/products/settings/stop-words") => hashset!{"settings.update", "settings.*", "*"},
            ("PUT", "/indexes/products/settings/synonyms") => hashset!{"settings.update", "settings.*", "*"},
            ("GET", "/indexes/products/stats") => hashset!{"stats.get", "stats.*", "*"},
            ("GET", "/stats") => hashset!{"stats.get", "stats.*", "*"},
            ("POST", "/dumps") => hashset!{"dumps.create", "dumps.*", "*"},
            ("GET", "/version") => hashset!{"version", "*"},
            ("PATCH", "/keys/mykey/") => hashset!{"keys.update", "*"},
            ("GET", "/keys/mykey/") => hashset!{"keys.get", "*"},
            ("DELETE", "/keys/mykey/") => hashset!{"keys.delete", "*"},
            ("POST", "/keys") => hashset!{"keys.create", "*"},
            ("GET", "/keys") => hashset!{"keys.get", "*"},
        }
    });

static ALL_ACTIONS: Lazy<HashSet<&'static str>> =
    Lazy::new(|| AUTHORIZATIONS.values().cloned().collect());
pub static ALL_ACTIONS: Lazy<HashSet<&'static str>> = Lazy::new(|| {
    AUTHORIZATIONS
        .values()
        .cloned()
        .reduce(|l, r| l.union(&r).cloned().collect())
        .unwrap()
});

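Since AUTHORIZATIONS now maps each route to a set of acceptable actions rather than a single one, ALL_ACTIONS has to fold those sets into one union instead of collecting single values. A standalone sketch of that reduce-union pattern (illustrative only):

use std::collections::HashSet;

fn union_all(sets: Vec<HashSet<&'static str>>) -> HashSet<&'static str> {
    sets.into_iter()
        // Merge every per-route set into a single set of all known actions.
        .reduce(|l, r| l.union(&r).cloned().collect())
        .unwrap_or_default()
}

// union_all over {"search", "*"} and {"documents.add", "*"}
//   -> {"search", "documents.add", "*"}
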
static INVALID_RESPONSE: Lazy<Value> = Lazy::new(|| {
    json!({"message": "The provided API key is invalid.",
@@ -61,6 +72,7 @@ static INVALID_RESPONSE: Lazy<Value> = Lazy::new(|| {
});

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn error_access_expired_key() {
    use std::{thread, time};

@@ -70,11 +82,11 @@ async fn error_access_expired_key() {
    let content = json!({
        "indexes": ["products"],
        "actions": ALL_ACTIONS.clone(),
        "expiresAt": (Utc::now() + Duration::seconds(1)),
        "expiresAt": (OffsetDateTime::now_utc() + Duration::seconds(1)).format(&Rfc3339).unwrap(),
    });

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert_eq!(201, code, "{:?}", &response);
    assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();
@@ -86,12 +98,19 @@ async fn error_access_expired_key() {
    for (method, route) in AUTHORIZATIONS.keys() {
        let (response, code) = server.dummy_request(method, route).await;

        assert_eq!(response, INVALID_RESPONSE.clone());
        assert_eq!(code, 403);
        assert_eq!(
            response,
            INVALID_RESPONSE.clone(),
            "on route: {:?} - {:?}",
            method,
            route
        );
        assert_eq!(403, code, "{:?}", &response);
    }
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn error_access_unauthorized_index() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");
@@ -99,11 +118,11 @@ async fn error_access_unauthorized_index() {
    let content = json!({
        "indexes": ["sales"],
        "actions": ALL_ACTIONS.clone(),
        "expiresAt": Utc::now() + Duration::hours(1),
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert_eq!(201, code, "{:?}", &response);
    assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();
@@ -116,172 +135,171 @@ async fn error_access_unauthorized_index() {
    {
        let (response, code) = server.dummy_request(method, route).await;

        assert_eq!(response, INVALID_RESPONSE.clone());
        assert_eq!(code, 403);
        assert_eq!(
            response,
            INVALID_RESPONSE.clone(),
            "on route: {:?} - {:?}",
            method,
            route
        );
        assert_eq!(403, code, "{:?}", &response);
    }
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn error_access_unauthorized_action() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    let content = json!({
        "indexes": ["products"],
        "actions": [],
        "expiresAt": Utc::now() + Duration::hours(1),
    });

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

    for ((method, route), action) in AUTHORIZATIONS.iter() {
        // create a new API key letting only the needed action.
        server.use_api_key("MASTER_KEY");

        // Patch API key letting all rights but the needed one.
        let content = json!({
            "actions": ALL_ACTIONS.iter().cloned().filter(|a| a != action).collect::<Vec<_>>(),
            "indexes": ["products"],
            "actions": ALL_ACTIONS.difference(action).collect::<Vec<_>>(),
            "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
        });
        let (_, code) = server.patch_api_key(&key, content).await;
        assert_eq!(code, 200);

        let (response, code) = server.add_api_key(content).await;
        assert_eq!(201, code, "{:?}", &response);
        assert!(response["key"].is_string());

        let key = response["key"].as_str().unwrap();
        server.use_api_key(&key);
        let (response, code) = server.dummy_request(method, route).await;

        assert_eq!(response, INVALID_RESPONSE.clone());
        assert_eq!(code, 403);
        assert_eq!(
            response,
            INVALID_RESPONSE.clone(),
            "on route: {:?} - {:?}",
            method,
            route
        );
        assert_eq!(403, code, "{:?}", &response);
    }
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn access_authorized_master_key() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    // master key must have access to all routes.
    for ((method, route), _) in AUTHORIZATIONS.iter() {
        let (response, code) = server.dummy_request(method, route).await;

        assert_ne!(
            response,
            INVALID_RESPONSE.clone(),
            "on route: {:?} - {:?}",
            method,
            route
        );
        assert_ne!(code, 403);
    }
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn access_authorized_restricted_index() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");
    for ((method, route), actions) in AUTHORIZATIONS.iter() {
        for action in actions {
            // create a new API key letting only the needed action.
            server.use_api_key("MASTER_KEY");

    let content = json!({
        "indexes": ["products"],
        "actions": [],
        "expiresAt": Utc::now() + Duration::hours(1),
    });
            let content = json!({
                "indexes": ["products"],
                "actions": [action],
                "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
            });

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());
            let (response, code) = server.add_api_key(content).await;
            assert_eq!(201, code, "{:?}", &response);
            assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);
            let key = response["key"].as_str().unwrap();
            server.use_api_key(&key);

    for ((method, route), action) in AUTHORIZATIONS.iter() {
        // Patch API key letting only the needed action.
        let content = json!({
            "actions": [action],
        });
            let (response, code) = server.dummy_request(method, route).await;

        server.use_api_key("MASTER_KEY");
        let (_, code) = server.patch_api_key(&key, content).await;
        assert_eq!(code, 200);

        server.use_api_key(&key);
        let (response, code) = server.dummy_request(method, route).await;

        assert_ne!(response, INVALID_RESPONSE.clone());
        assert_ne!(code, 403);

        // Patch API key using the `*` (all) action.
        let content = json!({
            "actions": ["*"],
        });

        server.use_api_key("MASTER_KEY");
        let (_, code) = server.patch_api_key(&key, content).await;
        assert_eq!(code, 200);

        server.use_api_key(&key);
        let (response, code) = server.dummy_request(method, route).await;

        assert_ne!(response, INVALID_RESPONSE.clone());
        assert_ne!(code, 403);
            assert_ne!(
                response,
                INVALID_RESPONSE.clone(),
                "on route: {:?} - {:?} with action: {:?}",
                method,
                route,
                action
            );
            assert_ne!(code, 403);
        }
    }
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn access_authorized_no_index_restriction() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    let content = json!({
        "indexes": ["*"],
        "actions": [],
        "expiresAt": Utc::now() + Duration::hours(1),
    });
    for ((method, route), actions) in AUTHORIZATIONS.iter() {
        for action in actions {
            // create a new API key letting only the needed action.
            server.use_api_key("MASTER_KEY");

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());
            let content = json!({
                "indexes": ["*"],
                "actions": [action],
                "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
            });

    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);
            let (response, code) = server.add_api_key(content).await;
            assert_eq!(201, code, "{:?}", &response);
            assert!(response["key"].is_string());

    for ((method, route), action) in AUTHORIZATIONS.iter() {
        server.use_api_key("MASTER_KEY");
            let key = response["key"].as_str().unwrap();
            server.use_api_key(&key);

        // Patch API key letting only the needed action.
        let content = json!({
            "actions": [action],
        });
        let (_, code) = server.patch_api_key(&key, content).await;
        assert_eq!(code, 200);
            let (response, code) = server.dummy_request(method, route).await;

        server.use_api_key(&key);
        let (response, code) = server.dummy_request(method, route).await;

        assert_ne!(response, INVALID_RESPONSE.clone());
        assert_ne!(code, 403);

        // Patch API key using the `*` (all) action.
        let content = json!({
            "actions": ["*"],
        });

        server.use_api_key("MASTER_KEY");
        let (_, code) = server.patch_api_key(&key, content).await;
        assert_eq!(code, 200);

        server.use_api_key(&key);
        let (response, code) = server.dummy_request(method, route).await;

        assert_ne!(response, INVALID_RESPONSE.clone());
        assert_ne!(code, 403);
            assert_ne!(
                response,
                INVALID_RESPONSE.clone(),
                "on route: {:?} - {:?} with action: {:?}",
                method,
                route,
                action
            );
            assert_ne!(code, 403);
        }
    }
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn access_authorized_stats_restricted_index() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");
    server.use_admin_key("MASTER_KEY").await;

    // create index `test`
    let index = server.index("test");
    let (_, code) = index.create(Some("id")).await;
    assert_eq!(code, 202);
    let (response, code) = index.create(Some("id")).await;
    assert_eq!(202, code, "{:?}", &response);
    // create index `products`
    let index = server.index("products");
    let (_, code) = index.create(Some("product_id")).await;
    assert_eq!(code, 202);
    let (response, code) = index.create(Some("product_id")).await;
    assert_eq!(202, code, "{:?}", &response);
    index.wait_task(0).await;

    // create key with access on `products` index only.
    let content = json!({
        "indexes": ["products"],
        "actions": ["stats.get"],
        "expiresAt": Utc::now() + Duration::hours(1),
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert_eq!(201, code, "{:?}", &response);
    assert!(response["key"].is_string());

    // use created key.
@@ -289,7 +307,7 @@ async fn access_authorized_stats_restricted_index() {
    server.use_api_key(&key);

    let (response, code) = server.stats().await;
    assert_eq!(code, 200);
    assert_eq!(200, code, "{:?}", &response);

    // key should have access on `products` index.
    assert!(response["indexes"].get("products").is_some());
@@ -299,28 +317,29 @@ async fn access_authorized_stats_restricted_index() {
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn access_authorized_stats_no_index_restriction() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");
    server.use_admin_key("MASTER_KEY").await;

    // create index `test`
    let index = server.index("test");
    let (_, code) = index.create(Some("id")).await;
    assert_eq!(code, 202);
    let (response, code) = index.create(Some("id")).await;
    assert_eq!(202, code, "{:?}", &response);
    // create index `products`
    let index = server.index("products");
    let (_, code) = index.create(Some("product_id")).await;
    assert_eq!(code, 202);
    let (response, code) = index.create(Some("product_id")).await;
    assert_eq!(202, code, "{:?}", &response);
    index.wait_task(0).await;

    // create key with access on all indexes.
    let content = json!({
        "indexes": ["*"],
        "actions": ["stats.get"],
        "expiresAt": Utc::now() + Duration::hours(1),
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert_eq!(201, code, "{:?}", &response);
    assert!(response["key"].is_string());

    // use created key.
@@ -328,7 +347,7 @@ async fn access_authorized_stats_no_index_restriction() {
    server.use_api_key(&key);

    let (response, code) = server.stats().await;
    assert_eq!(code, 200);
    assert_eq!(200, code, "{:?}", &response);

    // key should have access on `products` index.
    assert!(response["indexes"].get("products").is_some());
@@ -338,38 +357,39 @@ async fn access_authorized_stats_no_index_restriction() {
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn list_authorized_indexes_restricted_index() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");
    server.use_admin_key("MASTER_KEY").await;

    // create index `test`
    let index = server.index("test");
    let (_, code) = index.create(Some("id")).await;
    assert_eq!(code, 202);
    let (response, code) = index.create(Some("id")).await;
    assert_eq!(202, code, "{:?}", &response);
    // create index `products`
    let index = server.index("products");
    let (_, code) = index.create(Some("product_id")).await;
    assert_eq!(code, 202);
    let (response, code) = index.create(Some("product_id")).await;
    assert_eq!(202, code, "{:?}", &response);
    index.wait_task(0).await;

    // create key with access on `products` index only.
    let content = json!({
        "indexes": ["products"],
        "actions": ["indexes.get"],
        "expiresAt": Utc::now() + Duration::hours(1),
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert_eq!(201, code, "{:?}", &response);
    assert!(response["key"].is_string());

    // use created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

    let (response, code) = server.list_indexes().await;
    assert_eq!(code, 200);
    let (response, code) = server.list_indexes(None, None).await;
    assert_eq!(200, code, "{:?}", &response);

    let response = response.as_array().unwrap();
    let response = response["results"].as_array().unwrap();
    // key should have access on `products` index.
    assert!(response.iter().any(|index| index["uid"] == "products"));

@@ -378,38 +398,39 @@ async fn list_authorized_indexes_restricted_index() {
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn list_authorized_indexes_no_index_restriction() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");
    server.use_admin_key("MASTER_KEY").await;

    // create index `test`
    let index = server.index("test");
    let (_, code) = index.create(Some("id")).await;
    assert_eq!(code, 202);
    let (response, code) = index.create(Some("id")).await;
    assert_eq!(202, code, "{:?}", &response);
    // create index `products`
    let index = server.index("products");
    let (_, code) = index.create(Some("product_id")).await;
    assert_eq!(code, 202);
    let (response, code) = index.create(Some("product_id")).await;
    assert_eq!(202, code, "{:?}", &response);
    index.wait_task(0).await;

    // create key with access on all indexes.
    let content = json!({
        "indexes": ["*"],
        "actions": ["indexes.get"],
        "expiresAt": Utc::now() + Duration::hours(1),
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert_eq!(201, code, "{:?}", &response);
    assert!(response["key"].is_string());

    // use created key.
    let key = response["key"].as_str().unwrap();
    server.use_api_key(&key);

    let (response, code) = server.list_indexes().await;
    assert_eq!(code, 200);
    let (response, code) = server.list_indexes(None, None).await;
    assert_eq!(200, code, "{:?}", &response);

    let response = response.as_array().unwrap();
    let response = response["results"].as_array().unwrap();
    // key should have access on `products` index.
    assert!(response.iter().any(|index| index["uid"] == "products"));

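Note the response-shape change exercised here: GET /indexes now returns a paginated object whose indexes live under "results" instead of a bare array, which is why the updated assertions index into response["results"]. Roughly (illustrative values; the surrounding pagination keys are assumptions, only "results" is confirmed by the tests above):

{
  "results": [
    { "uid": "test", "primaryKey": "id" },
    { "uid": "products", "primaryKey": "product_id" }
  ],
  "limit": 20
}
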
@@ -420,26 +441,26 @@ async fn list_authorized_indexes_no_index_restriction() {
#[actix_rt::test]
async fn list_authorized_tasks_restricted_index() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");
    server.use_admin_key("MASTER_KEY").await;

    // create index `test`
    let index = server.index("test");
    let (_, code) = index.create(Some("id")).await;
    assert_eq!(code, 202);
    let (response, code) = index.create(Some("id")).await;
    assert_eq!(202, code, "{:?}", &response);
    // create index `products`
    let index = server.index("products");
    let (_, code) = index.create(Some("product_id")).await;
    assert_eq!(code, 202);
    let (response, code) = index.create(Some("product_id")).await;
    assert_eq!(202, code, "{:?}", &response);
    index.wait_task(0).await;

    // create key with access on `products` index only.
    let content = json!({
        "indexes": ["products"],
        "actions": ["tasks.get"],
        "expiresAt": Utc::now() + Duration::hours(1),
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert_eq!(201, code, "{:?}", &response);
    assert!(response["key"].is_string());

    // use created key.
@@ -447,7 +468,7 @@ async fn list_authorized_tasks_restricted_index() {
    server.use_api_key(&key);

    let (response, code) = server.service.get("/tasks").await;
    assert_eq!(code, 200);
    assert_eq!(200, code, "{:?}", &response);
    println!("{}", response);
    let response = response["results"].as_array().unwrap();
    // key should have access on `products` index.
@@ -460,26 +481,26 @@ async fn list_authorized_tasks_restricted_index() {
#[actix_rt::test]
async fn list_authorized_tasks_no_index_restriction() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");
    server.use_admin_key("MASTER_KEY").await;

    // create index `test`
    let index = server.index("test");
    let (_, code) = index.create(Some("id")).await;
    assert_eq!(code, 202);
    let (response, code) = index.create(Some("id")).await;
    assert_eq!(202, code, "{:?}", &response);
    // create index `products`
    let index = server.index("products");
    let (_, code) = index.create(Some("product_id")).await;
    assert_eq!(code, 202);
    let (response, code) = index.create(Some("product_id")).await;
    assert_eq!(202, code, "{:?}", &response);
    index.wait_task(0).await;

    // create key with access on all indexes.
    let content = json!({
        "indexes": ["*"],
        "actions": ["tasks.get"],
        "expiresAt": Utc::now() + Duration::hours(1),
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert_eq!(201, code, "{:?}", &response);
    assert!(response["key"].is_string());

    // use created key.
@@ -487,7 +508,7 @@ async fn list_authorized_tasks_no_index_restriction() {
    server.use_api_key(&key);

    let (response, code) = server.service.get("/tasks").await;
    assert_eq!(code, 200);
    assert_eq!(200, code, "{:?}", &response);

    let response = response["results"].as_array().unwrap();
    // key should have access on `products` index.
@@ -505,11 +526,12 @@ async fn error_creating_index_without_action() {
    // create key with access on all indexes.
    let content = json!({
        "indexes": ["*"],
        "actions": ALL_ACTIONS.iter().cloned().filter(|a| *a != "indexes.create").collect::<Vec<_>>(),
        // Give all actions but the ones allowing to create an index.
        "actions": ALL_ACTIONS.iter().cloned().filter(|a| !AUTHORIZATIONS.get(&("POST","/indexes")).unwrap().contains(a)).collect::<Vec<_>>(),
        "expiresAt": "2050-11-13T00:00:00Z"
    });
    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert_eq!(201, code, "{:?}", &response);
    assert!(response["key"].is_string());

    // use created key.
@@ -533,8 +555,8 @@ async fn error_creating_index_without_action() {
    ]);

    let (response, code) = index.add_documents(documents, None).await;
    assert_eq!(code, 202, "{:?}", response);
    let task_id = response["uid"].as_u64().unwrap();
    assert_eq!(202, code, "{:?}", &response);
    let task_id = response["taskUid"].as_u64().unwrap();

    let response = index.wait_task(task_id).await;
    assert_eq!(response["status"], "failed");
@@ -544,8 +566,8 @@ async fn error_creating_index_without_action() {
    let settings = json!({ "distinctAttribute": "test"});

    let (response, code) = index.update_settings(settings).await;
    assert_eq!(code, 202);
    let task_id = response["uid"].as_u64().unwrap();
    assert_eq!(202, code, "{:?}", &response);
    let task_id = response["taskUid"].as_u64().unwrap();

    let response = index.wait_task(task_id).await;

@@ -554,8 +576,8 @@ async fn error_creating_index_without_action() {

    // try to create an index via the specialized settings route
    let (response, code) = index.update_distinct_attribute(json!("test")).await;
    assert_eq!(code, 202);
    let task_id = response["uid"].as_u64().unwrap();
    assert_eq!(202, code, "{:?}", &response);
    let task_id = response["taskUid"].as_u64().unwrap();

    let response = index.wait_task(task_id).await;

@@ -576,7 +598,7 @@ async fn lazy_create_index() {
    });

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert_eq!(201, code, "{:?}", &response);
    assert!(response["key"].is_string());

    // use created key.
@@ -593,13 +615,13 @@ async fn lazy_create_index() {
    ]);

    let (response, code) = index.add_documents(documents, None).await;
    assert_eq!(code, 202, "{:?}", response);
    let task_id = response["uid"].as_u64().unwrap();
    assert_eq!(202, code, "{:?}", &response);
    let task_id = response["taskUid"].as_u64().unwrap();

    index.wait_task(task_id).await;

    let (response, code) = index.get_task(task_id).await;
    assert_eq!(code, 200);
    assert_eq!(200, code, "{:?}", &response);
    assert_eq!(response["status"], "succeeded");

    // try to create an index via the settings route
@@ -607,24 +629,24 @@ async fn lazy_create_index() {
    let settings = json!({ "distinctAttribute": "test"});

    let (response, code) = index.update_settings(settings).await;
    assert_eq!(code, 202);
    let task_id = response["uid"].as_u64().unwrap();
    assert_eq!(202, code, "{:?}", &response);
    let task_id = response["taskUid"].as_u64().unwrap();

    index.wait_task(task_id).await;

    let (response, code) = index.get_task(task_id).await;
    assert_eq!(code, 200);
    assert_eq!(200, code, "{:?}", &response);
    assert_eq!(response["status"], "succeeded");

    // try to create an index via the specialized settings route
    let index = server.index("test2");
    let (response, code) = index.update_distinct_attribute(json!("test")).await;
    assert_eq!(code, 202);
    let task_id = response["uid"].as_u64().unwrap();
    assert_eq!(202, code, "{:?}", &response);
    let task_id = response["taskUid"].as_u64().unwrap();

    index.wait_task(task_id).await;

    let (response, code) = index.get_task(task_id).await;
    assert_eq!(code, 200);
    assert_eq!(200, code, "{:?}", &response);
    assert_eq!(response["status"], "succeeded");
}

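The assertion updates above also track a response rename from "uid" to "taskUid": write routes now answer 202 with a summarized task shaped roughly like this (illustrative values, following the SummarizedTaskView fields earlier in this diff):

{
  "taskUid": 0,
  "indexUid": "test",
  "status": "enqueued",
  "type": "documentAdditionOrUpdate",
  "enqueuedAt": "2022-01-01T00:00:00Z"
}
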
@@ -1,6 +1,7 @@
mod api_keys;
mod authorization;
mod payload;
mod tenant_token;

use crate::common::Server;
use actix_web::http::StatusCode;
@@ -12,6 +13,15 @@ impl Server {
        self.service.api_key = Some(api_key.as_ref().to_string());
    }

    /// Fetch and use the default admin key for the next HTTP requests.
    pub async fn use_admin_key(&mut self, master_key: impl AsRef<str>) {
        self.use_api_key(master_key);
        let (response, code) = self.list_api_keys().await;
        assert_eq!(200, code, "{:?}", response);
        let admin_key = &response["results"][1]["key"];
        self.use_api_key(admin_key.as_str().unwrap());
    }

    pub async fn add_api_key(&self, content: Value) -> (Value, StatusCode) {
        let url = "/keys";
        self.service.post(url, content).await

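A sketch of how the tests in this diff typically combine those helpers (illustrative only; the key body mirrors the ones used throughout this file):

#[actix_rt::test]
async fn helper_usage_sketch() {
    let mut server = Server::new_auth().await;
    // Swap the master key for the default admin key fetched from /keys.
    server.use_admin_key("MASTER_KEY").await;

    let (response, code) = server
        .add_api_key(json!({
            "indexes": ["products"],
            "actions": ["search"],
            "expiresAt": "2050-11-13T00:00:00Z",
        }))
        .await;
    assert_eq!(201, code, "{:?}", &response);
    // All subsequent requests authenticate with the newly created key.
    server.use_api_key(response["key"].as_str().unwrap());
}
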
584  meilisearch-http/tests/auth/tenant_token.rs  (new file)
@@ -0,0 +1,584 @@
use crate::common::Server;
use ::time::format_description::well_known::Rfc3339;
use maplit::hashmap;
use once_cell::sync::Lazy;
use serde_json::{json, Value};
use std::collections::HashMap;
use time::{Duration, OffsetDateTime};

use super::authorization::{ALL_ACTIONS, AUTHORIZATIONS};

fn generate_tenant_token(
    parent_uid: impl AsRef<str>,
    parent_key: impl AsRef<str>,
    mut body: HashMap<&str, Value>,
) -> String {
    use jsonwebtoken::{encode, EncodingKey, Header};

    let parent_uid = parent_uid.as_ref();
    body.insert("apiKeyUid", json!(parent_uid));
    encode(
        &Header::default(),
        &body,
        &EncodingKey::from_secret(parent_key.as_ref().as_bytes()),
    )
    .unwrap()
}

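For debugging, the inverse of generate_tenant_token can be sketched with the same crate (an illustrative helper, not part of the diff; jsonwebtoken's default Validation checks the exp claim):

fn decode_tenant_token(token: &str, parent_key: &str) -> HashMap<String, Value> {
    use jsonwebtoken::{decode, DecodingKey, Validation};

    // Verify the signature with the parent API key and return the claims,
    // which include the searchRules, exp, and injected apiKeyUid fields.
    decode::<HashMap<String, Value>>(
        token,
        &DecodingKey::from_secret(parent_key.as_bytes()),
        &Validation::default(),
    )
    .unwrap()
    .claims
}
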
static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
    json!([
        {
            "title": "Shazam!",
            "id": "287947",
            "color": ["green", "blue"]
        },
        {
            "title": "Captain Marvel",
            "id": "299537",
            "color": ["yellow", "blue"]
        },
        {
            "title": "Escape Room",
            "id": "522681",
            "color": ["yellow", "red"]
        },
        {
            "title": "How to Train Your Dragon: The Hidden World",
            "id": "166428",
            "color": ["green", "red"]
        },
        {
            "title": "Glass",
            "id": "450465",
            "color": ["blue", "red"]
        }
    ])
});

static INVALID_RESPONSE: Lazy<Value> = Lazy::new(|| {
    json!({"message": "The provided API key is invalid.",
        "code": "invalid_api_key",
        "type": "auth",
        "link": "https://docs.meilisearch.com/errors#invalid_api_key"
    })
});

static ACCEPTED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
    vec![
        json!({
            "indexes": ["*"],
            "actions": ["*"],
            "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
        }),
        json!({
            "indexes": ["*"],
            "actions": ["search"],
            "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
        }),
        json!({
            "indexes": ["sales"],
            "actions": ["*"],
            "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
        }),
        json!({
            "indexes": ["sales"],
            "actions": ["search"],
            "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
        }),
    ]
});

static REFUSED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
    vec![
        // no search action
        json!({
            "indexes": ["*"],
            "actions": ALL_ACTIONS.iter().cloned().filter(|a| *a != "search" && *a != "*").collect::<Vec<_>>(),
            "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
        }),
        json!({
            "indexes": ["sales"],
            "actions": ALL_ACTIONS.iter().cloned().filter(|a| *a != "search" && *a != "*").collect::<Vec<_>>(),
            "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
        }),
        // bad index
        json!({
            "indexes": ["products"],
            "actions": ["*"],
            "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
        }),
        json!({
            "indexes": ["products"],
            "actions": ["search"],
            "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
        }),
    ]
});

macro_rules! compute_authorized_search {
    ($tenant_tokens:expr, $filter:expr, $expected_count:expr) => {
        let mut server = Server::new_auth().await;
        server.use_admin_key("MASTER_KEY").await;
        let index = server.index("sales");
        let documents = DOCUMENTS.clone();
        index.add_documents(documents, None).await;
        index.wait_task(0).await;
        index
            .update_settings(json!({"filterableAttributes": ["color"]}))
            .await;
        index.wait_task(1).await;
        drop(index);

        for key_content in ACCEPTED_KEYS.iter() {
            server.use_api_key("MASTER_KEY");
            let (response, code) = server.add_api_key(key_content.clone()).await;
            assert_eq!(code, 201);
            let key = response["key"].as_str().unwrap();
            let uid = response["uid"].as_str().unwrap();

            for tenant_token in $tenant_tokens.iter() {
                let web_token = generate_tenant_token(&uid, &key, tenant_token.clone());
                server.use_api_key(&web_token);
                let index = server.index("sales");
                index
                    .search(json!({ "filter": $filter }), |response, code| {
                        assert_eq!(
                            code, 200,
                            "{} using tenant_token: {:?} generated with parent_key: {:?}",
                            response, tenant_token, key_content
                        );
                        assert_eq!(
                            response["hits"].as_array().unwrap().len(),
                            $expected_count,
                            "{} using tenant_token: {:?} generated with parent_key: {:?}",
                            response,
                            tenant_token,
                            key_content
                        );
                    })
                    .await;
            }
        }
    };
}

macro_rules! compute_forbidden_search {
    ($tenant_tokens:expr, $parent_keys:expr) => {
        let mut server = Server::new_auth().await;
        server.use_admin_key("MASTER_KEY").await;
        let index = server.index("sales");
        let documents = DOCUMENTS.clone();
        index.add_documents(documents, None).await;
        index.wait_task(0).await;
        drop(index);

        for key_content in $parent_keys.iter() {
            server.use_api_key("MASTER_KEY");
            let (response, code) = server.add_api_key(key_content.clone()).await;
            assert_eq!(code, 201, "{:?}", response);
            let key = response["key"].as_str().unwrap();
            let uid = response["uid"].as_str().unwrap();

            for tenant_token in $tenant_tokens.iter() {
                let web_token = generate_tenant_token(&uid, &key, tenant_token.clone());
                server.use_api_key(&web_token);
                let index = server.index("sales");
                index
                    .search(json!({}), |response, code| {
                        assert_eq!(
                            response,
                            INVALID_RESPONSE.clone(),
                            "{} using tenant_token: {:?} generated with parent_key: {:?}",
                            response,
                            tenant_token,
                            key_content
                        );
                        assert_eq!(
                            code, 403,
                            "{} using tenant_token: {:?} generated with parent_key: {:?}",
                            response, tenant_token, key_content
                        );
                    })
                    .await;
            }
        }
    };
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn search_authorized_simple_token() {
    let tenant_tokens = vec![
        hashmap! {
            "searchRules" => json!({"*": {}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!(["*"]),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"sales": {}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!(["sales"]),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"*": {}}),
            "exp" => Value::Null
        },
        hashmap! {
            "searchRules" => json!({"*": Value::Null}),
            "exp" => Value::Null
        },
        hashmap! {
            "searchRules" => json!(["*"]),
            "exp" => Value::Null
        },
        hashmap! {
            "searchRules" => json!({"sales": {}}),
            "exp" => Value::Null
        },
        hashmap! {
            "searchRules" => json!({"sales": Value::Null}),
            "exp" => Value::Null
        },
        hashmap! {
            "searchRules" => json!(["sales"]),
            "exp" => Value::Null
        },
    ];

    compute_authorized_search!(tenant_tokens, {}, 5);
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn search_authorized_filter_token() {
    let tenant_tokens = vec![
        hashmap! {
            "searchRules" => json!({"*": {"filter": "color = blue"}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"sales": {"filter": "color = blue"}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"*": {"filter": ["color = blue"]}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"sales": {"filter": ["color = blue"]}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        // filter on sales should override filters on *
        hashmap! {
            "searchRules" => json!({
                "*": {"filter": "color = green"},
                "sales": {"filter": "color = blue"}
            }),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({
                "*": {},
                "sales": {"filter": "color = blue"}
            }),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({
                "*": {"filter": "color = green"},
                "sales": {"filter": ["color = blue"]}
            }),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({
                "*": {},
                "sales": {"filter": ["color = blue"]}
            }),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
    ];

    compute_authorized_search!(tenant_tokens, {}, 3);
}

#[actix_rt::test]
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
async fn filter_search_authorized_filter_token() {
|
||||
let tenant_tokens = vec![
|
||||
hashmap! {
|
||||
"searchRules" => json!({"*": {"filter": "color = blue"}}),
|
||||
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": {"filter": "color = blue"}}),
|
||||
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"*": {"filter": ["color = blue"]}}),
|
||||
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({"sales": {"filter": ["color = blue"]}}),
|
||||
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
// filter on sales should override filters on *
|
||||
hashmap! {
|
||||
"searchRules" => json!({
|
||||
"*": {"filter": "color = green"},
|
||||
"sales": {"filter": "color = blue"}
|
||||
}),
|
||||
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({
|
||||
"*": {},
|
||||
"sales": {"filter": "color = blue"}
|
||||
}),
|
||||
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({
|
||||
"*": {"filter": "color = green"},
|
||||
"sales": {"filter": ["color = blue"]}
|
||||
}),
|
||||
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
hashmap! {
|
||||
"searchRules" => json!({
|
||||
"*": {},
|
||||
"sales": {"filter": ["color = blue"]}
|
||||
}),
|
||||
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
|
||||
},
|
||||
];
|
||||
|
||||
compute_authorized_search!(tenant_tokens, "color = yellow", 1);
|
||||
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn error_search_token_forbidden_parent_key() {
    let tenant_tokens = vec![
        hashmap! {
            "searchRules" => json!({"*": {}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"*": Value::Null}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!(["*"]),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"sales": {}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"sales": Value::Null}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!(["sales"]),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
    ];

    compute_forbidden_search!(tenant_tokens, REFUSED_KEYS);
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn error_search_forbidden_token() {
    let tenant_tokens = vec![
        // bad index
        hashmap! {
            "searchRules" => json!({"products": {}}),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!(["products"]),
            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"products": {}}),
            "exp" => Value::Null
        },
        hashmap! {
            "searchRules" => json!({"products": Value::Null}),
            "exp" => Value::Null
        },
        hashmap! {
            "searchRules" => json!(["products"]),
            "exp" => Value::Null
        },
        // expired token
        hashmap! {
            "searchRules" => json!({"*": {}}),
            "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"*": Value::Null}),
            "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!(["*"]),
            "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"sales": {}}),
            "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!({"sales": Value::Null}),
            "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
        },
        hashmap! {
            "searchRules" => json!(["sales"]),
            "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp())
        },
    ];

    compute_forbidden_search!(tenant_tokens, ACCEPTED_KEYS);
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn error_access_forbidden_routes() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    let content = json!({
        "indexes": ["*"],
        "actions": ["*"],
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();
    let uid = response["uid"].as_str().unwrap();

    let tenant_token = hashmap! {
        "searchRules" => json!(["*"]),
        "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
    };
    let web_token = generate_tenant_token(&uid, &key, tenant_token);
    server.use_api_key(&web_token);

    for ((method, route), actions) in AUTHORIZATIONS.iter() {
        if !actions.contains("search") {
            let (response, code) = server.dummy_request(method, route).await;
            assert_eq!(response, INVALID_RESPONSE.clone());
            assert_eq!(code, 403);
        }
    }
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn error_access_expired_parent_key() {
    use std::{thread, time};
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    let content = json!({
        "indexes": ["*"],
        "actions": ["*"],
        "expiresAt": (OffsetDateTime::now_utc() + Duration::seconds(1)).format(&Rfc3339).unwrap(),
    });

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();
    let uid = response["uid"].as_str().unwrap();

    let tenant_token = hashmap! {
        "searchRules" => json!(["*"]),
        "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
    };
    let web_token = generate_tenant_token(&uid, &key, tenant_token);
    server.use_api_key(&web_token);

    // test search request while parent_key is not expired
    let (response, code) = server
        .dummy_request("POST", "/indexes/products/search")
        .await;
    assert_ne!(response, INVALID_RESPONSE.clone());
    assert_ne!(code, 403);

    // wait until the key is expired.
    thread::sleep(time::Duration::new(1, 0));

    let (response, code) = server
        .dummy_request("POST", "/indexes/products/search")
        .await;
    assert_eq!(response, INVALID_RESPONSE.clone());
    assert_eq!(code, 403);
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn error_access_modified_token() {
    let mut server = Server::new_auth().await;
    server.use_api_key("MASTER_KEY");

    let content = json!({
        "indexes": ["*"],
        "actions": ["*"],
        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
    });

    let (response, code) = server.add_api_key(content).await;
    assert_eq!(code, 201);
    assert!(response["key"].is_string());

    let key = response["key"].as_str().unwrap();
    let uid = response["uid"].as_str().unwrap();

    let tenant_token = hashmap! {
        "searchRules" => json!(["products"]),
        "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
    };
    let web_token = generate_tenant_token(&uid, &key, tenant_token);
    server.use_api_key(&web_token);

    // test search request while web_token is valid
    let (response, code) = server
        .dummy_request("POST", "/indexes/products/search")
        .await;
    assert_ne!(response, INVALID_RESPONSE.clone());
    assert_ne!(code, 403);

    let tenant_token = hashmap! {
        "searchRules" => json!(["*"]),
        "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
    };

    let alt = generate_tenant_token(&uid, &key, tenant_token);
    let altered_token = [
        web_token.split('.').next().unwrap(),
        alt.split('.').nth(1).unwrap(),
        web_token.split('.').nth(2).unwrap(),
    ]
    .join(".");

    server.use_api_key(&altered_token);
    let (response, code) = server
        .dummy_request("POST", "/indexes/products/search")
        .await;
    assert_eq!(response, INVALID_RESPONSE.clone());
    assert_eq!(code, 403);
}
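
// Aside (illustrative only, not part of the diff above): the altered-token
// test works because a tenant token is a JWT — three dot-separated base64url
// segments `header.payload.signature`, where the signature is an HMAC-SHA256
// over `header.payload` keyed by the parent API key. Splicing the payload of
// one token between the header and signature of another therefore breaks the
// check. A sketch of that verification, assuming the `hmac`, `sha2` and
// `base64` crates; this is not Meilisearch's actual verifier.
fn signature_matches(token: &str, key: &[u8]) -> bool {
    use hmac::{Hmac, Mac};
    use sha2::Sha256;

    // `rsplitn` yields the signature first, then the `header.payload` part.
    let mut parts = token.rsplitn(2, '.');
    let (signature, signed) = match (parts.next(), parts.next()) {
        (Some(sig), Some(rest)) => (sig, rest),
        _ => return false,
    };
    let decoded = match base64::decode_config(signature, base64::URL_SAFE_NO_PAD) {
        Ok(bytes) => bytes,
        Err(_) => return false,
    };
    let mut mac = Hmac::<Sha256>::new_from_slice(key).expect("HMAC accepts any key length");
    mac.update(signed.as_bytes());
    // Fails for `altered_token` above: the payload changed, but the signature
    // still commits to the original one.
    mac.verify_slice(&decoded).is_ok()
}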

@@ -1,32 +1,16 @@
use std::{
    fmt::Write,
    panic::{catch_unwind, resume_unwind, UnwindSafe},
    time::Duration,
};

use actix_web::http::StatusCode;
use paste::paste;
use serde_json::{json, Value};
use tokio::time::sleep;
use urlencoding::encode;

use super::service::Service;

macro_rules! make_settings_test_routes {
    ($($name:ident),+) => {
        $(paste! {
            pub async fn [<update_$name>](&self, value: Value) -> (Value, StatusCode) {
                let url = format!("/indexes/{}/settings/{}", encode(self.uid.as_ref()).to_string(), stringify!($name).replace("_", "-"));
                self.service.post(url, value).await
            }

            pub async fn [<get_$name>](&self) -> (Value, StatusCode) {
                let url = format!("/indexes/{}/settings/{}", encode(self.uid.as_ref()).to_string(), stringify!($name).replace("_", "-"));
                self.service.get(url).await
            }
        })*
    };
}

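// Aside (illustrative only, not part of the diff above): the macro relies on
// `paste::paste!` to build method names out of its argument, so a single
// invocation such as `make_settings_test_routes!(distinct_attribute);`
// defines the `update_distinct_attribute` / `get_distinct_attribute` pair
// that also appears hand-written further down in this diff. A trimmed-down
// sketch of the same identifier-pasting technique:
macro_rules! make_getter {
    ($name:ident) => {
        paste::paste! {
            // `[<get_ $name>]` concatenates the two tokens into one ident.
            fn [<get_ $name>]() -> String {
                // Like the real macro, map `_` to `-` at runtime to form the
                // route segment, e.g. `distinct-attribute`.
                format!("/settings/{}", stringify!($name).replace("_", "-"))
            }
        }
    };
}

make_getter!(distinct_attribute); // defines `get_distinct_attribute()`
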
pub struct Index<'a> {
    pub uid: String,
    pub service: &'a Service,
@@ -46,7 +30,7 @@ impl Index<'_> {
            .post_str(url, include_str!("../assets/test_set.json"))
            .await;
        assert_eq!(code, 202);
        let update_id = response["uid"].as_i64().unwrap();
        let update_id = response["taskUid"].as_i64().unwrap();
        self.wait_task(update_id as u64).await;
        update_id as u64
    }
@@ -65,7 +49,7 @@ impl Index<'_> {
        });
        let url = format!("/indexes/{}", encode(self.uid.as_ref()));

        self.service.put(url, body).await
        self.service.patch(url, body).await
    }

    pub async fn delete(&self) -> (Value, StatusCode) {
@@ -106,55 +90,67 @@ impl Index<'_> {
    }

    pub async fn wait_task(&self, update_id: u64) -> Value {
        // try 10 times to get status, or panic to not wait forever
        // try several times to get status, or panic to not wait forever
        let url = format!("/tasks/{}", update_id);
        for _ in 0..10 {
        for _ in 0..100 {
            let (response, status_code) = self.service.get(&url).await;
            assert_eq!(status_code, 200, "response: {}", response);
            assert_eq!(200, status_code, "response: {}", response);

            if response["status"] == "succeeded" || response["status"] == "failed" {
                return response;
            }

            sleep(Duration::from_secs(1)).await;
            // wait 0.5 second.
            sleep(Duration::from_millis(500)).await;
        }
        panic!("Timeout waiting for update id");
    }
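
// Aside (not part of the diff above): the polling budget changes from
// 10 attempts x 1 s to 100 attempts x 0.5 s, so the cap on waiting for a
// task grows from ~10 s to ~50 s while each miss only costs half a second.
// Typical call site, as used throughout these tests:
//
//     index.add_documents(documents, None).await;
//     let task = index.wait_task(0).await;
//     assert_eq!(task["status"], "succeeded");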

    pub async fn get_task(&self, update_id: u64) -> (Value, StatusCode) {
        let url = format!("/indexes/{}/tasks/{}", self.uid, update_id);
        let url = format!("/tasks/{}", update_id);
        self.service.get(url).await
    }

    pub async fn list_tasks(&self) -> (Value, StatusCode) {
        let url = format!("/indexes/{}/tasks", self.uid);
        let url = format!("/tasks?indexUid={}", self.uid);
        self.service.get(url).await
    }

    pub async fn filtered_tasks(&self, type_: &[&str], status: &[&str]) -> (Value, StatusCode) {
        let mut url = format!("/tasks?indexUid={}", self.uid);
        if !type_.is_empty() {
            let _ = write!(url, "&type={}", type_.join(","));
        }
        if !status.is_empty() {
            let _ = write!(url, "&status={}", status.join(","));
        }
        self.service.get(url).await
    }

    pub async fn get_document(
        &self,
        id: u64,
        _options: Option<GetDocumentOptions>,
        options: Option<GetDocumentOptions>,
    ) -> (Value, StatusCode) {
        let url = format!("/indexes/{}/documents/{}", encode(self.uid.as_ref()), id);
        let mut url = format!("/indexes/{}/documents/{}", encode(self.uid.as_ref()), id);
        if let Some(fields) = options.and_then(|o| o.fields) {
            let _ = write!(url, "?fields={}", fields.join(","));
        }
        self.service.get(url).await
    }

    pub async fn get_all_documents(&self, options: GetAllDocumentsOptions) -> (Value, StatusCode) {
        let mut url = format!("/indexes/{}/documents?", encode(self.uid.as_ref()));
        if let Some(limit) = options.limit {
            url.push_str(&format!("limit={}&", limit));
            let _ = write!(url, "limit={}&", limit);
        }

        if let Some(offset) = options.offset {
            url.push_str(&format!("offset={}&", offset));
            let _ = write!(url, "offset={}&", offset);
        }

        if let Some(attributes_to_retrieve) = options.attributes_to_retrieve {
            url.push_str(&format!(
                "attributesToRetrieve={}&",
                attributes_to_retrieve.join(",")
            ));
            let _ = write!(url, "fields={}&", attributes_to_retrieve.join(","));
        }

        self.service.get(url).await
@@ -187,7 +183,7 @@ impl Index<'_> {

    pub async fn update_settings(&self, settings: Value) -> (Value, StatusCode) {
        let url = format!("/indexes/{}/settings", encode(self.uid.as_ref()));
        self.service.post(url, settings).await
        self.service.patch(url, settings).await
    }

    pub async fn delete_settings(&self) -> (Value, StatusCode) {
@@ -226,15 +222,33 @@ impl Index<'_> {
    }

    pub async fn search_get(&self, query: Value) -> (Value, StatusCode) {
        let params = serde_url_params::to_string(&query).unwrap();
        let params = yaup::to_string(&query).unwrap();
        let url = format!("/indexes/{}/search?{}", encode(self.uid.as_ref()), params);
        self.service.get(url).await
    }

    make_settings_test_routes!(distinct_attribute);
    pub async fn update_distinct_attribute(&self, value: Value) -> (Value, StatusCode) {
        let url = format!(
            "/indexes/{}/settings/{}",
            encode(self.uid.as_ref()),
            "distinct-attribute"
        );
        self.service.put(url, value).await
    }

    pub async fn get_distinct_attribute(&self) -> (Value, StatusCode) {
        let url = format!(
            "/indexes/{}/settings/{}",
            encode(self.uid.as_ref()),
            "distinct-attribute"
        );
        self.service.get(url).await
    }
}

pub struct GetDocumentOptions;
pub struct GetDocumentOptions {
    pub fields: Option<Vec<&'static str>>,
}

#[derive(Debug, Default)]
pub struct GetAllDocumentsOptions {

@@ -3,7 +3,7 @@ pub mod server;
pub mod service;

pub use index::{GetAllDocumentsOptions, GetDocumentOptions};
pub use server::Server;
pub use server::{default_settings, Server};

/// Performs a search test on both post and get routes
#[macro_export]

@@ -1,4 +1,6 @@
#![allow(dead_code)]

use clap::Parser;
use std::path::Path;

use actix_web::http::StatusCode;
@@ -50,16 +52,13 @@ impl Server {
        }
    }

    pub async fn new_auth() -> Self {
        let dir = TempDir::new().unwrap();

    pub async fn new_auth_with_options(mut options: Opt, dir: TempDir) -> Self {
        if cfg!(windows) {
            std::env::set_var("TMP", TEST_TEMP_DIR.path());
        } else {
            std::env::set_var("TMPDIR", TEST_TEMP_DIR.path());
        }

        let mut options = default_settings(dir.path());
        options.master_key = Some("MASTER_KEY".to_string());

        let meilisearch = setup_meilisearch(&options).unwrap();
@@ -77,9 +76,15 @@ impl Server {
        }
    }

    pub async fn new_with_options(options: Opt) -> Self {
        let meilisearch = setup_meilisearch(&options).unwrap();
        let auth = AuthController::new(&options.db_path, &options.master_key).unwrap();
    pub async fn new_auth() -> Self {
        let dir = TempDir::new().unwrap();
        let options = default_settings(dir.path());
        Self::new_auth_with_options(options, dir).await
    }

    pub async fn new_with_options(options: Opt) -> Result<Self, anyhow::Error> {
        let meilisearch = setup_meilisearch(&options)?;
        let auth = AuthController::new(&options.db_path, &options.master_key)?;
        let service = Service {
            meilisearch,
            auth,
@@ -87,10 +92,10 @@ impl Server {
            api_key: None,
        };

        Server {
        Ok(Server {
            service,
            _dir: None,
        }
        })
    }

    /// Returns a view to an index. There is no guarantee that the index exists.
@@ -101,8 +106,27 @@ impl Server {
        }
    }

    pub async fn list_indexes(&self) -> (Value, StatusCode) {
        self.service.get("/indexes").await
    pub async fn list_indexes(
        &self,
        offset: Option<usize>,
        limit: Option<usize>,
    ) -> (Value, StatusCode) {
        let (offset, limit) = (
            offset.map(|offset| format!("offset={offset}")),
            limit.map(|limit| format!("limit={limit}")),
        );
        let query_parameter = offset
            .as_ref()
            .zip(limit.as_ref())
            .map(|(offset, limit)| format!("{offset}&{limit}"))
            .or_else(|| offset.xor(limit));
        if let Some(query_parameter) = query_parameter {
            self.service
                .get(format!("/indexes?{query_parameter}"))
                .await
        } else {
            self.service.get("/indexes").await
        }
    }
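
// Aside (illustrative only, not part of the diff above): the `zip`/`xor`
// combination covers all four offset/limit cases without a nested match:
// `zip` fires only when both are `Some` (joined with `&`), and `xor`
// recovers whichever single value remains. A self-contained demonstration:
fn query_string(offset: Option<usize>, limit: Option<usize>) -> Option<String> {
    let (offset, limit) = (
        offset.map(|o| format!("offset={o}")),
        limit.map(|l| format!("limit={l}")),
    );
    offset
        .as_ref()
        .zip(limit.as_ref())
        .map(|(o, l)| format!("{o}&{l}"))
        .or_else(|| offset.xor(limit))
}

// query_string(Some(5), Some(20)) == Some("offset=5&limit=20".to_string())
// query_string(Some(5), None)     == Some("offset=5".to_string())
// query_string(None, None)        == None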

    pub async fn version(&self) -> (Value, StatusCode) {
@@ -126,33 +150,18 @@ pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
    Opt {
        db_path: dir.as_ref().join("db"),
        dumps_dir: dir.as_ref().join("dump"),
        http_addr: "127.0.0.1:7700".to_owned(),
        master_key: None,
        env: "development".to_owned(),
        #[cfg(all(not(debug_assertions), feature = "analytics"))]
        no_analytics: Some(Some(true)),
        max_index_size: Byte::from_unit(4.0, ByteUnit::GiB).unwrap(),
        max_task_db_size: Byte::from_unit(4.0, ByteUnit::GiB).unwrap(),
        no_analytics: true,
        max_index_size: Byte::from_unit(100.0, ByteUnit::MiB).unwrap(),
        max_task_db_size: Byte::from_unit(1.0, ByteUnit::GiB).unwrap(),
        http_payload_size_limit: Byte::from_unit(10.0, ByteUnit::MiB).unwrap(),
        ssl_cert_path: None,
        ssl_key_path: None,
        ssl_auth_path: None,
        ssl_ocsp_path: None,
        ssl_require_auth: false,
        ssl_resumption: false,
        ssl_tickets: false,
        import_snapshot: None,
        ignore_missing_snapshot: false,
        ignore_snapshot_if_db_exists: false,
        snapshot_dir: ".".into(),
        schedule_snapshot: false,
        snapshot_interval_sec: 0,
        import_dump: None,
        indexer_options: IndexerOpts {
            // memory has to be unlimited because several meilisearch instances run in the test context.
            max_memory: MaxMemory::unlimited(),
            ..Default::default()
            max_indexing_memory: MaxMemory::unlimited(),
            ..Parser::parse_from(None as Option<&str>)
        },
        log_level: "off".into(),
        ..Parser::parse_from(None as Option<&str>)
    }
}

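// Aside (illustrative only, not part of the diff above): the
// `..Parser::parse_from(None as Option<&str>)` spread fills every remaining
// field by running the clap parser over an empty argument list, i.e. "use
// whatever the CLI defaults are". A minimal sketch of the idiom, assuming
// clap's derive feature; `DemoOpts` is a made-up struct:
use clap::Parser;

#[derive(Parser, Debug)]
struct DemoOpts {
    #[clap(long, default_value = "7700")]
    port: u16,
    #[clap(long)]
    verbose: bool,
}

fn demo_defaults() -> DemoOpts {
    // No argv entries at all, so every field takes its declared default.
    DemoOpts::parse_from(None as Option<&str>)
}
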
@@ -7,23 +7,45 @@ use actix_web::test;
use meilisearch_http::{analytics, create_app};
use serde_json::{json, Value};

enum HttpVerb {
    Put,
    Patch,
    Post,
    Get,
    Delete,
}

impl HttpVerb {
    fn test_request(&self) -> test::TestRequest {
        match self {
            HttpVerb::Put => test::TestRequest::put(),
            HttpVerb::Patch => test::TestRequest::patch(),
            HttpVerb::Post => test::TestRequest::post(),
            HttpVerb::Get => test::TestRequest::get(),
            HttpVerb::Delete => test::TestRequest::delete(),
        }
    }
}

#[actix_rt::test]
async fn error_json_bad_content_type() {
    use HttpVerb::{Patch, Post, Put};

    let routes = [
        // all the POST routes except the dumps that can be created without any body or content-type
        // all the routes except the dumps that can be created without any body or content-type
        // and the search that is not a strict json
        "/indexes",
        "/indexes/doggo/documents/delete-batch",
        "/indexes/doggo/search",
        "/indexes/doggo/settings",
        "/indexes/doggo/settings/displayed-attributes",
        "/indexes/doggo/settings/distinct-attribute",
        "/indexes/doggo/settings/filterable-attributes",
        "/indexes/doggo/settings/ranking-rules",
        "/indexes/doggo/settings/searchable-attributes",
        "/indexes/doggo/settings/sortable-attributes",
        "/indexes/doggo/settings/stop-words",
        "/indexes/doggo/settings/synonyms",
        (Post, "/indexes"),
        (Post, "/indexes/doggo/documents/delete-batch"),
        (Post, "/indexes/doggo/search"),
        (Patch, "/indexes/doggo/settings"),
        (Put, "/indexes/doggo/settings/displayed-attributes"),
        (Put, "/indexes/doggo/settings/distinct-attribute"),
        (Put, "/indexes/doggo/settings/filterable-attributes"),
        (Put, "/indexes/doggo/settings/ranking-rules"),
        (Put, "/indexes/doggo/settings/searchable-attributes"),
        (Put, "/indexes/doggo/settings/sortable-attributes"),
        (Put, "/indexes/doggo/settings/stop-words"),
        (Put, "/indexes/doggo/settings/synonyms"),
    ];
    let bad_content_types = [
        "application/csv",
@@ -45,10 +67,11 @@ async fn error_json_bad_content_type() {
        analytics::MockAnalytics::new(&server.service.options).0
    ))
    .await;
    for route in routes {
    for (verb, route) in routes {
        // Good content-type, we probably have an error since we didn't send anything in the json
        // so we only ensure we didn't get a bad media type error.
        let req = test::TestRequest::post()
        let req = verb
            .test_request()
            .uri(route)
            .set_payload(document)
            .insert_header(("content-type", "application/json"))
@@ -59,7 +82,8 @@ async fn error_json_bad_content_type() {
        "calling the route `{}` with a content-type of json isn't supposed to throw a bad media type error", route);

        // No content-type.
        let req = test::TestRequest::post()
        let req = verb
            .test_request()
            .uri(route)
            .set_payload(document)
            .to_request();
@@ -82,7 +106,8 @@ async fn error_json_bad_content_type() {

        for bad_content_type in bad_content_types {
            // Always bad content-type
            let req = test::TestRequest::post()
            let req = verb
                .test_request()
                .uri(route)
                .set_payload(document.to_string())
                .insert_header(("content-type", bad_content_type))

@@ -1,8 +1,8 @@
use crate::common::{GetAllDocumentsOptions, Server};
use actix_web::test;
use chrono::DateTime;
use meilisearch_http::{analytics, create_app};
use serde_json::{json, Value};
use time::{format_description::well_known::Rfc3339, OffsetDateTime};

/// This is the basic usage of our API and every other test uses the content-type application/json
#[actix_rt::test]
@@ -35,7 +35,7 @@ async fn add_documents_test_json_content_types() {
    let body = test::read_body(res).await;
    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
    assert_eq!(status_code, 202);
    assert_eq!(response["uid"], 0);
    assert_eq!(response["taskUid"], 0);

    // put
    let req = test::TestRequest::put()
@@ -48,7 +48,7 @@ async fn add_documents_test_json_content_types() {
    let body = test::read_body(res).await;
    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
    assert_eq!(status_code, 202);
    assert_eq!(response["uid"], 1);
    assert_eq!(response["taskUid"], 1);
}

/// any other content-type must be refused
@@ -212,7 +212,7 @@ async fn error_add_malformed_csv_documents() {
    assert_eq!(
        response["message"],
        json!(
            r#"The `csv` payload provided is malformed. `CSV error: record 1 (line: 2, byte: 12): found record with 3 fields, but the previous record has 2 fields`."#
            r#"The `csv` payload provided is malformed: `CSV error: record 1 (line: 2, byte: 12): found record with 3 fields, but the previous record has 2 fields`."#
        )
    );
    assert_eq!(response["code"], json!("malformed_payload"));
@@ -236,7 +236,7 @@ async fn error_add_malformed_csv_documents() {
    assert_eq!(
        response["message"],
        json!(
            r#"The `csv` payload provided is malformed. `CSV error: record 1 (line: 2, byte: 12): found record with 3 fields, but the previous record has 2 fields`."#
            r#"The `csv` payload provided is malformed: `CSV error: record 1 (line: 2, byte: 12): found record with 3 fields, but the previous record has 2 fields`."#
        )
    );
    assert_eq!(response["code"], json!("malformed_payload"));
@@ -307,6 +307,58 @@ async fn error_add_malformed_json_documents() {
        response["link"],
        json!("https://docs.meilisearch.com/errors#malformed_payload")
    );

    // truncate

    // length = 100
    let long = "0123456789".repeat(10);

    let document = format!("\"{}\"", long);
    let req = test::TestRequest::put()
        .uri("/indexes/dog/documents")
        .set_payload(document)
        .insert_header(("content-type", "application/json"))
        .to_request();
    let res = test::call_service(&app, req).await;
    let body = test::read_body(res).await;
    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
    assert_eq!(status_code, 400);
    assert_eq!(
        response["message"],
        json!(
            r#"The `json` payload provided is malformed. `Couldn't serialize document value: invalid type: string "0123456789012345678901234567...890123456789", expected a documents, or a sequence of documents. at line 1 column 102`."#
        )
    );
    assert_eq!(response["code"], json!("malformed_payload"));
    assert_eq!(response["type"], json!("invalid_request"));
    assert_eq!(
        response["link"],
        json!("https://docs.meilisearch.com/errors#malformed_payload")
    );

    // add one more char to the long string to test if the truncating works.
    let document = format!("\"{}m\"", long);
    let req = test::TestRequest::put()
        .uri("/indexes/dog/documents")
        .set_payload(document)
        .insert_header(("content-type", "application/json"))
        .to_request();
    let res = test::call_service(&app, req).await;
    let body = test::read_body(res).await;
    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
    assert_eq!(status_code, 400);
    assert_eq!(
        response["message"],
        json!(
            r#"The `json` payload provided is malformed. `Couldn't serialize document value: invalid type: string "0123456789012345678901234567...90123456789m", expected a documents, or a sequence of documents. at line 1 column 103`."#
        )
    );
    assert_eq!(response["code"], json!("malformed_payload"));
    assert_eq!(response["type"], json!("invalid_request"));
    assert_eq!(
        response["link"],
        json!("https://docs.meilisearch.com/errors#malformed_payload")
    );
}
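
// Aside (illustrative only, not part of the diff above): the two assertions
// pin down how over-long values are elided in error messages — a fixed
// prefix, a literal `...`, and a fixed suffix, so adding one character
// shifts the suffix instead of growing the message. A sketch of that kind of
// middle truncation (widths inferred from the assertions; assumes ASCII
// input, since it slices by byte):
fn elide_middle(s: &str, prefix: usize, suffix: usize) -> String {
    if s.len() <= prefix + suffix {
        s.to_string()
    } else {
        format!("{}...{}", &s[..prefix], &s[s.len() - suffix..])
    }
}

// let long = "0123456789".repeat(10);
// elide_middle(&long, 28, 12) == "0123456789012345678901234567...890123456789"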

#[actix_rt::test]
@@ -547,7 +599,7 @@ async fn add_documents_no_index_creation() {

    let (response, code) = index.add_documents(documents, None).await;
    assert_eq!(code, 202);
    assert_eq!(response["uid"], 0);
    assert_eq!(response["taskUid"], 0);
    /*
     * currently we don't check these fields to stay ISO with meilisearch
     * assert_eq!(response["status"], "pending");
@@ -563,17 +615,17 @@ async fn add_documents_no_index_creation() {
    assert_eq!(code, 200);
    assert_eq!(response["status"], "succeeded");
    assert_eq!(response["uid"], 0);
    assert_eq!(response["type"], "documentAddition");
    assert_eq!(response["type"], "documentAdditionOrUpdate");
    assert_eq!(response["details"]["receivedDocuments"], 1);
    assert_eq!(response["details"]["indexedDocuments"], 1);

    let processed_at =
        DateTime::parse_from_rfc3339(response["finishedAt"].as_str().unwrap()).unwrap();
        OffsetDateTime::parse(response["finishedAt"].as_str().unwrap(), &Rfc3339).unwrap();
    let enqueued_at =
        DateTime::parse_from_rfc3339(response["enqueuedAt"].as_str().unwrap()).unwrap();
        OffsetDateTime::parse(response["enqueuedAt"].as_str().unwrap(), &Rfc3339).unwrap();
    assert!(processed_at > enqueued_at);

    // index was created, and primary key was infered.
    // index was created, and primary key was inferred.
    let (response, code) = index.get().await;
    assert_eq!(code, 200);
    assert_eq!(response["primaryKey"], "id");
@@ -586,7 +638,7 @@ async fn error_document_add_create_index_bad_uid() {
    let (response, code) = index.add_documents(json!([{"id": 1}]), None).await;

    let expected_response = json!({
        "message": "`883 fj!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
        "message": "invalid index uid `883 fj!`, the uid must be an integer or a string containing only alphanumeric characters a-z A-Z 0-9, hyphens - and underscores _.",
        "code": "invalid_index_uid",
        "type": "invalid_request",
        "link": "https://docs.meilisearch.com/errors#invalid_index_uid"
@@ -603,7 +655,7 @@ async fn error_document_update_create_index_bad_uid() {
    let (response, code) = index.update_documents(json!([{"id": 1}]), None).await;

    let expected_response = json!({
        "message": "`883 fj!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
        "message": "invalid index uid `883 fj!`, the uid must be an integer or a string containing only alphanumeric characters a-z A-Z 0-9, hyphens - and underscores _.",
        "code": "invalid_index_uid",
        "type": "invalid_request",
        "link": "https://docs.meilisearch.com/errors#invalid_index_uid"
@@ -633,7 +685,7 @@ async fn document_addition_with_primary_key() {
    assert_eq!(code, 200);
    assert_eq!(response["status"], "succeeded");
    assert_eq!(response["uid"], 0);
    assert_eq!(response["type"], "documentAddition");
    assert_eq!(response["type"], "documentAdditionOrUpdate");
    assert_eq!(response["details"]["receivedDocuments"], 1);
    assert_eq!(response["details"]["indexedDocuments"], 1);

@@ -662,7 +714,7 @@ async fn document_update_with_primary_key() {
    assert_eq!(code, 200);
    assert_eq!(response["status"], "succeeded");
    assert_eq!(response["uid"], 0);
    assert_eq!(response["type"], "documentPartial");
    assert_eq!(response["type"], "documentAdditionOrUpdate");
    assert_eq!(response["details"]["indexedDocuments"], 1);
    assert_eq!(response["details"]["receivedDocuments"], 1);

@@ -710,20 +762,11 @@ async fn replace_document() {
}

#[actix_rt::test]
async fn error_add_no_documents() {
async fn add_no_documents() {
    let server = Server::new().await;
    let index = server.index("test");
    let (response, code) = index.add_documents(json!([]), None).await;

    let expected_response = json!({
        "message": "The `json` payload must contain at least one document.",
        "code": "malformed_payload",
        "type": "invalid_request",
        "link": "https://docs.meilisearch.com/errors#malformed_payload"
    });

    assert_eq!(response, expected_response);
    assert_eq!(code, 400);
    let (_response, code) = index.add_documents(json!([]), None).await;
    assert_eq!(code, 202);
}

#[actix_rt::test]
@@ -775,7 +818,7 @@ async fn add_larger_dataset() {
    let (response, code) = index.get_task(update_id).await;
    assert_eq!(code, 200);
    assert_eq!(response["status"], "succeeded");
    assert_eq!(response["type"], "documentAddition");
    assert_eq!(response["type"], "documentAdditionOrUpdate");
    assert_eq!(response["details"]["indexedDocuments"], 77);
    assert_eq!(response["details"]["receivedDocuments"], 77);
    let (response, code) = index
@@ -784,8 +827,8 @@ async fn add_larger_dataset() {
        ..Default::default()
    })
    .await;
    assert_eq!(code, 200);
    assert_eq!(response.as_array().unwrap().len(), 77);
    assert_eq!(code, 200, "failed with `{}`", response);
    assert_eq!(response["results"].as_array().unwrap().len(), 77);
}

#[actix_rt::test]
@@ -797,7 +840,7 @@ async fn update_larger_dataset() {
    index.wait_task(0).await;
    let (response, code) = index.get_task(0).await;
    assert_eq!(code, 200);
    assert_eq!(response["type"], "documentPartial");
    assert_eq!(response["type"], "documentAdditionOrUpdate");
    assert_eq!(response["details"]["indexedDocuments"], 77);
    let (response, code) = index
        .get_all_documents(GetAllDocumentsOptions {
@@ -806,7 +849,7 @@ async fn update_larger_dataset() {
    })
    .await;
    assert_eq!(code, 200);
    assert_eq!(response.as_array().unwrap().len(), 77);
    assert_eq!(response["results"].as_array().unwrap().len(), 77);
}

#[actix_rt::test]
@@ -825,7 +868,12 @@ async fn error_add_documents_bad_document_id() {
    let (response, code) = index.get_task(1).await;
    assert_eq!(code, 200);
    assert_eq!(response["status"], json!("failed"));
    assert_eq!(response["error"]["message"], json!("Document identifier `foo & bar` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_)."));
    assert_eq!(
        response["error"]["message"],
        json!(
            r#"Document identifier `"foo & bar"` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_)."#
        )
    );
    assert_eq!(response["error"]["code"], json!("invalid_document_id"));
    assert_eq!(response["error"]["type"], json!("invalid_request"));
    assert_eq!(
@@ -848,7 +896,12 @@ async fn error_update_documents_bad_document_id() {
    index.update_documents(documents, None).await;
    let response = index.wait_task(1).await;
    assert_eq!(response["status"], json!("failed"));
    assert_eq!(response["error"]["message"], json!("Document identifier `foo & bar` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_)."));
    assert_eq!(
        response["error"]["message"],
        json!(
            r#"Document identifier `"foo & bar"` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_)."#
        )
    );
    assert_eq!(response["error"]["code"], json!("invalid_document_id"));
    assert_eq!(response["error"]["type"], json!("invalid_request"));
    assert_eq!(
@@ -948,7 +1001,7 @@ async fn error_document_field_limit_reached() {
}

#[actix_rt::test]
async fn error_add_documents_invalid_geo_field() {
async fn add_documents_invalid_geo_field() {
    let server = Server::new().await;
    let index = server.index("test");
    index.create(Some("id")).await;
@@ -967,16 +1020,7 @@ async fn error_add_documents_invalid_geo_field() {
    index.wait_task(2).await;
    let (response, code) = index.get_task(2).await;
    assert_eq!(code, 200);
    assert_eq!(response["status"], "failed");

    let expected_error = json!({
        "message": r#"The document with the id: `11` contains an invalid _geo field: `foobar`."#,
        "code": "invalid_geo_field",
        "type": "invalid_request",
        "link": "https://docs.meilisearch.com/errors#invalid_geo_field"
    });

    assert_eq!(response["error"], expected_error);
    assert_eq!(response["status"], "succeeded");
}

#[actix_rt::test]

@@ -72,7 +72,7 @@ async fn clear_all_documents() {
        .get_all_documents(GetAllDocumentsOptions::default())
        .await;
    assert_eq!(code, 200);
    assert!(response.as_array().unwrap().is_empty());
    assert!(response["results"].as_array().unwrap().is_empty());
}

#[actix_rt::test]
@@ -89,7 +89,7 @@ async fn clear_all_documents_empty_index() {
        .get_all_documents(GetAllDocumentsOptions::default())
        .await;
    assert_eq!(code, 200);
    assert!(response.as_array().unwrap().is_empty());
    assert!(response["results"].as_array().unwrap().is_empty());
}

#[actix_rt::test]
@@ -125,8 +125,8 @@ async fn delete_batch() {
        .get_all_documents(GetAllDocumentsOptions::default())
        .await;
    assert_eq!(code, 200);
    assert_eq!(response.as_array().unwrap().len(), 1);
    assert_eq!(response.as_array().unwrap()[0]["id"], 3);
    assert_eq!(response["results"].as_array().unwrap().len(), 1);
    assert_eq!(response["results"][0]["id"], json!(3));
}

#[actix_rt::test]
@@ -143,5 +143,5 @@ async fn delete_no_document_batch() {
        .get_all_documents(GetAllDocumentsOptions::default())
        .await;
    assert_eq!(code, 200);
    assert_eq!(response.as_array().unwrap().len(), 3);
    assert_eq!(response["results"].as_array().unwrap().len(), 3);
}

@@ -1,5 +1,4 @@
use crate::common::GetAllDocumentsOptions;
use crate::common::Server;
use crate::common::{GetAllDocumentsOptions, GetDocumentOptions, Server};

use serde_json::json;

@@ -39,19 +38,51 @@ async fn get_document() {
    let documents = serde_json::json!([
        {
            "id": 0,
            "content": "foobar",
            "nested": { "content": "foobar" },
        }
    ]);
    let (_, code) = index.add_documents(documents, None).await;
    assert_eq!(code, 202);
    index.wait_task(0).await;
    index.wait_task(1).await;
    let (response, code) = index.get_document(0, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        response,
        serde_json::json!( {
        serde_json::json!({
            "id": 0,
            "content": "foobar",
            "nested": { "content": "foobar" },
        })
    );

    let (response, code) = index
        .get_document(
            0,
            Some(GetDocumentOptions {
                fields: Some(vec!["id"]),
            }),
        )
        .await;
    assert_eq!(code, 200);
    assert_eq!(
        response,
        serde_json::json!({
            "id": 0,
        })
    );

    let (response, code) = index
        .get_document(
            0,
            Some(GetDocumentOptions {
                fields: Some(vec!["nested.content"]),
            }),
        )
        .await;
    assert_eq!(code, 200);
    assert_eq!(
        response,
        serde_json::json!({
            "nested": { "content": "foobar" },
        })
    );
}
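
// Aside (illustrative only, not part of the diff above): `fields=nested.content`
// selects into the document by dot-path. A minimal sketch of that kind of
// projection over a `serde_json::Value`; `project_path` is hypothetical, and
// unlike the real endpoint (see document 0 further down in this diff) it
// does not also match a literal `"content.truc"` key.
fn project_path(doc: &serde_json::Value, path: &str) -> serde_json::Value {
    use serde_json::{json, Map, Value};

    fn wrap(key: &str, inner: Value) -> Value {
        let mut map = Map::new();
        map.insert(key.to_string(), inner);
        Value::Object(map)
    }

    match path.split_once('.') {
        // Leaf segment: keep the value under its key if it exists.
        None => doc.get(path).map_or(json!({}), |v| wrap(path, v.clone())),
        // Interior segment: recurse and re-wrap, dropping empty results.
        Some((head, rest)) => match doc.get(head).map(|inner| project_path(inner, rest)) {
            Some(Value::Object(o)) if !o.is_empty() => wrap(head, Value::Object(o)),
            _ => json!({}),
        },
    }
}

// project_path(&json!({"content": {"truc": "foobar", "machin": "bidule"}}), "content.truc")
//   == json!({"content": {"truc": "foobar"}})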
@@ -88,7 +119,7 @@ async fn get_no_document() {
        .get_all_documents(GetAllDocumentsOptions::default())
        .await;
    assert_eq!(code, 200);
    assert!(response.as_array().unwrap().is_empty());
    assert!(response["results"].as_array().unwrap().is_empty());
}

#[actix_rt::test]
@@ -101,7 +132,7 @@ async fn get_all_documents_no_options() {
        .get_all_documents(GetAllDocumentsOptions::default())
        .await;
    assert_eq!(code, 200);
    let arr = response.as_array().unwrap();
    let arr = response["results"].as_array().unwrap();
    assert_eq!(arr.len(), 20);
    let first = serde_json::json!({
        "id":0,
@@ -137,8 +168,11 @@ async fn test_get_all_documents_limit() {
    })
    .await;
    assert_eq!(code, 200);
    assert_eq!(response.as_array().unwrap().len(), 5);
    assert_eq!(response.as_array().unwrap()[0]["id"], 0);
    assert_eq!(response["results"].as_array().unwrap().len(), 5);
    assert_eq!(response["results"][0]["id"], json!(0));
    assert_eq!(response["offset"], json!(0));
    assert_eq!(response["limit"], json!(5));
    assert_eq!(response["total"], json!(77));
}

#[actix_rt::test]
@@ -154,8 +188,11 @@ async fn test_get_all_documents_offset() {
    })
    .await;
    assert_eq!(code, 200);
    assert_eq!(response.as_array().unwrap().len(), 20);
    assert_eq!(response.as_array().unwrap()[0]["id"], 13);
    assert_eq!(response["results"].as_array().unwrap().len(), 20);
    assert_eq!(response["results"][0]["id"], json!(5));
    assert_eq!(response["offset"], json!(5));
    assert_eq!(response["limit"], json!(20));
    assert_eq!(response["total"], json!(77));
}

#[actix_rt::test]
@@ -171,20 +208,14 @@ async fn test_get_all_documents_attributes_to_retrieve() {
    })
    .await;
    assert_eq!(code, 200);
    assert_eq!(response.as_array().unwrap().len(), 20);
    assert_eq!(
        response.as_array().unwrap()[0]
            .as_object()
            .unwrap()
            .keys()
            .count(),
        1
    );
    assert!(response.as_array().unwrap()[0]
        .as_object()
        .unwrap()
        .get("name")
        .is_some());
    assert_eq!(response["results"].as_array().unwrap().len(), 20);
    for results in response["results"].as_array().unwrap() {
        assert_eq!(results.as_object().unwrap().keys().count(), 1);
        assert!(results["name"] != json!(null));
    }
    assert_eq!(response["offset"], json!(0));
    assert_eq!(response["limit"], json!(20));
    assert_eq!(response["total"], json!(77));

    let (response, code) = index
        .get_all_documents(GetAllDocumentsOptions {
@@ -193,15 +224,13 @@ async fn test_get_all_documents_attributes_to_retrieve() {
    })
    .await;
    assert_eq!(code, 200);
    assert_eq!(response.as_array().unwrap().len(), 20);
    assert_eq!(
        response.as_array().unwrap()[0]
            .as_object()
            .unwrap()
            .keys()
            .count(),
        0
    );
    assert_eq!(response["results"].as_array().unwrap().len(), 20);
    for results in response["results"].as_array().unwrap() {
        assert_eq!(results.as_object().unwrap().keys().count(), 0);
    }
    assert_eq!(response["offset"], json!(0));
    assert_eq!(response["limit"], json!(20));
    assert_eq!(response["total"], json!(77));

    let (response, code) = index
        .get_all_documents(GetAllDocumentsOptions {
@@ -210,15 +239,13 @@ async fn test_get_all_documents_attributes_to_retrieve() {
    })
    .await;
    assert_eq!(code, 200);
    assert_eq!(response.as_array().unwrap().len(), 20);
    assert_eq!(
        response.as_array().unwrap()[0]
            .as_object()
            .unwrap()
            .keys()
            .count(),
        0
    );
    assert_eq!(response["results"].as_array().unwrap().len(), 20);
    for results in response["results"].as_array().unwrap() {
        assert_eq!(results.as_object().unwrap().keys().count(), 0);
    }
    assert_eq!(response["offset"], json!(0));
    assert_eq!(response["limit"], json!(20));
    assert_eq!(response["total"], json!(77));

    let (response, code) = index
        .get_all_documents(GetAllDocumentsOptions {
@@ -227,15 +254,12 @@ async fn test_get_all_documents_attributes_to_retrieve() {
    })
    .await;
    assert_eq!(code, 200);
    assert_eq!(response.as_array().unwrap().len(), 20);
    assert_eq!(
        response.as_array().unwrap()[0]
            .as_object()
            .unwrap()
            .keys()
            .count(),
        2
    );
    assert_eq!(response["results"].as_array().unwrap().len(), 20);
    for results in response["results"].as_array().unwrap() {
        assert_eq!(results.as_object().unwrap().keys().count(), 2);
        assert!(results["name"] != json!(null));
        assert!(results["tags"] != json!(null));
    }

    let (response, code) = index
        .get_all_documents(GetAllDocumentsOptions {
@@ -244,15 +268,10 @@ async fn test_get_all_documents_attributes_to_retrieve() {
    })
    .await;
    assert_eq!(code, 200);
    assert_eq!(response.as_array().unwrap().len(), 20);
    assert_eq!(
        response.as_array().unwrap()[0]
            .as_object()
            .unwrap()
            .keys()
            .count(),
        16
    );
    assert_eq!(response["results"].as_array().unwrap().len(), 20);
    for results in response["results"].as_array().unwrap() {
        assert_eq!(results.as_object().unwrap().keys().count(), 16);
    }

    let (response, code) = index
        .get_all_documents(GetAllDocumentsOptions {
@@ -261,19 +280,99 @@ async fn test_get_all_documents_attributes_to_retrieve() {
    })
    .await;
    assert_eq!(code, 200);
    assert_eq!(response.as_array().unwrap().len(), 20);
    assert_eq!(response["results"].as_array().unwrap().len(), 20);
    for results in response["results"].as_array().unwrap() {
        assert_eq!(results.as_object().unwrap().keys().count(), 16);
    }
}

#[actix_rt::test]
async fn get_document_s_nested_attributes_to_retrieve() {
    let server = Server::new().await;
    let index = server.index("test");
    index.create(None).await;
    let documents = json!([
        {
            "id": 0,
            "content.truc": "foobar",
        },
        {
            "id": 1,
            "content": {
                "truc": "foobar",
                "machin": "bidule",
            },
        },
    ]);
    let (_, code) = index.add_documents(documents, None).await;
    assert_eq!(code, 202);
    index.wait_task(1).await;

    let (response, code) = index
        .get_document(
            0,
            Some(GetDocumentOptions {
                fields: Some(vec!["content"]),
            }),
        )
        .await;
    assert_eq!(code, 200);
    assert_eq!(response, json!({}));
    let (response, code) = index
        .get_document(
            1,
            Some(GetDocumentOptions {
                fields: Some(vec!["content"]),
            }),
        )
        .await;
    assert_eq!(code, 200);
    assert_eq!(
        response.as_array().unwrap()[0]
            .as_object()
            .unwrap()
            .keys()
            .count(),
        16
        response,
        json!({
            "content": {
                "truc": "foobar",
                "machin": "bidule",
            },
        })
    );

    let (response, code) = index
        .get_document(
            0,
            Some(GetDocumentOptions {
                fields: Some(vec!["content.truc"]),
            }),
        )
        .await;
    assert_eq!(code, 200);
    assert_eq!(
        response,
        json!({
            "content.truc": "foobar",
        })
    );
    let (response, code) = index
        .get_document(
            1,
            Some(GetDocumentOptions {
                fields: Some(vec!["content.truc"]),
            }),
        )
        .await;
    assert_eq!(code, 200);
    assert_eq!(
        response,
        json!({
            "content": {
                "truc": "foobar",
            },
        })
    );
}

#[actix_rt::test]
async fn get_documents_displayed_attributes() {
async fn get_documents_displayed_attributes_is_ignored() {
    let server = Server::new().await;
    let index = server.index("test");
    index
@@ -285,23 +384,19 @@ async fn get_documents_displayed_attributes() {
        .get_all_documents(GetAllDocumentsOptions::default())
        .await;
    assert_eq!(code, 200);
    assert_eq!(response.as_array().unwrap().len(), 20);
    assert_eq!(response["results"].as_array().unwrap().len(), 20);
    assert_eq!(
        response.as_array().unwrap()[0]
            .as_object()
            .unwrap()
            .keys()
            .count(),
        1
        response["results"][0].as_object().unwrap().keys().count(),
        16
    );
    assert!(response.as_array().unwrap()[0]
        .as_object()
        .unwrap()
        .get("gender")
        .is_some());
    assert!(response["results"][0]["gender"] != json!(null));

    assert_eq!(response["offset"], json!(0));
    assert_eq!(response["limit"], json!(20));
    assert_eq!(response["total"], json!(77));

    let (response, code) = index.get_document(0, None).await;
    assert_eq!(code, 200);
    assert_eq!(response.as_object().unwrap().keys().count(), 1);
    assert_eq!(response.as_object().unwrap().keys().count(), 16);
    assert!(response.as_object().unwrap().get("gender").is_some());
}

@@ -1,22 +0,0 @@
#![allow(dead_code)]
mod common;

use crate::common::Server;
use serde_json::json;

#[actix_rt::test]
async fn get_unexisting_dump_status() {
    let server = Server::new().await;

    let (response, code) = server.get_dump_status("foobar").await;
    assert_eq!(code, 404);

    let expected_response = json!({
        "message": "Dump `foobar` not found.",
        "code": "dump_not_found",
        "type": "invalid_request",
        "link": "https://docs.meilisearch.com/errors#dump_not_found"
    });

    assert_eq!(response, expected_response);
}

meilisearch-http/tests/dumps/data.rs (new file, 73 lines)
@@ -0,0 +1,73 @@
use std::path::PathBuf;

use manifest_dir_macros::exist_relative_path;

pub enum GetDump {
    MoviesRawV1,
    MoviesWithSettingsV1,
    RubyGemsWithSettingsV1,

    MoviesRawV2,
    MoviesWithSettingsV2,
    RubyGemsWithSettingsV2,

    MoviesRawV3,
    MoviesWithSettingsV3,
    RubyGemsWithSettingsV3,

    MoviesRawV4,
    MoviesWithSettingsV4,
    RubyGemsWithSettingsV4,

    TestV5,
}

impl GetDump {
    pub fn path(&self) -> PathBuf {
        match self {
            GetDump::MoviesRawV1 => {
                exist_relative_path!("tests/assets/v1_v0.20.0_movies.dump").into()
            }
            GetDump::MoviesWithSettingsV1 => {
                exist_relative_path!("tests/assets/v1_v0.20.0_movies_with_settings.dump").into()
            }
            GetDump::RubyGemsWithSettingsV1 => {
                exist_relative_path!("tests/assets/v1_v0.20.0_rubygems_with_settings.dump").into()
            }

            GetDump::MoviesRawV2 => {
                exist_relative_path!("tests/assets/v2_v0.21.1_movies.dump").into()
            }
            GetDump::MoviesWithSettingsV2 => {
                exist_relative_path!("tests/assets/v2_v0.21.1_movies_with_settings.dump").into()
            }

            GetDump::RubyGemsWithSettingsV2 => {
                exist_relative_path!("tests/assets/v2_v0.21.1_rubygems_with_settings.dump").into()
            }

            GetDump::MoviesRawV3 => {
                exist_relative_path!("tests/assets/v3_v0.24.0_movies.dump").into()
            }
            GetDump::MoviesWithSettingsV3 => {
                exist_relative_path!("tests/assets/v3_v0.24.0_movies_with_settings.dump").into()
            }
            GetDump::RubyGemsWithSettingsV3 => {
                exist_relative_path!("tests/assets/v3_v0.24.0_rubygems_with_settings.dump").into()
            }

            GetDump::MoviesRawV4 => {
                exist_relative_path!("tests/assets/v4_v0.25.2_movies.dump").into()
            }
            GetDump::MoviesWithSettingsV4 => {
                exist_relative_path!("tests/assets/v4_v0.25.2_movies_with_settings.dump").into()
            }
            GetDump::RubyGemsWithSettingsV4 => {
                exist_relative_path!("tests/assets/v4_v0.25.2_rubygems_with_settings.dump").into()
            }
            GetDump::TestV5 => {
                exist_relative_path!("tests/assets/v5_v0.28.0_test_dump.dump").into()
            }
        }
    }
}

meilisearch-http/tests/dumps/mod.rs (new file, 677 lines)
@@ -0,0 +1,677 @@
|
||||
mod data;
|
||||
|
||||
use crate::common::{default_settings, GetAllDocumentsOptions, Server};
|
||||
use meilisearch_http::Opt;
|
||||
use serde_json::json;
|
||||
|
||||
use self::data::GetDump;
|
||||
|
||||
// all the following test are ignored on windows. See #2364
|
||||
#[actix_rt::test]
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
async fn import_dump_v1() {
|
||||
let temp = tempfile::tempdir().unwrap();
|
||||
|
||||
for path in [
|
||||
GetDump::MoviesRawV1.path(),
|
||||
GetDump::MoviesWithSettingsV1.path(),
|
||||
GetDump::RubyGemsWithSettingsV1.path(),
|
||||
] {
|
||||
let options = Opt {
|
||||
import_dump: Some(path),
|
||||
..default_settings(temp.path())
|
||||
};
|
||||
let error = Server::new_with_options(options)
|
||||
.await
|
||||
.map(|_| ())
|
||||
.unwrap_err();
|
||||
|
||||
assert_eq!(error.to_string(), "The version 1 of the dumps is not supported anymore. You can re-export your dump from a version between 0.21 and 0.24, or start fresh from a version 0.25 onwards.");
|
||||
}
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
async fn import_dump_v2_movie_raw() {
|
||||
let temp = tempfile::tempdir().unwrap();
|
||||
|
||||
let options = Opt {
|
||||
import_dump: Some(GetDump::MoviesRawV2.path()),
|
||||
..default_settings(temp.path())
|
||||
};
|
||||
let server = Server::new_with_options(options).await.unwrap();
|
||||
|
||||
let (indexes, code) = server.list_indexes(None, None).await;
|
||||
assert_eq!(code, 200);
|
||||
|
||||
assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
|
||||
assert_eq!(indexes["results"][0]["uid"], json!("indexUID"));
|
||||
assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));
|
||||
|
||||
let index = server.index("indexUID");
|
||||
|
||||
let (stats, code) = index.stats().await;
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(
|
||||
stats,
|
||||
json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }})
|
||||
);
|
||||
|
||||
let (settings, code) = index.settings().await;
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(
|
||||
settings,
|
||||
json!({"displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } })
|
||||
);
|
||||
|
||||
let (tasks, code) = index.list_tasks().await;
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(
|
||||
tasks,
|
||||
json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit": 20, "from": 0, "next": null })
|
||||
);
|
||||
|
||||
// finally we're just going to check that we can still get a few documents by id
|
||||
let (document, code) = index.get_document(100, None).await;
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(
|
||||
document,
|
||||
json!({"id": 100, "title": "Lock, Stock and Two Smoking Barrels", "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "genres": ["Comedy", "Crime"], "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000})
|
||||
);
|
||||
|
||||
let (document, code) = index.get_document(500, None).await;
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(
|
||||
document,
|
||||
json!({"id": 500, "title": "Reservoir Dogs", "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "genres": ["Crime", "Thriller"], "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000})
|
||||
);
|
||||
|
||||
let (document, code) = index.get_document(10006, None).await;
|
||||
assert_eq!(code, 200);
|
||||
assert_eq!(
|
||||
document,
|
||||
json!({"id": 10006, "title": "Wild Seven", "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "genres": ["Action", "Crime", "Drama"], "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600})
|
||||
);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v2_movie_with_settings() {
    let temp = tempfile::tempdir().unwrap();

    let options = Opt {
        import_dump: Some(GetDump::MoviesWithSettingsV2.path()),
        ..default_settings(temp.path())
    };
    let server = Server::new_with_options(options).await.unwrap();

    let (indexes, code) = server.list_indexes(None, None).await;
    assert_eq!(code, 200);

    assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
    assert_eq!(indexes["results"][0]["uid"], json!("indexUID"));
    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));

    let index = server.index("indexUID");

    let (stats, code) = index.stats().await;
    assert_eq!(code, 200);
    assert_eq!(
        stats,
        json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }})
    );

    let (settings, code) = index.settings().await;
    assert_eq!(code, 200);
    assert_eq!(
        settings,
        json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } })
    );

    let (tasks, code) = index.list_tasks().await;
    assert_eq!(code, 200);
    assert_eq!(
        tasks,
        json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "from": 1, "next": null })
    );

    // finally we're just going to check that we can still get a few documents by id
    let (document, code) = index.get_document(100, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "id": 100, "title": "Lock, Stock and Two Smoking Barrels", "genres": ["Comedy", "Crime"], "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000 })
    );

    let (document, code) = index.get_document(500, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "id": 500, "title": "Reservoir Dogs", "genres": ["Crime", "Thriller"], "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000})
    );

    let (document, code) = index.get_document(10006, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "id": 10006, "title": "Wild Seven", "genres": ["Action", "Crime", "Drama"], "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600})
    );
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v2_rubygems_with_settings() {
    let temp = tempfile::tempdir().unwrap();

    let options = Opt {
        import_dump: Some(GetDump::RubyGemsWithSettingsV2.path()),
        ..default_settings(temp.path())
    };
    let server = Server::new_with_options(options).await.unwrap();

    let (indexes, code) = server.list_indexes(None, None).await;
    assert_eq!(code, 200);

    assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
    assert_eq!(indexes["results"][0]["uid"], json!("rubygems"));
    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));

    let index = server.index("rubygems");

    let (stats, code) = index.stats().await;
    assert_eq!(code, 200);
    assert_eq!(
        stats,
        json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"description": 53, "id": 53, "name": 53, "summary": 53, "total_downloads": 53, "version": 53 }})
    );

    let (settings, code) = index.settings().await;
    assert_eq!(code, 200);
    assert_eq!(
        settings,
        json!({"displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 }})
    );

    let (tasks, code) = index.list_tasks().await;
    assert_eq!(code, 200);
    assert_eq!(
        tasks["results"][0],
        json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
    );

    // finally we're just going to check that we can still get a few documents by id
    let (document, code) = index.get_document(188040, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "name": "meilisearch", "summary": "An easy-to-use ruby client for Meilisearch API", "description": "An easy-to-use ruby client for Meilisearch API. See https://github.com/meilisearch/MeiliSearch", "id": "188040", "version": "0.15.2", "total_downloads": "7465"})
    );

    let (document, code) = index.get_document(191940, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "name": "doggo", "summary": "RSpec 3 formatter - documentation, with progress indication", "description": "Similar to \"rspec -f d\", but also indicates progress by showing the current test number and total test count on each line.", "id": "191940", "version": "1.1.0", "total_downloads": "9394"})
    );

    let (document, code) = index.get_document(159227, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "name": "vortex-of-agony", "summary": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "description": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "id": "159227", "version": "0.1.0", "total_downloads": "1007"})
    );
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v3_movie_raw() {
    let temp = tempfile::tempdir().unwrap();

    let options = Opt {
        import_dump: Some(GetDump::MoviesRawV3.path()),
        ..default_settings(temp.path())
    };
    let server = Server::new_with_options(options).await.unwrap();

    let (indexes, code) = server.list_indexes(None, None).await;
    assert_eq!(code, 200);

    assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
    assert_eq!(indexes["results"][0]["uid"], json!("indexUID"));
    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));

    let index = server.index("indexUID");

    let (stats, code) = index.stats().await;
    assert_eq!(code, 200);
    assert_eq!(
        stats,
        json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }})
    );

    let (settings, code) = index.settings().await;
    assert_eq!(code, 200);
    assert_eq!(
        settings,
        json!({"displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } })
    );

    let (tasks, code) = index.list_tasks().await;
    assert_eq!(code, 200);
    assert_eq!(
        tasks,
        json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit": 20, "from": 0, "next": null })
    );

    // finally we're just going to check that we can still get a few documents by id
    let (document, code) = index.get_document(100, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({"id": 100, "title": "Lock, Stock and Two Smoking Barrels", "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "genres": ["Comedy", "Crime"], "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000})
    );

    let (document, code) = index.get_document(500, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({"id": 500, "title": "Reservoir Dogs", "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "genres": ["Crime", "Thriller"], "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000})
    );

    let (document, code) = index.get_document(10006, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({"id": 10006, "title": "Wild Seven", "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "genres": ["Action", "Crime", "Drama"], "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600})
    );
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v3_movie_with_settings() {
    let temp = tempfile::tempdir().unwrap();

    let options = Opt {
        import_dump: Some(GetDump::MoviesWithSettingsV3.path()),
        ..default_settings(temp.path())
    };
    let server = Server::new_with_options(options).await.unwrap();

    let (indexes, code) = server.list_indexes(None, None).await;
    assert_eq!(code, 200);

    assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
    assert_eq!(indexes["results"][0]["uid"], json!("indexUID"));
    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));

    let index = server.index("indexUID");

    let (stats, code) = index.stats().await;
    assert_eq!(code, 200);
    assert_eq!(
        stats,
        json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }})
    );

    let (settings, code) = index.settings().await;
    assert_eq!(code, 200);
    assert_eq!(
        settings,
        json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } })
    );

    let (tasks, code) = index.list_tasks().await;
    assert_eq!(code, 200);
    assert_eq!(
        tasks,
        json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "from": 1, "next": null })
    );

    // finally we're just going to check that we can still get a few documents by id
    let (document, code) = index.get_document(100, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "id": 100, "title": "Lock, Stock and Two Smoking Barrels", "genres": ["Comedy", "Crime"], "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000 })
    );

    let (document, code) = index.get_document(500, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "id": 500, "title": "Reservoir Dogs", "genres": ["Crime", "Thriller"], "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000})
    );

    let (document, code) = index.get_document(10006, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "id": 10006, "title": "Wild Seven", "genres": ["Action", "Crime", "Drama"], "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600})
    );
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v3_rubygems_with_settings() {
    let temp = tempfile::tempdir().unwrap();

    let options = Opt {
        import_dump: Some(GetDump::RubyGemsWithSettingsV3.path()),
        ..default_settings(temp.path())
    };
    let server = Server::new_with_options(options).await.unwrap();

    let (indexes, code) = server.list_indexes(None, None).await;
    assert_eq!(code, 200);

    assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
    assert_eq!(indexes["results"][0]["uid"], json!("rubygems"));
    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));

    let index = server.index("rubygems");

    let (stats, code) = index.stats().await;
    assert_eq!(code, 200);
    assert_eq!(
        stats,
        json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"description": 53, "id": 53, "name": 53, "summary": 53, "total_downloads": 53, "version": 53 }})
    );

    let (settings, code) = index.settings().await;
    assert_eq!(code, 200);
    assert_eq!(
        settings,
        json!({"displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } })
    );

    let (tasks, code) = index.list_tasks().await;
    assert_eq!(code, 200);
    assert_eq!(
        tasks["results"][0],
        json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
    );

    // finally we're just going to check that we can still get a few documents by id
    let (document, code) = index.get_document(188040, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "name": "meilisearch", "summary": "An easy-to-use ruby client for Meilisearch API", "description": "An easy-to-use ruby client for Meilisearch API. See https://github.com/meilisearch/MeiliSearch", "id": "188040", "version": "0.15.2", "total_downloads": "7465"})
    );

    let (document, code) = index.get_document(191940, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "name": "doggo", "summary": "RSpec 3 formatter - documentation, with progress indication", "description": "Similar to \"rspec -f d\", but also indicates progress by showing the current test number and total test count on each line.", "id": "191940", "version": "1.1.0", "total_downloads": "9394"})
    );

    let (document, code) = index.get_document(159227, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "name": "vortex-of-agony", "summary": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "description": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "id": "159227", "version": "0.1.0", "total_downloads": "1007"})
    );
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v4_movie_raw() {
    let temp = tempfile::tempdir().unwrap();

    let options = Opt {
        import_dump: Some(GetDump::MoviesRawV4.path()),
        ..default_settings(temp.path())
    };
    let server = Server::new_with_options(options).await.unwrap();

    let (indexes, code) = server.list_indexes(None, None).await;
    assert_eq!(code, 200);

    assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
    assert_eq!(indexes["results"][0]["uid"], json!("indexUID"));
    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));

    let index = server.index("indexUID");

    let (stats, code) = index.stats().await;
    assert_eq!(code, 200);
    assert_eq!(
        stats,
        json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }})
    );

    let (settings, code) = index.settings().await;
    assert_eq!(code, 200);
    assert_eq!(
        settings,
        json!({ "displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } })
    );

    let (tasks, code) = index.list_tasks().await;
    assert_eq!(code, 200);
    assert_eq!(
        tasks,
        json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit": 20, "from": 0, "next": null })
    );

    // finally we're just going to check that we can still get a few documents by id
    let (document, code) = index.get_document(100, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "id": 100, "title": "Lock, Stock and Two Smoking Barrels", "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "genres": ["Comedy", "Crime"], "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000})
    );

    let (document, code) = index.get_document(500, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "id": 500, "title": "Reservoir Dogs", "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "genres": ["Crime", "Thriller"], "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000})
    );

    let (document, code) = index.get_document(10006, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "id": 10006, "title": "Wild Seven", "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "genres": ["Action", "Crime", "Drama"], "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600})
    );
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v4_movie_with_settings() {
    let temp = tempfile::tempdir().unwrap();

    let options = Opt {
        import_dump: Some(GetDump::MoviesWithSettingsV4.path()),
        ..default_settings(temp.path())
    };
    let server = Server::new_with_options(options).await.unwrap();

    let (indexes, code) = server.list_indexes(None, None).await;
    assert_eq!(code, 200);

    assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
    assert_eq!(indexes["results"][0]["uid"], json!("indexUID"));
    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));

    let index = server.index("indexUID");

    let (stats, code) = index.stats().await;
    assert_eq!(code, 200);
    assert_eq!(
        stats,
        json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }})
    );

    let (settings, code) = index.settings().await;
    assert_eq!(code, 200);
    assert_eq!(
        settings,
        json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } })
    );

    let (tasks, code) = index.list_tasks().await;
    assert_eq!(code, 200);
    assert_eq!(
        tasks,
        json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "from": 1, "next": null })
    );

    // finally we're just going to check that we can still get a few documents by id
    let (document, code) = index.get_document(100, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "id": 100, "title": "Lock, Stock and Two Smoking Barrels", "genres": ["Comedy", "Crime"], "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000 })
    );

    let (document, code) = index.get_document(500, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "id": 500, "title": "Reservoir Dogs", "genres": ["Crime", "Thriller"], "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000})
    );

    let (document, code) = index.get_document(10006, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "id": 10006, "title": "Wild Seven", "genres": ["Action", "Crime", "Drama"], "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600})
    );
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v4_rubygems_with_settings() {
    let temp = tempfile::tempdir().unwrap();

    let options = Opt {
        import_dump: Some(GetDump::RubyGemsWithSettingsV4.path()),
        ..default_settings(temp.path())
    };
    let server = Server::new_with_options(options).await.unwrap();

    let (indexes, code) = server.list_indexes(None, None).await;
    assert_eq!(code, 200);

    assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
    assert_eq!(indexes["results"][0]["uid"], json!("rubygems"));
    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));

    let index = server.index("rubygems");

    let (stats, code) = index.stats().await;
    assert_eq!(code, 200);
    assert_eq!(
        stats,
        json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"description": 53, "id": 53, "name": 53, "summary": 53, "total_downloads": 53, "version": 53 }})
    );

    let (settings, code) = index.settings().await;
    assert_eq!(code, 200);
    assert_eq!(
        settings,
        json!({ "displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } })
    );

    let (tasks, code) = index.list_tasks().await;
    assert_eq!(code, 200);
    assert_eq!(
        tasks["results"][0],
        json!({ "uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
    );

    // finally we're just going to check that we can still get a few documents by id
    let (document, code) = index.get_document(188040, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "name": "meilisearch", "summary": "An easy-to-use ruby client for Meilisearch API", "description": "An easy-to-use ruby client for Meilisearch API. See https://github.com/meilisearch/MeiliSearch", "id": "188040", "version": "0.15.2", "total_downloads": "7465"})
    );

    let (document, code) = index.get_document(191940, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "name": "doggo", "summary": "RSpec 3 formatter - documentation, with progress indication", "description": "Similar to \"rspec -f d\", but also indicates progress by showing the current test number and total test count on each line.", "id": "191940", "version": "1.1.0", "total_downloads": "9394"})
    );

    let (document, code) = index.get_document(159227, None).await;
    assert_eq!(code, 200);
    assert_eq!(
        document,
        json!({ "name": "vortex-of-agony", "summary": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "description": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "id": "159227", "version": "0.1.0", "total_downloads": "1007"})
    );
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v5() {
    let temp = tempfile::tempdir().unwrap();

    let options = Opt {
        import_dump: Some(GetDump::TestV5.path()),
        ..default_settings(temp.path())
    };
    let mut server = Server::new_auth_with_options(options, temp).await;
    server.use_api_key("MASTER_KEY");

    let (indexes, code) = server.list_indexes(None, None).await;
    assert_eq!(code, 200, "{indexes}");

    assert_eq!(indexes["results"].as_array().unwrap().len(), 2);
    assert_eq!(indexes["results"][0]["uid"], json!("test"));
    assert_eq!(indexes["results"][1]["uid"], json!("test2"));
    assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));

    let expected_stats = json!({
        "numberOfDocuments": 10,
        "isIndexing": false,
        "fieldDistribution": {
            "cast": 10,
            "director": 10,
            "genres": 10,
            "id": 10,
            "overview": 10,
            "popularity": 10,
            "poster_path": 10,
            "producer": 10,
            "production_companies": 10,
            "release_date": 10,
            "tagline": 10,
            "title": 10,
            "vote_average": 10,
            "vote_count": 10
        }
    });

    let index1 = server.index("test");
    let index2 = server.index("test2");

    let (stats, code) = index1.stats().await;
    assert_eq!(code, 200);
    assert_eq!(stats, expected_stats);

    let (docs, code) = index2
        .get_all_documents(GetAllDocumentsOptions::default())
        .await;
    assert_eq!(code, 200);
    assert_eq!(docs["results"].as_array().unwrap().len(), 10);
    let (docs, code) = index1
        .get_all_documents(GetAllDocumentsOptions::default())
        .await;
    assert_eq!(code, 200);
    assert_eq!(docs["results"].as_array().unwrap().len(), 10);

    let (stats, code) = index2.stats().await;
    assert_eq!(code, 200);
    assert_eq!(stats, expected_stats);

    let (keys, code) = server.list_api_keys().await;
    assert_eq!(code, 200);
    let key = &keys["results"][0];

    assert_eq!(key["name"], "my key");
}

@@ -102,7 +102,7 @@ async fn error_create_with_invalid_index_uid() {
    let (response, code) = index.create(None).await;

    let expected_response = json!({
-        "message": "`test test#!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
+        "message": "invalid index uid `test test#!`, the uid must be an integer or a string containing only alphanumeric characters a-z A-Z 0-9, hyphens - and underscores _.",
        "code": "invalid_index_uid",
        "type": "invalid_request",
        "link": "https://docs.meilisearch.com/errors#invalid_index_uid"

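The updated error message above spells out exactly which characters a uid may contain. As a rough illustration only (not the server's actual validation code), a check matching that message could look like this:

// Hypothetical sketch of the uid rule described by the error message above.
fn is_valid_index_uid(uid: &str) -> bool {
    !uid.is_empty()
        && uid
            .chars()
            .all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '_')
}

fn main() {
    assert!(is_valid_index_uid("movies_2"));
    assert!(!is_valid_index_uid("test test#!")); // the uid rejected in the test above
}
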
@@ -43,8 +43,8 @@ async fn error_delete_unexisting_index() {
    assert_eq!(response["error"], expected_response);
}

-#[cfg(not(windows))]
#[actix_rt::test]
+#[cfg_attr(target_os = "windows", ignore)]
async fn loop_delete_add_documents() {
    let server = Server::new().await;
    let index = server.index("test");
@@ -52,10 +52,10 @@ async fn loop_delete_add_documents() {
    let mut tasks = Vec::new();
    for _ in 0..50 {
        let (response, code) = index.add_documents(documents.clone(), None).await;
-        tasks.push(response["uid"].as_u64().unwrap());
+        tasks.push(response["taskUid"].as_u64().unwrap());
        assert_eq!(code, 202, "{}", response);
        let (response, code) = index.delete().await;
-        tasks.push(response["uid"].as_u64().unwrap());
+        tasks.push(response["taskUid"].as_u64().unwrap());
        assert_eq!(code, 202, "{}", response);
    }

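The hunk above tracks the task API rename: enqueue responses expose the identifier as `taskUid` rather than `uid`. A minimal sketch of reading it, assuming the response shape shown in these tests:

use serde_json::{json, Value};

// Reads the task identifier from an enqueue response under the new field name.
fn task_id(response: &Value) -> Option<u64> {
    response["taskUid"].as_u64()
}

fn main() {
    let response = json!({ "taskUid": 1, "indexUid": "test", "status": "enqueued" });
    assert_eq!(task_id(&response), Some(1));
}
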
@@ -16,12 +16,11 @@ async fn create_and_get_index() {

    assert_eq!(code, 200);
    assert_eq!(response["uid"], "test");
-    assert_eq!(response["name"], "test");
    assert!(response.get("createdAt").is_some());
    assert!(response.get("updatedAt").is_some());
    assert_eq!(response["createdAt"], response["updatedAt"]);
    assert_eq!(response["primaryKey"], Value::Null);
-    assert_eq!(response.as_object().unwrap().len(), 5);
+    assert_eq!(response.as_object().unwrap().len(), 4);
}

#[actix_rt::test]

@@ -45,10 +44,10 @@ async fn error_get_unexisting_index() {
#[actix_rt::test]
async fn no_index_return_empty_list() {
    let server = Server::new().await;
-    let (response, code) = server.list_indexes().await;
+    let (response, code) = server.list_indexes(None, None).await;
    assert_eq!(code, 200);
-    assert!(response.is_array());
-    assert!(response.as_array().unwrap().is_empty());
+    assert!(response["results"].is_array());
+    assert!(response["results"].as_array().unwrap().is_empty());
}

#[actix_rt::test]

@@ -59,10 +58,10 @@ async fn list_multiple_indexes() {

    server.index("test").wait_task(1).await;

-    let (response, code) = server.list_indexes().await;
+    let (response, code) = server.list_indexes(None, None).await;
    assert_eq!(code, 200);
-    assert!(response.is_array());
-    let arr = response.as_array().unwrap();
+    assert!(response["results"].is_array());
+    let arr = response["results"].as_array().unwrap();
    assert_eq!(arr.len(), 2);
    assert!(arr
        .iter()

@@ -72,6 +71,118 @@ async fn list_multiple_indexes() {
        .any(|entry| entry["uid"] == "test1" && entry["primaryKey"] == "key"));
}

#[actix_rt::test]
async fn get_and_paginate_indexes() {
    let server = Server::new().await;
    const NB_INDEXES: usize = 50;
    for i in 0..NB_INDEXES {
        server.index(&format!("test_{i:02}")).create(None).await;
        server
            .index(&format!("test_{i:02}"))
            .wait_task(i as u64)
            .await;
    }

    // basic
    let (response, code) = server.list_indexes(None, None).await;
    assert_eq!(code, 200);
    assert_eq!(response["limit"], json!(20));
    assert_eq!(response["offset"], json!(0));
    assert_eq!(response["total"], json!(NB_INDEXES));
    assert!(response["results"].is_array());
    let arr = response["results"].as_array().unwrap();
    assert_eq!(arr.len(), 20);
    // ensuring we get all the indexes in the alphabetical order
    assert!((0..20)
        .map(|idx| format!("test_{idx:02}"))
        .zip(arr)
        .all(|(expected, entry)| entry["uid"] == expected));

    // with an offset
    let (response, code) = server.list_indexes(Some(15), None).await;
    assert_eq!(code, 200);
    assert_eq!(response["limit"], json!(20));
    assert_eq!(response["offset"], json!(15));
    assert_eq!(response["total"], json!(NB_INDEXES));
    assert!(response["results"].is_array());
    let arr = response["results"].as_array().unwrap();
    assert_eq!(arr.len(), 20);
    assert!((15..35)
        .map(|idx| format!("test_{idx:02}"))
        .zip(arr)
        .all(|(expected, entry)| entry["uid"] == expected));

    // with an offset and not enough elements
    let (response, code) = server.list_indexes(Some(45), None).await;
    assert_eq!(code, 200);
    assert_eq!(response["limit"], json!(20));
    assert_eq!(response["offset"], json!(45));
    assert_eq!(response["total"], json!(NB_INDEXES));
    assert!(response["results"].is_array());
    let arr = response["results"].as_array().unwrap();
    assert_eq!(arr.len(), 5);
    assert!((45..50)
        .map(|idx| format!("test_{idx:02}"))
        .zip(arr)
        .all(|(expected, entry)| entry["uid"] == expected));

    // with a limit lower than the default
    let (response, code) = server.list_indexes(None, Some(5)).await;
    assert_eq!(code, 200);
    assert_eq!(response["limit"], json!(5));
    assert_eq!(response["offset"], json!(0));
    assert_eq!(response["total"], json!(NB_INDEXES));
    assert!(response["results"].is_array());
    let arr = response["results"].as_array().unwrap();
    assert_eq!(arr.len(), 5);
    assert!((0..5)
        .map(|idx| format!("test_{idx:02}"))
        .zip(arr)
        .all(|(expected, entry)| entry["uid"] == expected));

    // with a limit higher than the default
    let (response, code) = server.list_indexes(None, Some(40)).await;
    assert_eq!(code, 200);
    assert_eq!(response["limit"], json!(40));
    assert_eq!(response["offset"], json!(0));
    assert_eq!(response["total"], json!(NB_INDEXES));
    assert!(response["results"].is_array());
    let arr = response["results"].as_array().unwrap();
    assert_eq!(arr.len(), 40);
    assert!((0..40)
        .map(|idx| format!("test_{idx:02}"))
        .zip(arr)
        .all(|(expected, entry)| entry["uid"] == expected));

    // with a limit higher than the total number of indexes
    let (response, code) = server.list_indexes(None, Some(80)).await;
    assert_eq!(code, 200);
    assert_eq!(response["limit"], json!(80));
    assert_eq!(response["offset"], json!(0));
    assert_eq!(response["total"], json!(NB_INDEXES));
    assert!(response["results"].is_array());
    let arr = response["results"].as_array().unwrap();
    assert_eq!(arr.len(), 50);
    assert!((0..50)
        .map(|idx| format!("test_{idx:02}"))
        .zip(arr)
        .all(|(expected, entry)| entry["uid"] == expected));

    // with a limit and an offset
    let (response, code) = server.list_indexes(Some(20), Some(10)).await;
    assert_eq!(code, 200);
    assert_eq!(response["limit"], json!(10));
    assert_eq!(response["offset"], json!(20));
    assert_eq!(response["total"], json!(NB_INDEXES));
    assert!(response["results"].is_array());
    let arr = response["results"].as_array().unwrap();
    assert_eq!(arr.len(), 10);
    assert!((20..30)
        .map(|idx| format!("test_{idx:02}"))
        .zip(arr)
        .all(|(expected, entry)| entry["uid"] == expected));
}

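The expected result count in every pagination case above is just `min(limit, total - offset)`; for instance, offset 45 with the default limit of 20 over 50 indexes leaves 5 results. A small self-contained check of that arithmetic:

// Expected page length for offset/limit pagination over `total` items.
fn page_len(total: usize, offset: usize, limit: usize) -> usize {
    total.saturating_sub(offset).min(limit)
}

fn main() {
    assert_eq!(page_len(50, 0, 20), 20); // basic: default limit
    assert_eq!(page_len(50, 45, 20), 5); // offset with not enough elements
    assert_eq!(page_len(50, 0, 80), 50); // limit above the total
    assert_eq!(page_len(50, 20, 10), 10); // limit and offset combined
}
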
#[actix_rt::test]
async fn get_invalid_index_uid() {
    let server = Server::new().await;

@@ -35,7 +35,7 @@ async fn stats() {

    let (response, code) = index.add_documents(documents, None).await;
    assert_eq!(code, 202);
-    assert_eq!(response["uid"], 1);
+    assert_eq!(response["taskUid"], 1);

    index.wait_task(1).await;

@@ -1,6 +1,6 @@
use crate::common::Server;
-use chrono::DateTime;
use serde_json::json;
+use time::{format_description::well_known::Rfc3339, OffsetDateTime};

#[actix_rt::test]
async fn update_primary_key() {

@@ -21,16 +21,17 @@ async fn update_primary_key() {
    assert_eq!(code, 200);

    assert_eq!(response["uid"], "test");
-    assert_eq!(response["name"], "test");
    assert!(response.get("createdAt").is_some());
    assert!(response.get("updatedAt").is_some());

-    let created_at = DateTime::parse_from_rfc3339(response["createdAt"].as_str().unwrap()).unwrap();
-    let updated_at = DateTime::parse_from_rfc3339(response["updatedAt"].as_str().unwrap()).unwrap();
+    let created_at =
+        OffsetDateTime::parse(response["createdAt"].as_str().unwrap(), &Rfc3339).unwrap();
+    let updated_at =
+        OffsetDateTime::parse(response["updatedAt"].as_str().unwrap(), &Rfc3339).unwrap();
    assert!(created_at < updated_at);

    assert_eq!(response["primaryKey"], "primary");
-    assert_eq!(response.as_object().unwrap().len(), 5);
+    assert_eq!(response.as_object().unwrap().len(), 4);
}

#[actix_rt::test]

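The two hunks above migrate timestamp parsing from chrono to the `time` crate. A minimal standalone sketch of the new pattern, assuming `time` v0.3 with its `parsing` feature enabled; the literal timestamps are taken from the test fixtures above:

use time::{format_description::well_known::Rfc3339, OffsetDateTime};

fn main() {
    let created_at = OffsetDateTime::parse("2021-09-08T08:24:02.323444Z", &Rfc3339).unwrap();
    let updated_at = OffsetDateTime::parse("2021-09-08T08:24:39.812922Z", &Rfc3339).unwrap();
    // `OffsetDateTime` implements `Ord`, so the tests can compare values directly.
    assert!(created_at < updated_at);
}
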
@@ -2,6 +2,7 @@ mod auth;
mod common;
mod dashboard;
mod documents;
mod dumps;
mod index;
mod search;
mod settings;

@@ -36,6 +36,30 @@ async fn search_unexisting_parameter() {
        .await;
}

#[actix_rt::test]
async fn search_invalid_highlight_and_crop_tags() {
    let server = Server::new().await;
    let index = server.index("test");

    let fields = &["cropMarker", "highlightPreTag", "highlightPostTag"];

    for field in fields {
        // object
        let (response, code) = index
            .search_post(json!({field.to_string(): {"marker": "<crop>"}}))
            .await;
        assert_eq!(code, 400, "field {} passing object: {}", &field, response);
        assert_eq!(response["code"], "bad_request");

        // array
        let (response, code) = index
            .search_post(json!({field.to_string(): ["marker", "<crop>"]}))
            .await;
        assert_eq!(code, 400, "field {} passing array: {}", &field, response);
        assert_eq!(response["code"], "bad_request");
    }
}

#[actix_rt::test]
async fn filter_invalid_syntax_object() {
    let server = Server::new().await;
@@ -83,7 +107,7 @@ async fn filter_invalid_syntax_array() {
        "link": "https://docs.meilisearch.com/errors#invalid_filter"
    });
    index
-        .search(json!({"filter": [["title & Glass"]]}), |response, code| {
+        .search(json!({"filter": ["title & Glass"]}), |response, code| {
            assert_eq!(response, expected_response);
            assert_eq!(code, 400);
        })
@@ -140,7 +164,7 @@ async fn filter_invalid_attribute_array() {
        "link": "https://docs.meilisearch.com/errors#invalid_filter"
    });
    index
-        .search(json!({"filter": [["many = Glass"]]}), |response, code| {
+        .search(json!({"filter": ["many = Glass"]}), |response, code| {
            assert_eq!(response, expected_response);
            assert_eq!(code, 400);
        })
@@ -194,7 +218,7 @@ async fn filter_reserved_geo_attribute_array() {
        "link": "https://docs.meilisearch.com/errors#invalid_filter"
    });
    index
-        .search(json!({"filter": [["_geo = Glass"]]}), |response, code| {
+        .search(json!({"filter": ["_geo = Glass"]}), |response, code| {
            assert_eq!(response, expected_response);
            assert_eq!(code, 400);
        })
@@ -249,7 +273,7 @@ async fn filter_reserved_attribute_array() {
    });
    index
        .search(
-            json!({"filter": [["_geoDistance = Glass"]]}),
+            json!({"filter": ["_geoDistance = Glass"]}),
            |response, code| {
                assert_eq!(response, expected_response);
                assert_eq!(code, 400);

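For context on the hunks above: the tests previously wrapped a single filter expression in a nested array. In Meilisearch's filter-array syntax as documented at the time, a bare string entry is one expression and an inner array is an OR group, with the outer level combined by AND, so single-expression cases read more naturally flat. A sketch of both shapes, assuming those semantics:

use serde_json::json;

fn main() {
    // One expression: a bare string inside the filter array.
    let flat = json!({ "filter": ["title = Glass"] });
    // The outer level ANDs its entries; an inner array is an OR group.
    let nested = json!({ "filter": [["title = Glass", "title = Shazam!"], "id = 287947"] });
    println!("{}\n{}", flat, nested);
}
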
meilisearch-http/tests/search/formatted.rs (new file, 471 lines)
@@ -0,0 +1,471 @@
use super::*;
use crate::common::Server;
use serde_json::json;

#[actix_rt::test]
async fn formatted_contain_wildcard() {
    let server = Server::new().await;
    let index = server.index("test");

    index
        .update_settings(json!({ "displayedAttributes": ["id", "cattos"] }))
        .await;

    let documents = NESTED_DOCUMENTS.clone();
    index.add_documents(documents, None).await;
    index.wait_task(1).await;

    index.search(json!({ "q": "pesti", "attributesToRetrieve": ["father", "mother"], "attributesToHighlight": ["father", "mother", "*"], "attributesToCrop": ["doggos"], "showMatchesPosition": true }),
        |response, code| {
            assert_eq!(code, 200, "{}", response);
            assert_eq!(
                response["hits"][0],
                json!({
                    "_formatted": {
                        "id": "852",
                        "cattos": "<em>pesti</em>",
                    },
                    "_matchesPosition": {"cattos": [{"start": 0, "length": 5}]},
                })
            );
        })
        .await;

    index
        .search(
            json!({ "q": "pesti", "attributesToRetrieve": ["*"] }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(
                    response["hits"][0],
                    json!({
                        "id": 852,
                        "cattos": "pesti",
                    })
                );
            },
        )
        .await;

    index
        .search(
            json!({ "q": "pesti", "attributesToRetrieve": ["*"], "attributesToHighlight": ["id"], "showMatchesPosition": true }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(
                    response["hits"][0],
                    json!({
                        "id": 852,
                        "cattos": "pesti",
                        "_formatted": {
                            "id": "852",
                            "cattos": "pesti",
                        },
                        "_matchesPosition": {"cattos": [{"start": 0, "length": 5}]},
                    })
                );
            },
        )
        .await;

    index
        .search(
            json!({ "q": "pesti", "attributesToRetrieve": ["*"], "attributesToCrop": ["*"] }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(
                    response["hits"][0],
                    json!({
                        "id": 852,
                        "cattos": "pesti",
                        "_formatted": {
                            "id": "852",
                            "cattos": "pesti",
                        }
                    })
                );
            },
        )
        .await;

    index
        .search(
            json!({ "q": "pesti", "attributesToCrop": ["*"] }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(
                    response["hits"][0],
                    json!({
                        "id": 852,
                        "cattos": "pesti",
                        "_formatted": {
                            "id": "852",
                            "cattos": "pesti",
                        }
                    })
                );
            },
        )
        .await;
}

#[actix_rt::test]
async fn format_nested() {
    let server = Server::new().await;
    let index = server.index("test");

    let documents = NESTED_DOCUMENTS.clone();
    index.add_documents(documents, None).await;
    index.wait_task(0).await;

    index
        .search(
            json!({ "q": "pesti", "attributesToRetrieve": ["doggos"] }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(
                    response["hits"][0],
                    json!({
                        "doggos": [
                            {
                                "name": "bobby",
                                "age": 2,
                            },
                            {
                                "name": "buddy",
                                "age": 4,
                            },
                        ],
                    })
                );
            },
        )
        .await;

    index
        .search(
            json!({ "q": "pesti", "attributesToRetrieve": ["doggos.name"] }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(
                    response["hits"][0],
                    json!({
                        "doggos": [
                            {
                                "name": "bobby",
                            },
                            {
                                "name": "buddy",
                            },
                        ],
                    })
                );
            },
        )
        .await;

    index
        .search(
            json!({ "q": "bobby", "attributesToRetrieve": ["doggos.name"], "showMatchesPosition": true }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(
                    response["hits"][0],
                    json!({
                        "doggos": [
                            {
                                "name": "bobby",
                            },
                            {
                                "name": "buddy",
                            },
                        ],
                        "_matchesPosition": {"doggos.name": [{"start": 0, "length": 5}]},
                    })
                );
            },
        )
        .await;

    index
        .search(json!({ "q": "pesti", "attributesToRetrieve": [], "attributesToHighlight": ["doggos.name"] }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(
                    response["hits"][0],
                    json!({
                        "_formatted": {
                            "doggos": [
                                {
                                    "name": "bobby",
                                },
                                {
                                    "name": "buddy",
                                },
                            ],
                        },
                    })
                );
            })
        .await;

    index
        .search(json!({ "q": "pesti", "attributesToRetrieve": [], "attributesToCrop": ["doggos.name"] }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(
                    response["hits"][0],
                    json!({
                        "_formatted": {
                            "doggos": [
                                {
                                    "name": "bobby",
                                },
                                {
                                    "name": "buddy",
                                },
                            ],
                        },
                    })
                );
            })
        .await;

    index
        .search(json!({ "q": "pesti", "attributesToRetrieve": ["doggos.name"], "attributesToHighlight": ["doggos.age"] }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(
                    response["hits"][0],
                    json!({
                        "doggos": [
                            {
                                "name": "bobby",
                            },
                            {
                                "name": "buddy",
                            },
                        ],
                        "_formatted": {
                            "doggos": [
                                {
                                    "name": "bobby",
                                    "age": "2",
                                },
                                {
                                    "name": "buddy",
                                    "age": "4",
                                },
                            ],
                        },
                    })
                );
            })
        .await;

    index
        .search(json!({ "q": "pesti", "attributesToRetrieve": [], "attributesToHighlight": ["doggos.age"], "attributesToCrop": ["doggos.name"] }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(
                    response["hits"][0],
                    json!({
                        "_formatted": {
                            "doggos": [
                                {
                                    "name": "bobby",
                                    "age": "2",
                                },
                                {
                                    "name": "buddy",
                                    "age": "4",
                                },
                            ],
                        },
                    })
                );
            })
        .await;
}

#[actix_rt::test]
|
||||
async fn displayedattr_2_smol() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
|
||||
// not enough displayed for the other settings
|
||||
index
|
||||
.update_settings(json!({ "displayedAttributes": ["id"] }))
|
||||
.await;
|
||||
|
||||
let documents = NESTED_DOCUMENTS.clone();
|
||||
index.add_documents(documents, None).await;
|
||||
index.wait_task(1).await;
|
||||
|
||||
index
|
||||
.search(json!({ "attributesToRetrieve": ["father", "id"], "attributesToHighlight": ["mother"], "attributesToCrop": ["cattos"] }),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(
|
||||
response["hits"][0],
|
||||
json!({
|
||||
"id": 852,
|
||||
})
|
||||
);
|
||||
})
|
||||
.await;
|
||||
|
||||
index
|
||||
.search(
|
||||
json!({ "attributesToRetrieve": ["id"] }),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(
|
||||
response["hits"][0],
|
||||
json!({
|
||||
"id": 852,
|
||||
})
|
||||
);
|
||||
},
|
||||
)
|
||||
.await;
|
||||
|
||||
index
|
||||
.search(
|
||||
json!({ "attributesToHighlight": ["id"] }),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(
|
||||
response["hits"][0],
|
||||
json!({
|
||||
"id": 852,
|
||||
"_formatted": {
|
||||
"id": "852",
|
||||
}
|
||||
})
|
||||
);
|
||||
},
|
||||
)
|
||||
.await;
|
||||
|
||||
index
|
||||
.search(json!({ "attributesToCrop": ["id"] }), |response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(
|
||||
response["hits"][0],
|
||||
json!({
|
||||
"id": 852,
|
||||
"_formatted": {
|
||||
"id": "852",
|
||||
}
|
||||
})
|
||||
);
|
||||
})
|
||||
.await;
|
||||
|
||||
index
|
||||
.search(
|
||||
json!({ "attributesToHighlight": ["id"], "attributesToCrop": ["id"] }),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(
|
||||
response["hits"][0],
|
||||
json!({
|
||||
"id": 852,
|
||||
"_formatted": {
|
||||
"id": "852",
|
||||
}
|
||||
})
|
||||
);
|
||||
},
|
||||
)
|
||||
.await;
|
||||
|
||||
index
|
||||
.search(
|
||||
json!({ "attributesToHighlight": ["cattos"] }),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(
|
||||
response["hits"][0],
|
||||
json!({
|
||||
"id": 852,
|
||||
})
|
||||
);
|
||||
},
|
||||
)
|
||||
.await;
|
||||
|
||||
index
|
||||
.search(
|
||||
json!({ "attributesToCrop": ["cattos"] }),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(
|
||||
response["hits"][0],
|
||||
json!({
|
||||
"id": 852,
|
||||
})
|
||||
);
|
||||
},
|
||||
)
|
||||
.await;
|
||||
|
||||
index
|
||||
.search(
|
||||
json!({ "attributesToRetrieve": ["cattos"] }),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(response["hits"][0], json!({}));
|
||||
},
|
||||
)
|
||||
.await;
|
||||
|
||||
index
|
||||
.search(
|
||||
json!({ "attributesToRetrieve": ["cattos"], "attributesToHighlight": ["cattos"], "attributesToCrop": ["cattos"] }),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(response["hits"][0], json!({}));
|
||||
|
||||
}
|
||||
)
|
||||
.await;
|
||||
|
||||
index
|
||||
.search(
|
||||
json!({ "attributesToRetrieve": ["cattos"], "attributesToHighlight": ["id"] }),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(
|
||||
response["hits"][0],
|
||||
json!({
|
||||
"_formatted": {
|
||||
"id": "852",
|
||||
}
|
||||
})
|
||||
);
|
||||
},
|
||||
)
|
||||
.await;
|
||||
|
||||
index
|
||||
.search(
|
||||
json!({ "attributesToRetrieve": ["cattos"], "attributesToCrop": ["id"] }),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(
|
||||
response["hits"][0],
|
||||
json!({
|
||||
"_formatted": {
|
||||
"id": "852",
|
||||
}
|
||||
})
|
||||
);
|
||||
},
|
||||
)
|
||||
.await;
|
||||
}
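
The searches above all pin down one rule: retrieval, highlighting, and cropping only ever apply to attributes that are also displayed. A minimal sketch of that intersection rule (an assumption drawn from the assertions, not the engine's actual code; `effective` is a hypothetical helper):

fn effective<'a>(requested: &[&'a str], displayed: &[&'a str]) -> Vec<&'a str> {
    // Only attributes present in `displayedAttributes` survive.
    requested.iter().copied().filter(|a| displayed.contains(a)).collect()
}

fn main() {
    let displayed = ["id"];
    // Asking to highlight "cattos" while only "id" is displayed is a no-op:
    assert!(effective(&["cattos"], &displayed).is_empty());
    assert_eq!(effective(&["id", "cattos"], &displayed), vec!["id"]);
}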

@@ -1,39 +1,97 @@
-// This modules contains all the test concerning search. Each particular feture of the search
+// This modules contains all the test concerning search. Each particular feature of the search
// should be tested in its own module to isolate tests and keep the tests readable.

mod errors;
mod formatted;

use crate::common::Server;
use once_cell::sync::Lazy;
use serde_json::{json, Value};

-static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
+pub(self) static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
    json!([
        {
            "title": "Shazam!",
-           "id": "287947"
+           "id": "287947",
        },
        {
            "title": "Captain Marvel",
-           "id": "299537"
+           "id": "299537",
        },
        {
            "title": "Escape Room",
-           "id": "522681"
+           "id": "522681",
        },
-       { "title": "How to Train Your Dragon: The Hidden World", "id": "166428"
+       {
+           "title": "How to Train Your Dragon: The Hidden World",
+           "id": "166428",
        },
        {
            "title": "Glass",
-           "id": "450465"
+           "id": "450465",
        }
    ])
});

pub(self) static NESTED_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
    json!([
        {
            "id": 852,
            "father": "jean",
            "mother": "michelle",
            "doggos": [
                {
                    "name": "bobby",
                    "age": 2,
                },
                {
                    "name": "buddy",
                    "age": 4,
                },
            ],
            "cattos": "pesti",
        },
        {
            "id": 654,
            "father": "pierre",
            "mother": "sabine",
            "doggos": [
                {
                    "name": "gros bill",
                    "age": 8,
                },
            ],
            "cattos": ["simba", "pestiféré"],
        },
        {
            "id": 750,
            "father": "romain",
            "mother": "michelle",
            "cattos": ["enigma"],
        },
        {
            "id": 951,
            "father": "jean-baptiste",
            "mother": "sophie",
            "doggos": [
                {
                    "name": "turbo",
                    "age": 5,
                },
                {
                    "name": "fast",
                    "age": 6,
                },
            ],
            "cattos": ["moumoute", "gomez"],
        },
    ])
});
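
The NESTED_DOCUMENTS fixture above is what the dotted selectors such as "doggos.name" and "doggos.age" in the following tests run against. As a rough illustration of how such a selector can be resolved (a sketch only, not the permissive-json-pointer crate that meilisearch-lib actually pulls in; `select` is a hypothetical helper), note that arrays are traversed transparently:

use serde_json::{json, Value};

fn select<'a>(value: &'a Value, path: &[&str], out: &mut Vec<&'a Value>) {
    match (value, path) {
        // Path exhausted: this value is a match.
        (v, []) => out.push(v),
        // Descend into the named field of an object.
        (Value::Object(map), [head, rest @ ..]) => {
            if let Some(v) = map.get(*head) {
                select(v, rest, out);
            }
        }
        // Arrays add no path segment, so "doggos.name" reaches every element.
        (Value::Array(values), _) => {
            for v in values {
                select(v, path, out);
            }
        }
        _ => {}
    }
}

fn main() {
    let doc = json!({ "doggos": [{ "name": "bobby", "age": 2 }, { "name": "buddy", "age": 4 }] });
    let mut out = Vec::new();
    select(&doc, &["doggos", "name"], &mut out);
    assert_eq!(out, vec![&json!("bobby"), &json!("buddy")]);
}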

#[actix_rt::test]
async fn simple_placeholder_search() {
    let server = Server::new().await;
-   let index = server.index("test");
+   let index = server.index("basic");

    let documents = DOCUMENTS.clone();
    index.add_documents(documents, None).await;

@@ -45,6 +103,18 @@ async fn simple_placeholder_search() {
            assert_eq!(response["hits"].as_array().unwrap().len(), 5);
        })
        .await;

    let index = server.index("nested");
    let documents = NESTED_DOCUMENTS.clone();
    index.add_documents(documents, None).await;
    index.wait_task(1).await;

    index
        .search(json!({}), |response, code| {
            assert_eq!(code, 200, "{}", response);
            assert_eq!(response["hits"].as_array().unwrap().len(), 4);
        })
        .await;
}

#[actix_rt::test]
@@ -62,6 +132,18 @@ async fn simple_search() {
            assert_eq!(response["hits"].as_array().unwrap().len(), 1);
        })
        .await;

    let index = server.index("nested");
    let documents = NESTED_DOCUMENTS.clone();
    index.add_documents(documents, None).await;
    index.wait_task(1).await;

    index
        .search(json!({"q": "pesti"}), |response, code| {
            assert_eq!(code, 200, "{}", response);
            assert_eq!(response["hits"].as_array().unwrap().len(), 2);
        })
        .await;
}

#[actix_rt::test]
@@ -88,6 +170,27 @@ async fn search_multiple_params() {
            },
        )
        .await;

    let index = server.index("nested");
    let documents = NESTED_DOCUMENTS.clone();
    index.add_documents(documents, None).await;
    index.wait_task(1).await;

    index
        .search(
            json!({
                "q": "pesti",
                "attributesToCrop": ["catto:2"],
                "attributesToHighlight": ["catto"],
                "limit": 2,
                "offset": 0,
            }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(response["hits"].as_array().unwrap().len(), 2);
            },
        )
        .await;
}

#[actix_rt::test]
@@ -114,6 +217,43 @@ async fn search_with_filter_string_notation() {
            },
        )
        .await;

    let index = server.index("nested");

    index
        .update_settings(json!({"filterableAttributes": ["cattos", "doggos.age"]}))
        .await;

    let documents = NESTED_DOCUMENTS.clone();
    index.add_documents(documents, None).await;
    index.wait_task(3).await;

    index
        .search(
            json!({
                "filter": "cattos = pesti"
            }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(response["hits"].as_array().unwrap().len(), 1);
                assert_eq!(response["hits"][0]["id"], json!(852));
            },
        )
        .await;

    index
        .search(
            json!({
                "filter": "doggos.age > 5"
            }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(response["hits"].as_array().unwrap().len(), 2);
                assert_eq!(response["hits"][0]["id"], json!(654));
                assert_eq!(response["hits"][1]["id"], json!(951));
            },
        )
        .await;
}

#[actix_rt::test]
@@ -170,6 +310,28 @@ async fn search_with_sort_on_numbers() {
            },
        )
        .await;

    let index = server.index("nested");

    index
        .update_settings(json!({"sortableAttributes": ["doggos.age"]}))
        .await;

    let documents = NESTED_DOCUMENTS.clone();
    index.add_documents(documents, None).await;
    index.wait_task(3).await;

    index
        .search(
            json!({
                "sort": ["doggos.age:asc"]
            }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(response["hits"].as_array().unwrap().len(), 4);
            },
        )
        .await;
}

#[actix_rt::test]
@@ -196,6 +358,28 @@ async fn search_with_sort_on_strings() {
            },
        )
        .await;

    let index = server.index("nested");

    index
        .update_settings(json!({"sortableAttributes": ["doggos.name"]}))
        .await;

    let documents = NESTED_DOCUMENTS.clone();
    index.add_documents(documents, None).await;
    index.wait_task(3).await;

    index
        .search(
            json!({
                "sort": ["doggos.name:asc"]
            }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(response["hits"].as_array().unwrap().len(), 4);
            },
        )
        .await;
}

#[actix_rt::test]
@@ -236,16 +420,94 @@ async fn search_facet_distribution() {
    index
        .search(
            json!({
-               "facetsDistribution": ["title"]
+               "facets": ["title"]
            }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
-               let dist = response["facetsDistribution"].as_object().unwrap();
+               let dist = response["facetDistribution"].as_object().unwrap();
                assert_eq!(dist.len(), 1);
                assert!(dist.get("title").is_some());
            },
        )
        .await;

    let index = server.index("nested");

    index
        .update_settings(json!({"filterableAttributes": ["father", "doggos.name"]}))
        .await;

    let documents = NESTED_DOCUMENTS.clone();
    index.add_documents(documents, None).await;
    index.wait_task(3).await;

    // TODO: TAMO: fix the test
    index
        .search(
            json!({
                // "facets": ["father", "doggos.name"]
                "facets": ["father"]
            }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                let dist = response["facetDistribution"].as_object().unwrap();
                assert_eq!(dist.len(), 1);
                assert_eq!(
                    dist["father"],
                    json!({ "jean": 1, "pierre": 1, "romain": 1, "jean-baptiste": 1})
                );
                /*
                assert_eq!(
                    dist["doggos.name"],
                    json!({ "bobby": 1, "buddy": 1, "gros bill": 1, "turbo": 1, "fast": 1})
                );
                */
            },
        )
        .await;

    index
        .update_settings(json!({"filterableAttributes": ["doggos"]}))
        .await;
    index.wait_task(4).await;

    index
        .search(
            json!({
                "facets": ["doggos.name"]
            }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                let dist = response["facetDistribution"].as_object().unwrap();
                assert_eq!(dist.len(), 1);
                assert_eq!(
                    dist["doggos.name"],
                    json!({ "bobby": 1, "buddy": 1, "gros bill": 1, "turbo": 1, "fast": 1})
                );
            },
        )
        .await;

    index
        .search(
            json!({
                "facets": ["doggos"]
            }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                let dist = response["facetDistribution"].as_object().unwrap();
                assert_eq!(dist.len(), 3);
                assert_eq!(
                    dist["doggos.name"],
                    json!({ "bobby": 1, "buddy": 1, "gros bill": 1, "turbo": 1, "fast": 1})
                );
                assert_eq!(
                    dist["doggos.age"],
                    json!({ "2": 1, "4": 1, "5": 1, "6": 1, "8": 1})
                );
            },
        )
        .await;
}
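
A detail worth calling out in the facet assertions above: asking for the parent key "doggos" yields distributions for leaves such as "doggos.name" and "doggos.age". The sketch below (an assumption inferred from those assertions; `leaf_keys` is a hypothetical helper, not engine code) shows the key flattening involved, with arrays again adding no path segment:

use serde_json::{json, Value};

fn leaf_keys(prefix: &str, value: &Value, out: &mut Vec<String>) {
    match value {
        Value::Object(map) => {
            for (k, v) in map {
                let key = if prefix.is_empty() { k.clone() } else { format!("{prefix}.{k}") };
                leaf_keys(&key, v, out);
            }
        }
        // Arrays are transparent: "doggos.name", never "doggos.0.name".
        Value::Array(values) => {
            for v in values {
                leaf_keys(prefix, v, out);
            }
        }
        _ => out.push(prefix.to_string()),
    }
}

fn main() {
    let doc = json!({ "doggos": [{ "name": "bobby", "age": 2 }] });
    let mut out = Vec::new();
    leaf_keys("", &doc, &mut out);
    out.sort();
    out.dedup();
    assert_eq!(out, vec!["doggos.age", "doggos.name"]);
}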

#[actix_rt::test]
@@ -265,5 +527,192 @@ async fn displayed_attributes() {
        .search_post(json!({ "attributesToRetrieve": ["title", "id"] }))
        .await;
    assert_eq!(code, 200, "{}", response);
-   assert!(response["hits"].get("title").is_none());
+   assert!(response["hits"][0].get("title").is_some());
}

#[actix_rt::test]
async fn placeholder_search_is_hard_limited() {
    let server = Server::new().await;
    let index = server.index("test");

    let documents: Vec<_> = (0..1200)
        .map(|i| json!({ "id": i, "text": "I am unique!" }))
        .collect();
    index.add_documents(documents.into(), None).await;
    index.wait_task(0).await;

    index
        .search(
            json!({
                "limit": 1500,
            }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(response["hits"].as_array().unwrap().len(), 1000);
            },
        )
        .await;

    index
        .search(
            json!({
                "offset": 800,
                "limit": 400,
            }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(response["hits"].as_array().unwrap().len(), 200);
            },
        )
        .await;

    index
        .update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } }))
        .await;
    index.wait_task(1).await;

    index
        .search(
            json!({
                "limit": 1500,
            }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(response["hits"].as_array().unwrap().len(), 1200);
            },
        )
        .await;

    index
        .search(
            json!({
                "offset": 1000,
                "limit": 400,
            }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(response["hits"].as_array().unwrap().len(), 200);
            },
        )
        .await;
}

#[actix_rt::test]
async fn search_is_hard_limited() {
    let server = Server::new().await;
    let index = server.index("test");

    let documents: Vec<_> = (0..1200)
        .map(|i| json!({ "id": i, "text": "I am unique!" }))
        .collect();
    index.add_documents(documents.into(), None).await;
    index.wait_task(0).await;

    index
        .search(
            json!({
                "q": "unique",
                "limit": 1500,
            }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(response["hits"].as_array().unwrap().len(), 1000);
            },
        )
        .await;

    index
        .search(
            json!({
                "q": "unique",
                "offset": 800,
                "limit": 400,
            }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(response["hits"].as_array().unwrap().len(), 200);
            },
        )
        .await;

    index
        .update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } }))
        .await;
    index.wait_task(1).await;

    index
        .search(
            json!({
                "q": "unique",
                "limit": 1500,
            }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(response["hits"].as_array().unwrap().len(), 1200);
            },
        )
        .await;

    index
        .search(
            json!({
                "q": "unique",
                "offset": 1000,
                "limit": 400,
            }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(response["hits"].as_array().unwrap().len(), 200);
            },
        )
        .await;
}
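
Both hard-limit tests exercise the same arithmetic: the number of hits a page can return is the requested limit, clamped by how many documents remain below the `maxTotalHits` ceiling. A standalone sketch of that clamp (assumed behavior inferred from the assertions above; `hits_returned` is a hypothetical helper):

fn hits_returned(total_docs: usize, max_total_hits: usize, offset: usize, limit: usize) -> usize {
    // No request can see past the ceiling or past the end of the index.
    let reachable = total_docs.min(max_total_hits);
    // Whatever is left after the offset, capped by the requested limit.
    reachable.saturating_sub(offset).min(limit)
}

fn main() {
    assert_eq!(hits_returned(1200, 1000, 0, 1500), 1000);    // default ceiling wins
    assert_eq!(hits_returned(1200, 1000, 800, 400), 200);    // only 200 left under it
    assert_eq!(hits_returned(1200, 10_000, 0, 1500), 1200);  // raised ceiling: whole index
    assert_eq!(hits_returned(1200, 10_000, 1000, 400), 200); // 200 docs past offset 1000
}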

#[actix_rt::test]
async fn faceting_max_values_per_facet() {
    let server = Server::new().await;
    let index = server.index("test");

    index
        .update_settings(json!({ "filterableAttributes": ["number"] }))
        .await;

    let documents: Vec<_> = (0..10_000)
        .map(|id| json!({ "id": id, "number": id * 10 }))
        .collect();
    index.add_documents(json!(documents), None).await;
    index.wait_task(1).await;

    index
        .search(
            json!({
                "facets": ["number"]
            }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                let numbers = response["facetDistribution"]["number"].as_object().unwrap();
                assert_eq!(numbers.len(), 100);
            },
        )
        .await;

    index
        .update_settings(json!({ "faceting": { "maxValuesPerFacet": 10_000 } }))
        .await;
    index.wait_task(2).await;

    index
        .search(
            json!({
                "facets": ["number"]
            }),
            |response, code| {
                assert_eq!(code, 200, "{}", response);
                let numbers = dbg!(&response)["facetDistribution"]["number"]
                    .as_object()
                    .unwrap();
                assert_eq!(numbers.len(), 10_000);
            },
        )
        .await;
}
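
The same clamping idea applies to facets: with 10,000 distinct values for "number", the default `maxValuesPerFacet` of 100 (asserted in the settings tests below) truncates the distribution, and raising it exposes everything. As a one-function sketch (an assumption matching the two assertions above; `facet_values_shown` is a hypothetical helper):

fn facet_values_shown(distinct_values: usize, max_values_per_facet: usize) -> usize {
    distinct_values.min(max_values_per_facet)
}

fn main() {
    assert_eq!(facet_values_shown(10_000, 100), 100);        // default cap
    assert_eq!(facet_values_shown(10_000, 10_000), 10_000);  // raised cap
}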

@@ -24,6 +24,18 @@ static DEFAULT_SETTINGS_VALUES: Lazy<HashMap<&'static str, Value>> = Lazy::new(|
    );
    map.insert("stop_words", json!([]));
    map.insert("synonyms", json!({}));
    map.insert(
        "faceting",
        json!({
            "maxValuesPerFacet": json!(100),
        }),
    );
    map.insert(
        "pagination",
        json!({
            "maxTotalHits": json!(1000),
        }),
    );
    map
});

@@ -43,7 +55,7 @@ async fn get_settings() {
    let (response, code) = index.settings().await;
    assert_eq!(code, 200);
    let settings = response.as_object().unwrap();
-   assert_eq!(settings.keys().len(), 8);
+   assert_eq!(settings.keys().len(), 11);
    assert_eq!(settings["displayedAttributes"], json!(["*"]));
    assert_eq!(settings["searchableAttributes"], json!(["*"]));
    assert_eq!(settings["filterableAttributes"], json!([]));

@@ -61,6 +73,18 @@ async fn get_settings() {
        ])
    );
    assert_eq!(settings["stopWords"], json!([]));
    assert_eq!(
        settings["faceting"],
        json!({
            "maxValuesPerFacet": 100,
        })
    );
    assert_eq!(
        settings["pagination"],
        json!({
            "maxTotalHits": 1000,
        })
    );
}

#[actix_rt::test]
@@ -122,7 +146,7 @@ async fn reset_all_settings() {

    let (response, code) = index.add_documents(documents, None).await;
    assert_eq!(code, 202);
-   assert_eq!(response["uid"], 0);
+   assert_eq!(response["taskUid"], 0);
    index.wait_task(0).await;

    index

@@ -179,7 +203,7 @@ async fn error_update_setting_unexisting_index_invalid_uid() {
    assert_eq!(code, 400);

    let expected = json!({
-       "message": "`test##! ` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
+       "message": "invalid index uid `test##! `, the uid must be an integer or a string containing only alphanumeric characters a-z A-Z 0-9, hyphens - and underscores _.",
        "code": "invalid_index_uid",
        "type": "invalid_request",
        "link": "https://docs.meilisearch.com/errors#invalid_index_uid"});

@@ -188,7 +212,7 @@ async fn error_update_setting_unexisting_index_invalid_uid() {
}

macro_rules! test_setting_routes {
-   ($($setting:ident), *) => {
+   ($($setting:ident $write_method:ident), *) => {
        $(
            mod $setting {
                use crate::common::Server;

@@ -214,7 +238,7 @@ macro_rules! test_setting_routes {
                        .chars()
                        .map(|c| if c == '_' { '-' } else { c })
                        .collect::<String>());
-                   let (response, code) = server.service.post(url, serde_json::Value::Null).await;
+                   let (response, code) = server.service.$write_method(url, serde_json::Value::Null).await;
                    assert_eq!(code, 202, "{}", response);
                    server.index("").wait_task(0).await;
                    let (response, code) = server.index("test").get().await;

@@ -258,13 +282,15 @@ macro_rules! test_setting_routes {
}

test_setting_routes!(
-   filterable_attributes,
-   displayed_attributes,
-   searchable_attributes,
-   distinct_attribute,
-   stop_words,
-   ranking_rules,
-   synonyms
+   filterable_attributes put,
+   displayed_attributes put,
+   searchable_attributes put,
+   distinct_attribute put,
+   stop_words put,
+   ranking_rules put,
+   synonyms put,
+   pagination patch,
+   faceting patch
);
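
The point of the new `$write_method` parameter above is that the newer `pagination` and `faceting` routes are written with PATCH while the older settings keep PUT. A self-contained sketch of the pattern (not the real test harness; `setting_write_verb` and `write_verbs` are hypothetical names):

macro_rules! setting_write_verb {
    ($($setting:ident $write_method:ident),*) => {
        // Expands to one (setting, verb) pair per macro argument.
        fn write_verbs() -> Vec<(&'static str, &'static str)> {
            vec![$((stringify!($setting), stringify!($write_method))),*]
        }
    };
}

setting_write_verb!(stop_words put, pagination patch, faceting patch);

fn main() {
    assert_eq!(
        write_verbs(),
        vec![("stop_words", "put"), ("pagination", "patch"), ("faceting", "patch")]
    );
}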

#[actix_rt::test]
@@ -283,7 +309,7 @@ async fn error_set_invalid_ranking_rules() {
    assert_eq!(response["status"], "failed");

    let expected_error = json!({
-       "message": r#"`manyTheFish` ranking rule is invalid. Valid ranking rules are Words, Typo, Sort, Proximity, Attribute, Exactness and custom ranking rules."#,
+       "message": r#"`manyTheFish` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules."#,
        "code": "invalid_ranking_rule",
        "type": "invalid_request",
        "link": "https://docs.meilisearch.com/errors#invalid_ranking_rule"

@@ -41,7 +41,7 @@ async fn perform_snapshot() {
        ..default_settings(temp.path())
    };

-   let server = Server::new_with_options(options).await;
+   let server = Server::new_with_options(options).await.unwrap();

    let index = server.index("test");
    index

@@ -60,20 +60,17 @@ async fn perform_snapshot() {

    let temp = tempfile::tempdir().unwrap();

-   let snapshot_path = snapshot_dir
-       .path()
-       .to_owned()
-       .join("db.snapshot".to_string());
+   let snapshot_path = snapshot_dir.path().to_owned().join("db.snapshot");

    let options = Opt {
        import_snapshot: Some(snapshot_path),
        ..default_settings(temp.path())
    };

-   let snapshot_server = Server::new_with_options(options).await;
+   let snapshot_server = Server::new_with_options(options).await.unwrap();

    verify_snapshot!(server, snapshot_server, |server| =>
-       server.list_indexes(),
+       server.list_indexes(None, None),
        // for some reason the db sizes differ. this may be due to the compaction options we have
        // set when performing the snapshot
        //server.stats(),

@@ -1,4 +1,5 @@
use serde_json::json;
+use time::{format_description::well_known::Rfc3339, OffsetDateTime};

use crate::common::Server;

@@ -53,15 +54,19 @@ async fn stats() {

    let (response, code) = index.add_documents(documents, None).await;
    assert_eq!(code, 202, "{}", response);
-   assert_eq!(response["uid"], 1);
+   assert_eq!(response["taskUid"], 1);

    index.wait_task(1).await;

+   let timestamp = OffsetDateTime::now_utc();
    let (response, code) = server.stats().await;

    assert_eq!(code, 200);
    assert!(response["databaseSize"].as_u64().unwrap() > 0);
    assert!(response.get("lastUpdate").is_some());
+   let last_update =
+       OffsetDateTime::parse(response["lastUpdate"].as_str().unwrap(), &Rfc3339).unwrap();
+   assert!(last_update - timestamp < time::Duration::SECOND);

    assert_eq!(response["indexes"]["test"]["numberOfDocuments"], 2);
    assert!(response["indexes"]["test"]["isIndexing"] == false);
    assert_eq!(response["indexes"]["test"]["fieldDistribution"]["id"], 2);

@@ -1,22 +1,7 @@
use crate::common::Server;
-use chrono::{DateTime, Utc};
use serde_json::json;

-#[actix_rt::test]
-async fn error_get_task_unexisting_index() {
-    let server = Server::new().await;
-    let (response, code) = server.service.get("/indexes/test/tasks").await;

-    let expected_response = json!({
-        "message": "Index `test` not found.",
-        "code": "index_not_found",
-        "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#index_not_found"
-    });

-    assert_eq!(response, expected_response);
-    assert_eq!(code, 404);
-}
+use time::format_description::well_known::Rfc3339;
+use time::OffsetDateTime;

#[actix_rt::test]
async fn error_get_unexisting_task_status() {

@@ -54,23 +39,7 @@ async fn get_task_status() {
    index.wait_task(0).await;
    let (_response, code) = index.get_task(1).await;
    assert_eq!(code, 200);
    // TODO check resonse format, as per #48
}

-#[actix_rt::test]
-async fn error_list_tasks_unexisting_index() {
-    let server = Server::new().await;
-    let (response, code) = server.index("test").list_tasks().await;

-    let expected_response = json!({
-        "message": "Index `test` not found.",
-        "code": "index_not_found",
-        "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#index_not_found"
-    });

-    assert_eq!(response, expected_response);
-    assert_eq!(code, 404);
-    // TODO check response format, as per #48
-}

#[actix_rt::test]
@@ -90,15 +59,146 @@ async fn list_tasks() {
    assert_eq!(response["results"].as_array().unwrap().len(), 2);
}

#[actix_rt::test]
async fn list_tasks_with_star_filters() {
    let server = Server::new().await;
    let index = server.index("test");
    index.create(None).await;
    index.wait_task(0).await;
    index
        .add_documents(
            serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
            None,
        )
        .await;
    let (response, code) = index.service.get("/tasks?indexUid=test").await;
    assert_eq!(code, 200);
    assert_eq!(response["results"].as_array().unwrap().len(), 2);

    let (response, code) = index.service.get("/tasks?indexUid=*").await;
    assert_eq!(code, 200);
    assert_eq!(response["results"].as_array().unwrap().len(), 2);

    let (response, code) = index.service.get("/tasks?indexUid=*,pasteque").await;
    assert_eq!(code, 200);
    assert_eq!(response["results"].as_array().unwrap().len(), 2);

    let (response, code) = index.service.get("/tasks?type=*").await;
    assert_eq!(code, 200);
    assert_eq!(response["results"].as_array().unwrap().len(), 2);

    let (response, code) = index
        .service
        .get("/tasks?type=*,documentAdditionOrUpdate&status=*")
        .await;
    assert_eq!(code, 200, "{:?}", response);
    assert_eq!(response["results"].as_array().unwrap().len(), 2);

    let (response, code) = index
        .service
        .get("/tasks?type=*,documentAdditionOrUpdate&status=*,failed&indexUid=test")
        .await;
    assert_eq!(code, 200, "{:?}", response);
    assert_eq!(response["results"].as_array().unwrap().len(), 2);

    let (response, code) = index
        .service
        .get("/tasks?type=*,documentAdditionOrUpdate&status=*,failed&indexUid=test,*")
        .await;
    assert_eq!(code, 200, "{:?}", response);
    assert_eq!(response["results"].as_array().unwrap().len(), 2);
}
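
Every variant above returns the same two tasks, which pins down the `*` semantics: a filter list containing "*" matches any value, even when combined with values that match nothing (such as "pasteque"). A minimal sketch of that rule (an assumption inferred from the assertions; `matches` is a hypothetical helper):

fn matches(filter: &[&str], value: &str) -> bool {
    // An absent filter, a wildcard, or explicit membership all accept the value.
    filter.is_empty() || filter.contains(&"*") || filter.contains(&value)
}

fn main() {
    assert!(matches(&["*"], "documentAdditionOrUpdate"));
    assert!(matches(&["*", "pasteque"], "test")); // the wildcard wins
    assert!(matches(&["indexCreation", "documentAdditionOrUpdate"], "indexCreation"));
    assert!(!matches(&["failed"], "succeeded"));
}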

#[actix_rt::test]
async fn list_tasks_status_filtered() {
    let server = Server::new().await;
    let index = server.index("test");
    index.create(None).await;
    index.wait_task(0).await;
    index
        .add_documents(
            serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
            None,
        )
        .await;

    let (response, code) = index.filtered_tasks(&[], &["succeeded"]).await;
    assert_eq!(code, 200, "{}", response);
    assert_eq!(response["results"].as_array().unwrap().len(), 1);

    // We can't be sure that the update isn't already processed so we can't test this
    // let (response, code) = index.filtered_tasks(&[], &["processing"]).await;
    // assert_eq!(code, 200, "{}", response);
    // assert_eq!(response["results"].as_array().unwrap().len(), 1);

    index.wait_task(1).await;

    let (response, code) = index.filtered_tasks(&[], &["succeeded"]).await;
    assert_eq!(code, 200, "{}", response);
    assert_eq!(response["results"].as_array().unwrap().len(), 2);
}

#[actix_rt::test]
async fn list_tasks_type_filtered() {
    let server = Server::new().await;
    let index = server.index("test");
    index.create(None).await;
    index.wait_task(0).await;
    index
        .add_documents(
            serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
            None,
        )
        .await;

    let (response, code) = index.filtered_tasks(&["indexCreation"], &[]).await;
    assert_eq!(code, 200, "{}", response);
    assert_eq!(response["results"].as_array().unwrap().len(), 1);

    let (response, code) = index
        .filtered_tasks(&["indexCreation", "documentAdditionOrUpdate"], &[])
        .await;
    assert_eq!(code, 200, "{}", response);
    assert_eq!(response["results"].as_array().unwrap().len(), 2);
}

#[actix_rt::test]
async fn list_tasks_status_and_type_filtered() {
    let server = Server::new().await;
    let index = server.index("test");
    index.create(None).await;
    index.wait_task(0).await;
    index
        .add_documents(
            serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
            None,
        )
        .await;

    let (response, code) = index.filtered_tasks(&["indexCreation"], &["failed"]).await;
    assert_eq!(code, 200, "{}", response);
    assert_eq!(response["results"].as_array().unwrap().len(), 0);

    let (response, code) = index
        .filtered_tasks(
            &["indexCreation", "documentAdditionOrUpdate"],
            &["succeeded", "processing", "enqueued"],
        )
        .await;
    assert_eq!(code, 200, "{}", response);
    assert_eq!(response["results"].as_array().unwrap().len(), 2);
}

macro_rules! assert_valid_summarized_task {
    ($response:expr, $task_type:literal, $index:literal) => {{
        assert_eq!($response.as_object().unwrap().len(), 5);
-       assert!($response["uid"].as_u64().is_some());
+       assert!($response["taskUid"].as_u64().is_some());
        assert_eq!($response["indexUid"], $index);
        assert_eq!($response["status"], "enqueued");
        assert_eq!($response["type"], $task_type);
        let date = $response["enqueuedAt"].as_str().expect("missing date");
-       date.parse::<DateTime<Utc>>().unwrap();
+       OffsetDateTime::parse(date, &Rfc3339).unwrap();
    }};
}
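
The macro's date check is also where the chrono-to-time migration shows: the old `date.parse::<DateTime<Utc>>()` becomes an explicit RFC 3339 parse. A standalone example of the new code path (the date literal is made up; the API is `time` 0.3 with its "parsing" feature enabled):

use time::{format_description::well_known::Rfc3339, OffsetDateTime};

fn main() {
    // An `enqueuedAt` value in the RFC 3339 shape the task API returns.
    let date = "2022-05-04T12:30:45.5678Z";
    let parsed = OffsetDateTime::parse(date, &Rfc3339).unwrap();
    assert_eq!(parsed.year(), 2022);
}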

@@ -117,16 +217,16 @@ async fn test_summarized_task_view() {
    assert_valid_summarized_task!(response, "settingsUpdate", "test");

    let (response, _) = index.update_documents(json!([{"id": 1}]), None).await;
-   assert_valid_summarized_task!(response, "documentPartial", "test");
+   assert_valid_summarized_task!(response, "documentAdditionOrUpdate", "test");

    let (response, _) = index.add_documents(json!([{"id": 1}]), None).await;
-   assert_valid_summarized_task!(response, "documentAddition", "test");
+   assert_valid_summarized_task!(response, "documentAdditionOrUpdate", "test");

    let (response, _) = index.delete_document(1).await;
    assert_valid_summarized_task!(response, "documentDeletion", "test");

    let (response, _) = index.clear_all_documents().await;
-   assert_valid_summarized_task!(response, "clearAll", "test");
+   assert_valid_summarized_task!(response, "documentDeletion", "test");

    let (response, _) = index.delete().await;
    assert_valid_summarized_task!(response, "indexDeletion", "test");

@@ -1,68 +1,65 @@
[package]
name = "meilisearch-lib"
-version = "0.25.2"
-edition = "2018"
-resolver = "2"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+version = "0.28.1"
+edition = "2021"

[dependencies]
-actix-web = { version = "4.0.0-beta.9", features = ["rustls"] }
-actix-web-static-files = { git = "https://github.com/MarinPostma/actix-web-static-files.git", rev = "39d8006", optional = true }
-anyhow = { version = "1.0.43", features = ["backtrace"] }
-async-stream = "0.3.2"
-async-trait = "0.1.51"
-byte-unit = { version = "4.0.12", default-features = false, features = ["std"] }
+actix-web = { version = "4.0.1", default-features = false }
+anyhow = { version = "1.0.56", features = ["backtrace"] }
+async-stream = "0.3.3"
+async-trait = "0.1.52"
+atomic_refcell = "0.1.8"
+byte-unit = { version = "4.0.14", default-features = false, features = ["std", "serde"] }
bytes = "1.1.0"
-chrono = { version = "0.4.19", features = ["serde"] }
clap = { version = "3.1.6", features = ["derive", "env"] }
-crossbeam-channel = "0.5.2"
csv = "1.1.6"
+crossbeam-channel = "0.5.1"
derivative = "2.2.0"
either = "1.6.1"
-flate2 = "1.0.21"
+flate2 = "1.0.22"
fs_extra = "1.2.0"
fst = "0.4.7"
-futures = "0.3.17"
-futures-util = "0.3.17"
heed = { git = "https://github.com/Kerollmops/heed", tag = "v0.12.1" }
-http = "0.2.4"
-indexmap = { version = "1.7.0", features = ["serde-1"] }
-itertools = "0.10.1"
+futures = "0.3.21"
+futures-util = "0.3.21"
+http = "0.2.6"
+indexmap = { version = "1.8.0", features = ["serde-1"] }
+itertools = "0.10.3"
lazy_static = "1.4.0"
log = "0.4.14"
-meilisearch-error = { path = "../meilisearch-error" }
meilisearch-auth = { path = "../meilisearch-auth" }
-milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.21.1" }
+meilisearch-types = { path = "../meilisearch-types" }
+milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.31.2" }
mime = "0.3.16"
-num_cpus = "1.13.0"
-once_cell = "1.8.0"
-parking_lot = "0.11.2"
-rand = "0.8.4"
-rayon = "1.5.1"
-regex = "1.5.4"
-rustls = "0.19.1"
-serde = { version = "1.0.130", features = ["derive"] }
-serde_json = { version = "1.0.67", features = ["preserve_order"] }
-siphasher = "0.3.7"
-slice-group-by = "0.2.6"
-structopt = "0.3.23"
-tar = "0.4.37"
-tempfile = "3.2.0"
-thiserror = "1.0.28"
-tokio = { version = "1.11.0", features = ["full"] }
-uuid = { version = "0.8.2", features = ["serde"] }
-walkdir = "2.3.2"
+num_cpus = "1.13.1"
obkv = "0.2.0"
pin-project = "1.0.8"
-whoami = { version = "1.1.3", optional = true }
-reqwest = { version = "0.11.4", features = ["json", "rustls-tls"], default-features = false, optional = true }
-sysinfo = "0.20.2"
-derivative = "2.2.0"
-fs_extra = "1.2.0"
+once_cell = "1.10.0"
+parking_lot = "0.12.0"
+permissive-json-pointer = { path = "../permissive-json-pointer" }
+rand = "0.8.5"
rayon = "1.5.1"
+regex = "1.5.5"
+reqwest = { version = "0.11.9", features = ["json", "rustls-tls"], default-features = false, optional = true }
+roaring = "0.9.0"
+rustls = "0.20.4"
+serde = { version = "1.0.136", features = ["derive"] }
+serde_json = { version = "1.0.79", features = ["preserve_order"] }
+siphasher = "0.3.10"
+slice-group-by = "0.3.0"
+sysinfo = "0.23.5"
+tar = "0.4.38"
+tempfile = "3.3.0"
+thiserror = "1.0.30"
+time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
+tokio = { version = "1.17.0", features = ["full"] }
+uuid = { version = "1.1.2", features = ["serde", "v4"] }
walkdir = "2.3.2"
+whoami = { version = "1.2.1", optional = true }

[dev-dependencies]
-actix-rt = "2.2.0"
-mockall = "0.10.2"
-paste = "1.0.5"
-nelson = { git = "https://github.com/MarinPostma/nelson.git", rev = "e5f4ff046c21e7e986c7cb31550d1c9e7f0b693b"}
-meilisearch-error = { path = "../meilisearch-error", features = ["test-traits"] }
+actix-rt = "2.7.0"
+meilisearch-types = { path = "../meilisearch-types", features = ["test-traits"] }
+mockall = "0.11.0"
+nelson = { git = "https://github.com/meilisearch/nelson.git", rev = "675f13885548fb415ead8fbb447e9e6d9314000a"}
+paste = "1.0.6"
+proptest = "1.0.0"
+proptest-derive = "0.3.0"

@@ -17,4 +17,3 @@ cc 3a01c78db082434b8a4f8914abf0d1059d39f4426d16df20d72e1bd7ebb94a6a # shrinks to
cc c450806df3921d1e6fe9b6af93d999e8196d0175b69b64f1810802582421e94a # shrinks to task = Task { id: 0, index_uid: IndexUid("a"), content: CreateIndex { primary_key: Some("") }, events: [] }, index_exists = false, index_op_fails = false, any_int = 0
cc fb6b98947cbdbdee05ed3c0bf2923aad2c311edc276253642eb43a0c0ec4888a # shrinks to task = Task { id: 0, index_uid: IndexUid("A"), content: CreateIndex { primary_key: Some("") }, events: [] }, index_exists = false, index_op_fails = true, any_int = 0
cc 1aa59d8e22484e9915efbb5818e1e1ab684aa61b166dc82130d6221663ba00bf # shrinks to task = Task { id: 0, index_uid: IndexUid("a"), content: DocumentDeletion(Clear), events: [] }, index_exists = true, index_op_fails = false, any_int = 0
cc 2e8644e6397b5f76e0b79f961fa125e2f45f42f26e03c453c9a174dfb427500d # shrinks to task = Task { id: 0, index_uid: IndexUid("0"), content: SettingsUpdate { settings: Settings { displayed_attributes: NotSet, searchable_attributes: NotSet, filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, synonyms: NotSet, distinct_attribute: NotSet, _kind: PhantomData }, is_deletion: false, allow_index_creation: false }, events: [] }, index_exists = false, index_op_fails = false, any_int = 0