mirror of
https://github.com/meilisearch/meilisearch.git
synced 2025-07-18 12:20:48 +00:00
Compare commits
3191 Commits
release-v1
...
prototype-
Author | SHA1 | Date | |
---|---|---|---|
3fa40a3f9c | |||
5675847a5f | |||
1c77117d02 | |||
26dc415d9e | |||
89a4e7cee4 | |||
f2040e50b2 | |||
2b62e85622 | |||
c13e3d5c8a | |||
73a8018eb1 | |||
087866d59f | |||
9111f5176f | |||
b9dd092a62 | |||
ca99bc3188 | |||
2e49d6aec1 | |||
51043f78f0 | |||
a490a11325 | |||
101f5a20d2 | |||
6ce1ce77e6 | |||
ec8f685d84 | |||
5758268866 | |||
4d037e6693 | |||
96da5130a4 | |||
3e19702de6 | |||
1e762d151f | |||
0b38f211ac | |||
f6524a6858 | |||
65ad8cce36 | |||
42650f82e8 | |||
a37da36766 | |||
85d96d35a8 | |||
bf66e97b48 | |||
a7ea5ec748 | |||
dc7ba77e57 | |||
13f870e993 | |||
1a79fd0c3c | |||
f759ec7fad | |||
4d691d071a | |||
23d1c86825 | |||
c4a40e7110 | |||
e01980c6f4 | |||
25209a3590 | |||
3064ea6495 | |||
46ec8a97e9 | |||
c42a65a297 | |||
d08f8690d2 | |||
ad5f25d880 | |||
4d352a21ac | |||
4a4210c116 | |||
3533d4f2bb | |||
3625389057 | |||
eace6df91b | |||
83ab8cf4e5 | |||
cd2573fcc3 | |||
9f7981df28 | |||
e615fa5ec6 | |||
13f1277637 | |||
4919774f2e | |||
a3da680ce6 | |||
11e394dba1 | |||
469d2f2a9c | |||
ce6507d20c | |||
b92da5d15a | |||
ed3dfbe729 | |||
441641397b | |||
a35d3fc708 | |||
745c1a2668 | |||
a95128df6b | |||
e0537c3870 | |||
da220294f6 | |||
78e611f282 | |||
d8381eb790 | |||
b212aef5db | |||
6bf66f35be | |||
52ab114f6c | |||
dcbfecf42c | |||
9ca6f59546 | |||
aa7537a11e | |||
972bb2831c | |||
f9ddd32545 | |||
d5059520aa | |||
1c3642c9b2 | |||
d2d2bacaf2 | |||
30edba3497 | |||
84e7bd9342 | |||
2b74e4d116 | |||
b5fe0b2b07 | |||
0f0cd2d929 | |||
fc8c1d118d | |||
0548ab9038 | |||
143acb9cdc | |||
4b92f1b269 | |||
c12a1cd956 | |||
8af8aa5a33 | |||
6df2ba93a9 | |||
3680a6bf1e | |||
732c52093d | |||
05cc463fbc | |||
1afde4fea5 | |||
f8f190cd40 | |||
3a408e8287 | |||
d3e5b10e23 | |||
1aaf24ccbf | |||
90bc230820 | |||
342c4ff85d | |||
c85392ce40 | |||
8875d24a48 | |||
c470b67fa2 | |||
c0e081cd98 | |||
b60840ebff | |||
fdc1763838 | |||
75819bc940 | |||
7b8cc25625 | |||
2be641f373 | |||
ddcb661c19 | |||
d09b771bce | |||
d89d2efb7e | |||
f284a9c0dd | |||
134e7fc433 | |||
0cba919228 | |||
aa63091752 | |||
58735d6d8f | |||
1b514517f5 | |||
11f814821d | |||
30fb1153cc | |||
3b2c8b9f25 | |||
2a7f9adf78 | |||
608ceea440 | |||
79001b9c97 | |||
59b12fca87 | |||
48f5bb1693 | |||
93188b3c88 | |||
bc4efca611 | |||
feaf25a95d | |||
414b3fae89 | |||
899baa0ea5 | |||
374095d42c | |||
dd007dceca | |||
3ae587205c | |||
1bf2694604 | |||
ed9cc1af55 | |||
b41a6cbd7a | |||
c8af572697 | |||
249053e514 | |||
ff2cf2a5ae | |||
b448aca49c | |||
55bad07c16 | |||
380469665f | |||
3421125a55 | |||
0b2200e6e7 | |||
0fd5ab9fcc | |||
14293f6c8f | |||
d3a94e8b25 | |||
1944077a7f | |||
8195d366fa | |||
cfd1b2cc97 | |||
19b044b4e6 | |||
e0730b55b3 | |||
729fa3770d | |||
9cbc85b2f9 | |||
a3cf104736 | |||
a109802d45 | |||
2d8060df80 | |||
47b66e49b8 | |||
8f2e971879 | |||
654a3a9e19 | |||
d1fdbb63da | |||
fb9d9239b2 | |||
a7a0891210 | |||
84d9c731f8 | |||
11f4724957 | |||
85182497ab | |||
3e4a356638 | |||
dfd9c384aa | |||
f0b4046c43 | |||
4b953d62fb | |||
c2f4b6ced0 | |||
1e6cbcaf12 | |||
066c6bd875 | |||
fd583501d7 | |||
bff4bde0ce | |||
cd45d21d6e | |||
f9960be115 | |||
bd9aba4d77 | |||
8edad8291b | |||
b3f60ee805 | |||
5acf953298 | |||
d9cebff61c | |||
30f7bd03f6 | |||
df0d9bb878 | |||
5230ddb3ea | |||
d6a7c28e4d | |||
e55efc419e | |||
644e136aee | |||
ec0ecb5515 | |||
b4fabce36d | |||
9350a7b017 | |||
be69ab320d | |||
d59d75c9cd | |||
38b7b31beb | |||
7a01f20df7 | |||
c20c38a7fa | |||
5ab46324c4 | |||
325f17488a | |||
e7ff987c46 | |||
244003e36f | |||
1f813a6f3b | |||
96183e804a | |||
5cfb066b0a | |||
a5f44a5ceb | |||
7ab48ed8c7 | |||
a94e78ffb0 | |||
e7bb8c940f | |||
8cb85294ef | |||
d0e9d65025 | |||
540a396e49 | |||
a81165f0d8 | |||
d6585eb10b | |||
f7d90ad19f | |||
bc25f378e8 | |||
31630c85d0 | |||
ab09dc0167 | |||
618c54915d | |||
130d2061bd | |||
66ddee4390 | |||
90a6c01495 | |||
e58426109a | |||
f513cf930a | |||
8a13ed7e3f | |||
1b8e4d0301 | |||
996619b22a | |||
2c9822a337 | |||
7276deee0a | |||
6a068fe36a | |||
f7e7f438f8 | |||
8d826e478f | |||
ba8dcc2d78 | |||
4d308d5237 | |||
7ca91ebb71 | |||
1ba8a40d61 | |||
47f6a3ad3d | |||
b4c01581cd | |||
ae17c62e24 | |||
a1148c09c2 | |||
9c5f64769a | |||
ebe23b04c9 | |||
13b7c826c1 | |||
67fd3b08ef | |||
5440f43fd3 | |||
d9460a76f4 | |||
d1ddaa223d | |||
f7ecea142e | |||
337e75b0e4 | |||
b5691802a3 | |||
1690aec7f1 | |||
f267bed352 | |||
6e50f23896 | |||
597d57bf1d | |||
4c8a0179ba | |||
c69cbec64a | |||
01ac8344ad | |||
3508ba2f20 | |||
ce328c329d | |||
959e4607bb | |||
4b4ffb8ec9 | |||
3951fe22ab | |||
4d5bc9df4c | |||
ec2f8e8040 | |||
406b8bd248 | |||
62b9c6fbee | |||
b439d36807 | |||
faceb661e3 | |||
4129d657e2 | |||
1e6fe71a67 | |||
0fba08cd72 | |||
189d4c3b70 | |||
2fff6f7f23 | |||
fddfb37f1f | |||
52b4090286 | |||
3cabfb448b | |||
77cf5b3787 | |||
3acc5bbb15 | |||
114436926f | |||
0f7904fb38 | |||
3f13608002 | |||
590b1d8fb7 | |||
4708d9b016 | |||
0d2e7bcc13 | |||
55fbfb6124 | |||
be9741eb8a | |||
58fe260c72 | |||
24e5f6f7a9 | |||
0177d66149 | |||
9b87c36200 | |||
1861c69964 | |||
cb2b5eb38e | |||
53aa0a1b54 | |||
12b26cd54e | |||
061b1e6d7c | |||
0d6e8b5c31 | |||
d48cdc67a0 | |||
35c16ad047 | |||
2997d1f186 | |||
2a5997fb20 | |||
ee8a9e0bad | |||
3b0737a092 | |||
fdd02105ac | |||
aa9592455c | |||
01e24dd630 | |||
ae6bb1ce17 | |||
5fd28620cd | |||
728710d63a | |||
fa81381865 | |||
b96a682f16 | |||
d0f048c068 | |||
223e82a10d | |||
9507ff5e31 | |||
c2b025946a | |||
950f73b8bb | |||
3a818c5e87 | |||
7871d12025 | |||
d74134ce3a | |||
5ac129bfa1 | |||
e7153e0a97 | |||
37a24a4a05 | |||
3fb67f94f7 | |||
6592746337 | |||
cf5145b542 | |||
efea1e5837 | |||
b744f33530 | |||
31bb61ba99 | |||
abb4522f76 | |||
d4f54fc55e | |||
ef084ef042 | |||
3524bd1257 | |||
a50b058557 | |||
d4f6216966 | |||
77acafe534 | |||
53afda3237 | |||
abb19d368d | |||
b4a52a622e | |||
8d7d8cdc2f | |||
626a93b348 | |||
af65fe201a | |||
9b83b1deb0 | |||
e9eb271499 | |||
3281a88d08 | |||
5a644054ab | |||
16fefd364e | |||
e7994cdeb3 | |||
00bad8c716 | |||
862714a18b | |||
d18ebe4f3a | |||
7169d85115 | |||
f5f5f03ec0 | |||
9b2653427d | |||
56b7209f26 | |||
9b1f439a91 | |||
01c7d2de8f | |||
a86aeba411 | |||
514b60f8c8 | |||
a2b151e877 | |||
384fdc2df4 | |||
83e5b4ed0d | |||
272cd7ebbd | |||
c63c7377e6 | |||
9259cdb12e | |||
5b50e49522 | |||
65474c8de5 | |||
fbb1ba3de0 | |||
a59ca28e2c | |||
825f742000 | |||
dd491320e5 | |||
c6ff97a220 | |||
49240c367a | |||
1e6e624078 | |||
8b4e07e1a3 | |||
2853009987 | |||
aa59c3bc2c | |||
7b1d8f4c6d | |||
a49ddec9df | |||
05fe856e6e | |||
c0cdaf9f53 | |||
e9cf58d584 | |||
31628c5cd4 | |||
3004e281d7 | |||
14e8d0aaa2 | |||
1c58cf8426 | |||
5155fd2bf1 | |||
9ec9c204d3 | |||
78b9304d52 | |||
0465ba4a05 | |||
2099991dd1 | |||
c232cdabf5 | |||
4e266211bf | |||
57fa689131 | |||
10626dddfc | |||
9051065c22 | |||
e8c76cf7bf | |||
3f1729a17f | |||
cab2b6bcda | |||
c4979a2fda | |||
23931f8a4f | |||
aa414565bb | |||
1db152046e | |||
c27ea2677f | |||
caa1e1b923 | |||
71f18e4379 | |||
600e3dd1c5 | |||
362eb0de86 | |||
998d46ac10 | |||
6c85c0d95e | |||
0e1fbbf7c6 | |||
6806640ef0 | |||
173e37584c | |||
6ba4d5e987 | |||
dd12d44134 | |||
a61495d660 | |||
c8e251bf24 | |||
a938fbde4a | |||
dcf3f1d18a | |||
66d0c63694 | |||
132191360b | |||
345c99d5bd | |||
89d696c1e3 | |||
c645853529 | |||
a70ab8b072 | |||
48aae76b15 | |||
23bf572dea | |||
864f6410ed | |||
c9bf6bb2fa | |||
46249ea901 | |||
ce0d1e0e13 | |||
5065d8b0c1 | |||
a83007c013 | |||
79e0a6dd4e | |||
2d88089129 | |||
1d937f831b | |||
6c659dc12f | |||
a8531053a0 | |||
cf34d1c95f | |||
1a9c58a7ab | |||
64571c8288 | |||
72123c458b | |||
d5881519cb | |||
ea016d97af | |||
70c906d4b4 | |||
fa2ea4a379 | |||
030263caa3 | |||
c25779afba | |||
0f33a65468 | |||
7c9a8b1e1b | |||
f45daf8031 | |||
fb1260ee88 | |||
48a51e5cd6 | |||
2f8eb4f54a | |||
dea101e3d9 | |||
175e8a8495 | |||
6da54d0cb6 | |||
667bb87e35 | |||
df48ac8803 | |||
ff86073288 | |||
0ad53784e7 | |||
7935bef4cd | |||
e064c52544 | |||
e106b16148 | |||
eddefb0e0f | |||
dff2715ef3 | |||
5deea631ea | |||
c5f22be6e1 | |||
b4b859ec8c | |||
b1d61f5a02 | |||
febc8d1b52 | |||
7dc04747fd | |||
7c0cd7172d | |||
b99ef3d336 | |||
43ff236df8 | |||
19ab4d1a15 | |||
9287858997 | |||
7e2fd82e41 | |||
24c0775c67 | |||
3092cf0448 | |||
37d4551e8e | |||
da48506f15 | |||
2f5b9fbbd8 | |||
7faa9a22f6 | |||
370d88f626 | |||
d34faa8f9c | |||
e5d0bef6d8 | |||
76288fad72 | |||
076a3d371c | |||
3bbf760542 | |||
fd5c48941a | |||
df3986cd83 | |||
e704728ee7 | |||
34ed6518ae | |||
c0ede6d152 | |||
577e7126f9 | |||
22219fd88f | |||
a9e17ab8c6 | |||
2dd948a4a1 | |||
76cf1bff87 | |||
3d1046369c | |||
4f1ccbc495 | |||
37489fd495 | |||
c0d8eb295d | |||
bcd3f6054a | |||
3a0314f9de | |||
fa4d8b8348 | |||
d9e19c89c5 | |||
18bf740ee2 | |||
0202ff8ab4 | |||
fbe4ab158e | |||
92318ca573 | |||
6ca7a109b9 | |||
d4d4702f1b | |||
2648bbca25 | |||
562c86ea01 | |||
7ae10abb6b | |||
dc533584c6 | |||
442c1e36de | |||
66b5e4b548 | |||
89ac1015f3 | |||
ca25904c26 | |||
8a1b1a95f3 | |||
8d47d2d018 | |||
5082cd5e67 | |||
750a2b6842 | |||
bc7d4112d9 | |||
88a18677d0 | |||
68e30214ca | |||
b985b96e4e | |||
71e7900c67 | |||
431782f3ee | |||
3db613ff77 | |||
5822764be9 | |||
c63294f331 | |||
a529bf160c | |||
f1119f2dc2 | |||
1db7d5d851 | |||
80b060f920 | |||
fdf043580c | |||
f62703cd67 | |||
76f82c880d | |||
6eeba3a8ab | |||
28d6a4466d | |||
1ba2fae3ae | |||
28d6ab78de | |||
3ba5dfb6ec | |||
a23fbf6c7b | |||
596a98f7c6 | |||
14c4a222da | |||
690bb2e5cc | |||
d0f2c9c72e | |||
42577403d8 | |||
c8c5944094 | |||
4b65851793 | |||
10d4a1a9af | |||
ad35edfa32 | |||
033417e9cc | |||
ac5a1e4c4b | |||
3eb9a08b5c | |||
900bae3d9d | |||
28b7d73d4a | |||
6841f167b4 | |||
c88b6f331f | |||
09a94e0db3 | |||
39407885c2 | |||
a3e41ba33e | |||
ce807d760b | |||
bbecab8948 | |||
5cff435bf6 | |||
8aa808d51b | |||
1e9ac00800 | |||
b08a49a16e | |||
23f4e82b53 | |||
119e6d8811 | |||
a8f6f108e0 | |||
1479050f7a | |||
97b8c32e22 | |||
cb8d5f2d4b | |||
35f6c624bc | |||
1116788475 | |||
951a5b5832 | |||
1c670d7fa0 | |||
6cc3797aa1 | |||
faf1e17a27 | |||
4c519c2ab3 | |||
eb28d4c525 | |||
9ac981d025 | |||
74859ecd61 | |||
8ae441a4db | |||
042d86cbb3 | |||
dd120e0e16 | |||
18796d6e6a | |||
c91bfeaf15 | |||
91048d209d | |||
28961b2ad1 | |||
895ab2906c | |||
f11c7d4b62 | |||
e79f6f87f6 | |||
5367d8f05a | |||
52686da028 | |||
8c074f5028 | |||
49e18da23e | |||
54240db495 | |||
e1ed4bc750 | |||
9bd1cfb3a3 | |||
a341c94871 | |||
f46cf46b8c | |||
c3a30a5a91 | |||
143e3cf948 | |||
ab2adba183 | |||
74d1a67a99 | |||
91ce8a5e67 | |||
fd7ae1883b | |||
42a3cdca66 | |||
a43765d454 | |||
769576fd94 | |||
8fb7b1d10f | |||
d494c29768 | |||
74dcfe9676 | |||
1b1703a609 | |||
62358bd31c | |||
fb5e4957a6 | |||
8de3c9f737 | |||
43a19d0709 | |||
29d14bed90 | |||
f3b54337f9 | |||
7f3ae40204 | |||
a53536836b | |||
b095325bf8 | |||
d7ad39ad77 | |||
849de089d2 | |||
7f25007d31 | |||
c810af3ebf | |||
c0b77773ba | |||
7481559e8b | |||
83c765ce6c | |||
4c91037602 | |||
825923f6fc | |||
e405702733 | |||
6fa877efb0 | |||
4b1cd10653 | |||
47748395dc | |||
ff595156d7 | |||
8770088df3 | |||
827c1c8447 | |||
764df24b7d | |||
4570d5bf3a | |||
746b31c1ce | |||
eaad84bd1d | |||
c690c4fec4 | |||
ea9ac46f28 | |||
93db755d57 | |||
93f130a400 | |||
860c993ef7 | |||
67dda0678f | |||
2db6347686 | |||
421a9cf05e | |||
7b4b57ecc8 | |||
8f64fba1ce | |||
9882029fa4 | |||
5f56e6dd58 | |||
c88c3637b4 | |||
97fd9ac493 | |||
821d92b5d0 | |||
0b60928cbc | |||
42114325cd | |||
7a38fe624f | |||
1b005f697d | |||
fbec48f56e | |||
a377a49218 | |||
41cbaad1cb | |||
a015e232ab | |||
3ebc99473f | |||
fadea504ed | |||
d27007005e | |||
fcb09ccc3d | |||
734a9ecea8 | |||
69fcd3d05e | |||
1ca7778e6a | |||
a11d992923 | |||
781691191a | |||
ae8660e585 | |||
d80ce00623 | |||
2d66fdc8e9 | |||
b297b5deb0 | |||
0d71c80ba6 | |||
b2054d3f6c | |||
65a3086cf1 | |||
426d63b01b | |||
b078477d80 | |||
5c525168a0 | |||
39b62b7158 | |||
3f97f630ed | |||
0bc1a18f52 | |||
643d99e0f9 | |||
a36b1dbd70 | |||
5672165e44 | |||
d563ed8a39 | |||
36cae3b480 | |||
064158e4e2 | |||
77d32d0ee8 | |||
f4569b04ad | |||
2a1a7ef00a | |||
758b4acea7 | |||
a4e8158239 | |||
151e52c481 | |||
a2690ea8d4 | |||
33f61d2cd4 | |||
2922c5c899 | |||
7681be5367 | |||
50bc156257 | |||
d8207356f4 | |||
2d58b28f43 | |||
fd60a39f1c | |||
369c05732e | |||
34d04f3d3f | |||
a27f329e3a | |||
b216ddba63 | |||
d97fb6117e | |||
c45d1e3610 | |||
5c0668afcf | |||
20f05efb3c | |||
cbf029f64c | |||
bffabf9cc6 | |||
f647b20818 | |||
d91f8fc493 | |||
3296cf7ae6 | |||
89675e5f15 | |||
47b7d515ed | |||
2ba4629938 | |||
982dd76042 | |||
3505ee47f8 | |||
b2d25c07d7 | |||
b9d8bd77fc | |||
8a66ba01d8 | |||
8a6d548041 | |||
b452358124 | |||
bfb1f9279b | |||
48dabd27ea | |||
9c3830a19c | |||
ff6b8dfac4 | |||
ec7de4bae7 | |||
184b8afd9e | |||
29961b8c6b | |||
0b08413c98 | |||
474d4ec498 | |||
0544b60974 | |||
4223c51838 | |||
6f71a2b38b | |||
7c9935f96a | |||
f7ae8bc065 | |||
3d8a3d22d1 | |||
30f88350c7 | |||
4c4baaf1ce | |||
55e8046551 | |||
4e4d8dfda7 | |||
de3c4f1986 | |||
5f4497935f | |||
3f69dd6450 | |||
1c4b1b3b2d | |||
b4f1e9bc36 | |||
abd65d9307 | |||
30fc376713 | |||
2a1787ed22 | |||
d1a31afdd6 | |||
60018d0fe4 | |||
13b1abceaf | |||
3521a3a0b2 | |||
40a53f8824 | |||
f5ca421227 | |||
3f048927a0 | |||
e7c0617699 | |||
798aa4ee92 | |||
4fd6fd9bef | |||
a2cd7214f0 | |||
0ce1d6d487 | |||
b3166df7ea | |||
1803998017 | |||
1af3089456 | |||
d1fc42b53a | |||
e64571a881 | |||
497187083b | |||
32e2848a74 | |||
6b3da8a6de | |||
0769090dd6 | |||
1d507c84b2 | |||
1b78231e18 | |||
2b1f6a7f11 | |||
6993924f32 | |||
0c7d1f761e | |||
e3d30e28ef | |||
63af1e9f28 | |||
f073a86387 | |||
9dd01ff44b | |||
507a7bad96 | |||
cde62fcb5b | |||
03a82136dc | |||
e68758cec4 | |||
4fb47492e5 | |||
5bab8cf7ec | |||
97005dd505 | |||
eabef5194a | |||
ebb2494879 | |||
0cec352d2b | |||
a97281af08 | |||
55605435bc | |||
e6bea99974 | |||
9e32ac7cb2 | |||
302d6cccd7 | |||
21b7d709ad | |||
02fd06ea0b | |||
11ee7daa0f | |||
00746b32c0 | |||
1cce613399 | |||
be9786bed9 | |||
c3f4835e8e | |||
49f58b2c47 | |||
6a10e85707 | |||
c505fa9d7d | |||
9519e60f97 | |||
b5df889dcb | |||
31155dce4c | |||
8d36570958 | |||
939e7faf31 | |||
32c6062e65 | |||
f097aafa1c | |||
777b387dc4 | |||
b0f3dc2c06 | |||
4b166bea2b | |||
5943100754 | |||
b24def3281 | |||
402dcd6b2f | |||
13c95d25aa | |||
a8defb585b | |||
339a4b0789 | |||
904fd2f6d1 | |||
229405aeb9 | |||
249e051cd4 | |||
fc0e7382fe | |||
97fb64e40e | |||
69edbf9f6d | |||
8957251eed | |||
c72535531b | |||
916c23e7be | |||
ad9937c755 | |||
171c942282 | |||
e2ae3b24aa | |||
fc7618d49b | |||
7f88c4ff2f | |||
96d4242b93 | |||
5114686394 | |||
3322018c06 | |||
0276d5212a | |||
e2ffc3d69a | |||
739da9fd4d | |||
2af93966e0 | |||
2c47500bc3 | |||
406ee31d1a | |||
2d8d0af1a6 | |||
e0a8f8cb5a | |||
be3b00350c | |||
80d34a4169 | |||
e3ee553dcc | |||
bebd050961 | |||
1f1beae077 | |||
55724f2412 | |||
6d50ea0830 | |||
f37c86e0b2 | |||
098c410612 | |||
ee10cb8c87 | |||
d38cc73630 | |||
e688581c36 | |||
4ac8f96342 | |||
1c9555566e | |||
303d740245 | |||
250743885d | |||
5eecb8489d | |||
0e5c3b1f64 | |||
f53bdc4320 | |||
0a301b5f88 | |||
a993b68684 | |||
80c7a00567 | |||
67d8cec209 | |||
2a846aaae7 | |||
d6eacb2aac | |||
212dbfa3b5 | |||
456da5de9c | |||
46e26ab550 | |||
cda4ba2bb6 | |||
ae59d37b75 | |||
f2cf981641 | |||
50954d31fa | |||
1b5b5778c1 | |||
d3731dda48 | |||
51a2613c5c | |||
82e1c4f468 | |||
5bdf5c0aaf | |||
282b2e3b98 | |||
5e754b3ee0 | |||
e1612fcb01 | |||
9dd4b33a9a | |||
de22116b3d | |||
5f78522044 | |||
87e2bc3bed | |||
61b58b115a | |||
d3182f3830 | |||
f698e6cfdf | |||
f70856bab1 | |||
80588daae5 | |||
e2ebed62b1 | |||
8284bd760f | |||
8d0ace2d64 | |||
86c34a996b | |||
eba7af1d2c | |||
e0d24104a3 | |||
2db738dbac | |||
84dd2e4df1 | |||
3d06ea41ea | |||
3958db4b17 | |||
935a724c57 | |||
ed29cceae9 | |||
bb9e33bf85 | |||
7c0e544839 | |||
d19c8672bb | |||
57c9f03e51 | |||
467e742bd1 | |||
cd5aaa3a9f | |||
8ceb199dca | |||
777eb3fa00 | |||
0caadedd3b | |||
ac3baafbe8 | |||
990a861241 | |||
d95d02cb8a | |||
f00108d2ec | |||
f7c8730d09 | |||
a651397afc | |||
2000db8453 | |||
92cc3550d8 | |||
cd3bca06e9 | |||
87576cf26c | |||
6dc6a5d874 | |||
e75829aded | |||
d00d2aab3f | |||
f46a8ab2e2 | |||
c3b75bbe5d | |||
c7711daca3 | |||
f18a4581f1 | |||
8ce8bbcdfc | |||
bd12989610 | |||
24a298a83c | |||
d85cd9bf1a | |||
37b3c5c323 | |||
1b1ad1923b | |||
a836b8e703 | |||
3328560788 | |||
cf76ec7b37 | |||
abf1cf9cd5 | |||
b09676779d | |||
70465aa5ce | |||
3009981d31 | |||
401e956128 | |||
48eafc546f | |||
6add470805 | |||
13175f2339 | |||
1a1ad8a792 | |||
4492605a78 | |||
fe5a0219e1 | |||
5ff066c3e7 | |||
6770eb2a87 | |||
0d43ddbd85 | |||
3950ec8d3c | |||
3b35ebda50 | |||
4bcfd14a45 | |||
a07f0a4a43 | |||
2dec6e86e9 | |||
c965200010 | |||
d55f0e2e53 | |||
d53a80b408 | |||
ecb88143f9 | |||
03eb5d87c1 | |||
a1d7ed1258 | |||
f3c0b05ae8 | |||
f4ec1abb9b | |||
d35afa0cf5 | |||
752d031010 | |||
c7322f704c | |||
811f156031 | |||
d8fed1f7a9 | |||
2e539249cb | |||
488d31ecdf | |||
af33d22f25 | |||
f1da623af3 | |||
77f1ff019b | |||
2aa11afb87 | |||
bb9ce3c5c5 | |||
d187b32a28 | |||
c8c666c6a6 | |||
3e190503e6 | |||
709ab3c14c | |||
ef13c6a5b6 | |||
6a10b679ca | |||
62816dddde | |||
54c0cf93fe | |||
365f44c39b | |||
2fa85a24ec | |||
631e9910da | |||
2741756248 | |||
d3f95e6c69 | |||
b7f2428961 | |||
3b1f908e5e | |||
14ca8048a8 | |||
206a3e00e5 | |||
f198b20c42 | |||
e3ba1fc883 | |||
ab5e56fd16 | |||
d885de1600 | |||
ee1abfd1c1 | |||
2295e0e3ce | |||
acc8caebe6 | |||
a034a1e628 | |||
1165ba2171 | |||
0ade699873 | |||
d0109627b9 | |||
a2270b7432 | |||
1ecd3bb822 | |||
51961e1064 | |||
cb8442a119 | |||
3baa34d842 | |||
86d9f50b9c | |||
de52a9bf75 | |||
985a94adfc | |||
b1ab09196c | |||
3d7ed3263f | |||
fca4577e23 | |||
27454e9828 | |||
bee3c23b45 | |||
b2f01ad204 | |||
9026867d17 | |||
330c9eb1b2 | |||
485a72306d | |||
9b55e582cd | |||
3d145d7f48 | |||
982efab88f | |||
079ed4a992 | |||
afdf87f6f7 | |||
a7201ece04 | |||
36296bbb20 | |||
07ff92c663 | |||
61252248fb | |||
68cbcdf08b | |||
85824ee203 | |||
d30c89e345 | |||
e8a156d682 | |||
fb8d23deb3 | |||
e570c23153 | |||
bd2c0e1ab6 | |||
39a4a0a362 | |||
22d80eeaf9 | |||
6cc91824c1 | |||
5a904cf29d | |||
b8a1caad5e | |||
63ef0aba18 | |||
7913d6365c | |||
c3f49f766d | |||
e883bccc76 | |||
c8f16530d5 | |||
9d27ac8a2e | |||
42cdc38c7b | |||
2ce025a906 | |||
17f7922bfc | |||
6b2fe94192 | |||
004c09a8e2 | |||
36bd66281d | |||
d11a6e187f | |||
9a569d73d1 | |||
be302fd250 | |||
d76d0cb1bf | |||
2bf867982a | |||
f3874d58b9 | |||
a983129613 | |||
f11a4087da | |||
176ffd23f5 | |||
ab2f6f3aa4 | |||
e6e76fbefe | |||
178d00f93a | |||
830a7c0c7a | |||
18d578dfc4 | |||
072b576514 | |||
6c3a5d69e1 | |||
a7de4f5b85 | |||
264a04922d | |||
1dbbd8694f | |||
bdeb47305e | |||
19b2326f3d | |||
81919a35a2 | |||
516e838eb4 | |||
fc03e53615 | |||
6603437cb1 | |||
6f55e7844c | |||
cf203b7fde | |||
d71bc1e69f | |||
a396806343 | |||
fad0de4581 | |||
c2ca259f48 | |||
4c481a8947 | |||
beb987d3d1 | |||
95e45e1c2c | |||
59fe1e8efa | |||
f30979d021 | |||
85f3028317 | |||
8195fc6141 | |||
32f825d442 | |||
ff8b2d4422 | |||
6cb8b46900 | |||
8c9245149e | |||
2000f7958d | |||
63e79a9039 | |||
7f9680f0a0 | |||
53503f09ca | |||
6fbf5dac68 | |||
98fc093823 | |||
5cfb5df31e | |||
55d889522b | |||
762e320c35 | |||
358aa337ea | |||
1764a33690 | |||
a90d7e4cc7 | |||
aec220ab63 | |||
4348c49656 | |||
a18de9b5f0 | |||
f9c2dacf33 | |||
7d247353d0 | |||
bc502ee125 | |||
00c02d00f3 | |||
804db03e41 | |||
26efdf4dd9 | |||
4b903719a0 | |||
ed3d87f061 | |||
a3622eda46 | |||
513a38f07b | |||
e1e025c319 | |||
b6fe6838d3 | |||
d94339a858 | |||
15d478cf4d | |||
add96f921b | |||
4fc6331cb6 | |||
753e76d451 | |||
3794962330 | |||
2865b063ad | |||
d4d7c9d577 | |||
f8697075ea | |||
7cd0aea1d3 | |||
69b2d31b71 | |||
8cd5200f48 | |||
99b45a7820 | |||
5e07ea79c2 | |||
3af3d3f7d9 | |||
549fa12d5a | |||
077dcd2002 | |||
2907928d93 | |||
fe3973a51c | |||
c83c3cd796 | |||
b9539c59f3 | |||
f2b140d3d7 | |||
e3400a05d3 | |||
b308463022 | |||
5e85059a71 | |||
9e661f2cb9 | |||
44192d754f | |||
1fa851a8d0 | |||
61abc61a69 | |||
efee0e3f43 | |||
0639b14906 | |||
f7c352a32d | |||
bf750e45a1 | |||
a38608fe59 | |||
97a04887a3 | |||
17d020e996 | |||
c3363706c5 | |||
2c2f3d38cc | |||
7f92116b51 | |||
0b55e7ce6a | |||
f6024b3269 | |||
a79ff8a1a9 | |||
e314423653 | |||
d0521e493f | |||
9ed7324995 | |||
e140227065 | |||
18886dc6b7 | |||
5391e3842c | |||
f9029727e0 | |||
a5b9a35c50 | |||
ba5ca8a362 | |||
5943e1c3b2 | |||
b46225070f | |||
e7624abe63 | |||
993aa1321c | |||
bff9653050 | |||
9640976c79 | |||
60a7221827 | |||
afc10acd19 | |||
c7a86b56ef | |||
9b6602cba2 | |||
8a271223a9 | |||
dd34dbaca5 | |||
5d74ebd5e5 | |||
9af69c151b | |||
c51dcad51b | |||
98f0da6b38 | |||
b030efdc83 | |||
84a784834e | |||
79094bcbcf | |||
497f9817a2 | |||
4aae07d5f5 | |||
e96b852107 | |||
238a7be58d | |||
b09a8f1b91 | |||
087da5621a | |||
fb95e67a2a | |||
e4a52e6e45 | |||
8c3f1a9c39 | |||
e9e2349ce6 | |||
2668f841d1 | |||
7384650d85 | |||
39869be23b | |||
6cc975704d | |||
93252769af | |||
196f79115a | |||
d10d78d520 | |||
4ecfb95d0c | |||
2fd20fadfc | |||
ca97cb0eda | |||
90a304cb07 | |||
cc7415bb31 | |||
44744d9e67 | |||
01675771d5 | |||
258c3dd563 | |||
39687908f1 | |||
8d4b21a005 | |||
cf0cd92ed4 | |||
cd2635ccfc | |||
78d9f0622d | |||
4f9edf13d7 | |||
405555b401 | |||
1bc4788e59 | |||
ef75a77464 | |||
7309111433 | |||
f6f8f543e1 | |||
34c991ea02 | |||
06f3fd8c6d | |||
474500362c | |||
ea4a96761c | |||
220921628b | |||
044356d221 | |||
d350114159 | |||
86807ca848 | |||
306593144d | |||
5d59bfde8a | |||
f55034ed54 | |||
03e679b634 | |||
f20e588ec1 | |||
20be69e1b9 | |||
293a246af8 | |||
dea00311b6 | |||
fb2b6c0c28 | |||
6f49126223 | |||
12920f2a4f | |||
4b7fd4dfae | |||
ce560fdcb5 | |||
748bb86b5b | |||
051f24f674 | |||
d2e01528a6 | |||
a9c7d82693 | |||
4bba2f41d7 | |||
8ac24d3114 | |||
6066256689 | |||
3a734af159 | |||
b9907997e4 | |||
ef889ade5d | |||
334098a7e0 | |||
8f73251012 | |||
b389be48a0 | |||
950d8e4c44 | |||
58cb1c1bda | |||
acff17fb88 | |||
21284cf235 | |||
50f6524ff2 | |||
e8987cf5aa | |||
d6f9a60a32 | |||
7fc35c5586 | |||
f156d7dd3b | |||
1fe224f2c6 | |||
07003704a8 | |||
e1bc610d27 | |||
d5e9b7305b | |||
cbb3b25459 | |||
941af58239 | |||
41a0ce07cb | |||
1506683705 | |||
d0eee5ff7a | |||
aed8c69bcb | |||
1eb1e73bb3 | |||
4f0bd317df | |||
80b962b4f4 | |||
ea0642c32d | |||
c17d616250 | |||
30bd4db0fc | |||
392472f4bb | |||
bd15f5625a | |||
722db7b088 | |||
a5c9162250 | |||
0388b2d463 | |||
dc64170a69 | |||
72452f0cb2 | |||
a8641b42a7 | |||
453d593ce8 | |||
5704235521 | |||
f6415b679f | |||
2d79720f5d | |||
8ddb4e750b | |||
a277daa1f2 | |||
fb794c6b5e | |||
1237cfc249 | |||
d7fd5c58cd | |||
fc9f3f31e7 | |||
ab1571cdec | |||
8270e2b768 | |||
e261ef64d7 | |||
1da4ab5918 | |||
448114cc1c | |||
25e768f31c | |||
192793ee38 | |||
a892a4a79c | |||
dc61105554 | |||
2eec290424 | |||
5d149d631f | |||
0bbcc7b180 | |||
d1a4da9812 | |||
c8ebf0de47 | |||
905af2a2e9 | |||
742543091e | |||
5f1bfb73ee | |||
6a0a0ae94f | |||
ea852200bb | |||
dc3f092d07 | |||
8ebf5eed0d | |||
19eb3b4708 | |||
2ceeb51c37 | |||
399eec5c01 | |||
fcfc4caf8c | |||
0146175fe6 | |||
cefffde9af | |||
bdc4263883 | |||
a97d4d63b9 | |||
f29114f94a | |||
a4ceef9624 | |||
6d0498df24 | |||
e8297ad27e | |||
419ce3966c | |||
eb63af1f10 | |||
048e174efb | |||
5d79617a56 | |||
ce90fc628a | |||
aae03356cb | |||
ebddfdb9a3 | |||
eeba196053 | |||
1bfdcfc84f | |||
dd1e606f13 | |||
250be9fe6c | |||
62692c171d | |||
9bc7627e27 | |||
b61efd09fc | |||
eaf28b0628 | |||
3b309f654a | |||
2700d8dc67 | |||
77c837fc1b | |||
446439e8be | |||
c6f4775fde | |||
3ff03a3f5f | |||
83ad1aaf05 | |||
cc48992e79 | |||
68bb170732 | |||
238692a8e7 | |||
290a40b7a5 | |||
d546f6f40e | |||
38a8d3cae1 | |||
f5c3b951bc | |||
d7c248042b | |||
d2f84a9d9e | |||
4f547eff02 | |||
64b833410c | |||
31f749b5d8 | |||
a0ab90a4d7 | |||
a59ae19842 | |||
2652310f2a | |||
adbb0ff318 | |||
0a5d1a445e | |||
447195a27a | |||
177154828c | |||
0d1d354052 | |||
f1d848bb9a | |||
676187ba43 | |||
90afde435b | |||
19d44142a1 | |||
445d5474cc | |||
69931e50d2 | |||
52a494bd3b | |||
9580b9de79 | |||
a762d7f462 | |||
56ee9cc21f | |||
2a505503b3 | |||
bae4007447 | |||
7313d6c533 | |||
306d2f37ff | |||
478dbfa45a | |||
d0aaa7ff00 | |||
31776fdc3f | |||
05ae6dbfa4 | |||
78f76c841d | |||
d212dc6b8b | |||
a5c790bf4b | |||
6ce1c6487a | |||
727d663f28 | |||
7aabe42ae0 | |||
dd186533f0 | |||
4dd7b20c32 | |||
4dd3675d2b | |||
86ac8568e6 | |||
192e024ada | |||
ac6df0df57 | |||
c19c17eddb | |||
74d1914a64 | |||
582930dbbb | |||
9f78e392b1 | |||
25fc576696 | |||
69dc4de80f | |||
ac975cc747 | |||
8993fec8a3 | |||
754f48a4fb | |||
cd7c6e19ed | |||
19dac01c5c | |||
895f5d8a26 | |||
3389561f34 | |||
137434a1c8 | |||
08c6d50cd1 | |||
cf3e574cb4 | |||
0af399a6d7 | |||
f586028f9a | |||
e1e85267fd | |||
51809eb260 | |||
484a9ddb27 | |||
65e6aa0de2 | |||
f3b9f7b867 | |||
48cdfddebf | |||
c55368ddd4 | |||
60ccb3fa4c | |||
5ad5d56f7e | |||
0c2c8af44e | |||
2fe9a02b1c | |||
211c8763b9 | |||
7e47031bdc | |||
f820c9804d | |||
3cb1f6d0a1 | |||
1ee3d6ae33 | |||
312515dd6b | |||
3eb3f0269e | |||
9db86aac51 | |||
2aae19dc52 | |||
a4d343aade | |||
c2bd94c871 | |||
7d1c2d97bf | |||
d388ea0f9d | |||
ec89030483 | |||
5c29258e8e | |||
2fdf520271 | |||
f19d2dc548 | |||
5adeac8047 | |||
7cb7643565 | |||
d138b3c704 | |||
fa6f495662 | |||
8cc86d5a8d | |||
5e562ffecf | |||
2277172f9c | |||
2db3d60259 | |||
7e19bf1c0e | |||
fb192aaa9f | |||
e1e362fa43 | |||
08753d002a | |||
8d15ae37a1 | |||
3e53791de3 | |||
8010eca9c7 | |||
dc0d4addd9 | |||
71414630fc | |||
2e0089d5ff | |||
3a2451fcba | |||
eb5830aa40 | |||
d81a3f4a74 | |||
c7d0097c97 | |||
152a10344c | |||
04eb32e539 | |||
8b14090927 | |||
ea4bb9402f | |||
f1115e274f | |||
a68e3a79fb | |||
8d630a6f62 | |||
d362278a41 | |||
00f78d6b5a | |||
399fba16bb | |||
c2469b6765 | |||
7791ef90e7 | |||
ee64f4a936 | |||
456887a54a | |||
b3cec1a383 | |||
436d2032c4 | |||
3828635fb2 | |||
dda28d7415 | |||
cd83014fff | |||
bbb6728d2f | |||
49fbbacafc | |||
7ad582f39f | |||
aa896f0e7a | |||
0261a0e3cf | |||
5809d3ae0d | |||
827cedcd15 | |||
011f8210ed | |||
6b0737384b | |||
e153418b8a | |||
c8306616e0 | |||
9383629d13 | |||
a16de5de84 | |||
a769e09dfa | |||
9ac2fd1c37 | |||
80ae020bee | |||
bab898ce86 | |||
c8ed1675a7 | |||
b1905dfa24 | |||
ab458d8840 | |||
4f3ce6d9cd | |||
ee1d627803 | |||
4ae7aea3b2 | |||
aadb0c58c9 | |||
86249e2ae4 | |||
b799f3326b | |||
fa7d3a37c0 | |||
3bb1e35ada | |||
56e0edd621 | |||
a93cd8c61c | |||
b3f0f39106 | |||
6dc345bc53 | |||
bd30ee97b8 | |||
29c5f76d7f | |||
734d0899d3 | |||
4428cb5909 | |||
844f546a8b | |||
3be1790803 | |||
d96e72e5dc | |||
201fea0fda | |||
5cfd3d8407 | |||
9eec44dd98 | |||
b85cd4983e | |||
dac81b2d44 | |||
ab185a59b5 | |||
59e41d98e3 | |||
1810927dbd | |||
b7694c34f5 | |||
6cabd47c32 | |||
9963f11172 | |||
c8d3a09af8 | |||
bfd81ce050 | |||
6b2c2509b2 | |||
56b4f5dce2 | |||
21ae4143b1 | |||
e8f06f6c06 | |||
6dd2e4ffbd | |||
ba0bb29cd8 | |||
c4c6e35352 | |||
8d46a5b0b5 | |||
5451c64d5d | |||
0a77be4ec0 | |||
5f9f82757d | |||
f82d4b36eb | |||
c882d8daf0 | |||
7e9d56a9e7 | |||
900825bac0 | |||
3e67d8818c | |||
284d8a24e0 | |||
30a2711bac | |||
0fd55db21c | |||
559e46be5e | |||
8b1e5d9c6d | |||
774fa8f065 | |||
9bbffb8fee | |||
48a5ce7434 | |||
6bf9824fec | |||
853b4a520f | |||
2cb71dff4a | |||
1941072bb2 | |||
fdaf45aab2 | |||
950a740bd4 | |||
66020cd923 | |||
4c4b336ecb | |||
286dd7b2e4 | |||
55af85db3c | |||
9102de5500 | |||
a1a3a49bc9 | |||
5a24e60572 | |||
9fe40df960 | |||
d5ddc6b080 | |||
d2d930dd3f | |||
3e34981d9b | |||
6ef3bb9d83 | |||
f782fe2062 | |||
c4653347fd | |||
d8dd357326 | |||
6a77c81a28 | |||
e10c26e70d | |||
ddf78a735b | |||
2c7cafbf20 | |||
86dd88698d | |||
b82f46e862 | |||
5dc464b9a7 | |||
90276d9a2d | |||
49d59d88c2 | |||
5863afa1a5 | |||
adc71742c8 | |||
cb6b6915a4 | |||
2a31cd13c9 | |||
4822fe1beb | |||
f04ab67083 | |||
ad4c982c68 | |||
3f24555c3d | |||
628c835a22 | |||
8efac33b53 | |||
d127c57f2d | |||
d633ac5b9d | |||
d68fe2b3c7 | |||
08a06b49f0 | |||
d87e8b63a9 | |||
0c5f4ed7de | |||
21ec334dcc | |||
63682c2c9a | |||
288a879411 | |||
712bf035a7 | |||
5e08fac729 | |||
92e2e09434 | |||
290a29b5fb | |||
1ae13c1374 | |||
a8d28e364d | |||
2ef5751795 | |||
8bb45956d4 | |||
3cbadf92b6 | |||
db3a1905de | |||
6cf82ba993 | |||
66c6d5e1ef | |||
df518d8b0b | |||
d9ed9de2b0 | |||
51cf44d6fd | |||
d5b8b5a2f8 | |||
8d26f3040c | |||
21898ffc60 | |||
04b1bbf932 | |||
382be56d36 | |||
acfc96525c | |||
a820aa11e6 | |||
8d2e3e4aba | |||
ab5247dc64 | |||
acd9535588 | |||
19bfb2649b | |||
25123af3b8 | |||
ff8d7a810d | |||
f367cc2e75 | |||
f2984f66e6 | |||
0defeb268c | |||
030064da25 | |||
84035a27f5 | |||
0885fcf973 | |||
48542ac8fd | |||
ea15ad6c34 | |||
d03b3ceb58 | |||
5d58cb7449 | |||
c5a996aa78 | |||
1279c38ac9 | |||
267d14c28d | |||
bd2262ceea | |||
13de251047 | |||
fda4f229bb | |||
2468ebb76b | |||
9142ba9dd4 | |||
d59bcea749 | |||
7541ab99cd | |||
d0aabde502 | |||
55e6cb9c7b | |||
642c01d0dc | |||
d852dc0d2b | |||
fb79c32430 | |||
51d1e64b23 | |||
e9c02173cf | |||
dbba5fd461 | |||
e760e02737 | |||
d59e559317 | |||
2ec8542105 | |||
28692f65be | |||
5404bc02dd | |||
c90fa95f93 | |||
822f67e9ad | |||
d28f18658e | |||
38d23546a5 | |||
c63f945093 | |||
0f213f2202 | |||
de808a391a | |||
0d282e3cc5 | |||
d342c3c357 | |||
f9b214f34e | |||
e1cc025cbd | |||
f04cd19886 | |||
1514dfa1b7 | |||
23ea3ad738 | |||
e3c34684c6 | |||
b5f01b52c7 | |||
fb51d511be | |||
9f2ff71581 | |||
fd177b63f8 | |||
8433516d85 | |||
0c84a40298 | |||
74962b2fd9 | |||
01968d7ca7 | |||
367f403693 | |||
8f4499090b | |||
4c516c00da | |||
d1ac40ea14 | |||
15bbde1022 | |||
c0313f3026 | |||
2d7607734e | |||
5ab505be33 | |||
c10f58b7bd | |||
e752bd06f7 | |||
30247d70cd | |||
0605c0ac68 | |||
b22c80106f | |||
c94952e25d | |||
e1053989c0 | |||
559e019de1 | |||
660eac50b2 | |||
92804f6f45 | |||
0fcde35a20 | |||
3c7ea1d298 | |||
74594be234 | |||
3d99686f7a | |||
c039562723 | |||
9bdcd42b9b | |||
4cae691b86 | |||
6a1216bd51 | |||
02a21fd309 | |||
98a365aaae | |||
d671d6f0f1 | |||
11a056d116 | |||
94011bb9a8 | |||
1c6c89f345 | |||
25faef67d0 | |||
65519bc04b | |||
ef59762d8e | |||
80dcfd5c3e | |||
ee856a7a46 | |||
32bd9f091f | |||
90f49eab6d | |||
49c2db9485 | |||
57502fcf6a | |||
c83b77304a | |||
1b3923b5ce | |||
26629a3f9e | |||
8970246bc4 | |||
cc32519a2d | |||
6e977dd8e8 | |||
68f1db123a | |||
35f9499638 | |||
64ef5869d7 | |||
2c14efa8a2 | |||
6eb47ab792 | |||
21b78f3926 | |||
09b4281cff | |||
721fc294be | |||
8dff08d772 | |||
7c3017734a | |||
bff48681d2 | |||
519d6b2bf3 | |||
73df873f44 | |||
99197387af | |||
f28600031d | |||
0ea0146e04 | |||
a211a9cdcd | |||
9b24f83456 | |||
2c6d08c519 | |||
18eb4b9c51 | |||
cf98bf37d0 | |||
bc9daf9041 | |||
9c36e497d9 | |||
6515838d35 | |||
ea52aff6dc | |||
ef0d5a8240 | |||
15bd14297e | |||
21d115dcbb | |||
959ca66125 | |||
7483c7513a | |||
e5af3ac65c | |||
6831c23449 | |||
5c01e9bf7c | |||
075d9c97c0 | |||
b249989bef | |||
070ec9bd97 | |||
27a6a26b4b | |||
76d961cc77 | |||
8234f9fdf3 | |||
7328ffb034 | |||
3e5550c910 | |||
72a9071203 | |||
07a5ffb04c | |||
a58bc5bebb | |||
b1a0110a47 | |||
d0fe9dea61 | |||
b165c77fa7 | |||
54aec7ac5f | |||
a2fc74f010 | |||
743ed9f57f | |||
7b3bac46a0 | |||
3be37b00e7 | |||
702589104d | |||
cb9e7e510b | |||
0c0038488c | |||
5d3af5f273 | |||
76a2adb7c3 | |||
5a6d22d4ec | |||
08ae47e475 | |||
056ff13c4d | |||
9f1e0d2a49 | |||
ed6db19681 | |||
9875f2646a | |||
183d3dada7 | |||
2be755ce75 | |||
3599df77f0 | |||
d7943fe225 | |||
6758146213 | |||
9b8ab40d80 | |||
baddd80069 | |||
f9445c1d90 | |||
15c29cdd9b | |||
13d8272173 | |||
208903ddde | |||
679fe18b17 | |||
3fcccc31b5 | |||
430e9b13d3 | |||
53c79e85f2 | |||
2e62925a6e | |||
0f86d6b28f | |||
8d70b01714 | |||
1327807caa | |||
c8d03046bf | |||
3942b3732f | |||
7cd9109e2f | |||
4e113bbf1b | |||
e25ca9776f | |||
6c9165b6a8 | |||
efb2f8b325 | |||
d6ba84ea99 | |||
c27870e765 | |||
01dedde1c9 | |||
7e5c5c4d27 | |||
c634d43ac5 | |||
6c15f50899 | |||
e1d81342cf | |||
423baac08b | |||
36281a653f | |||
f8fe9316c0 | |||
661bc21af5 | |||
b6af84eb77 | |||
7906461c14 | |||
2e4604b0b9 | |||
4c34164d2e | |||
9df4f3aaad | |||
513d3178c6 | |||
2209acbfe2 | |||
70121e3c6b | |||
59cc59e93e | |||
493d9b98f5 | |||
efaef4f748 | |||
7666e4f34a | |||
2ea2f7570c | |||
e750465e15 | |||
aa5e099718 | |||
c7db4176f3 | |||
a3e7c468cd | |||
cd359cd96e | |||
5de5dd80a3 | |||
2c65781d91 | |||
6e3b869e6a | |||
86ead92ed5 | |||
9a266a531b | |||
3f7f24b90e | |||
c5a6075484 | |||
360c5ff3df | |||
d323e35001 | |||
70f576d5d3 | |||
28f9be8d7c | |||
469d92c569 | |||
7a90a101ee | |||
f7796edc7e | |||
ac1df9d9d7 | |||
50ad750ec1 | |||
8748df2ca4 | |||
8f6b6c9042 | |||
07fb6d64e5 | |||
e45c846af5 | |||
dd56e82dba | |||
99889a0ed0 | |||
799f3d43c8 | |||
ed7fd855af | |||
2dfe24f067 | |||
a2743baaa3 | |||
b65aa7b5ac | |||
11dfe38761 | |||
dde1da1c0e | |||
085bc6440c | |||
1bd15d849b | |||
ea4bd29d14 | |||
5ed75de0db | |||
3296bb243c | |||
75d341d928 | |||
31c18f0953 | |||
05d8a33a28 | |||
c9092c72bf | |||
d9eba9d145 | |||
0ee67bb7d1 | |||
22551d0941 | |||
26b5dad042 | |||
6a057a3bd0 | |||
2e49230ca2 | |||
7ad0214089 | |||
1df5b8712b | |||
bfedbc1b6d | |||
68c758a533 | |||
d2427f18e5 | |||
00f94b1ffd | |||
0e8665bf18 | |||
f65153ad64 | |||
adddf3f179 | |||
785c1372f2 | |||
3580b2d803 | |||
3a12f5887e | |||
a80dcfd4a3 | |||
b2a332599e | |||
8046ae4bd5 | |||
1988416295 | |||
3b479948c6 | |||
cc732fe95e | |||
c7cb816ae1 | |||
4c09f6838f | |||
b188063869 | |||
0f8320bdc2 | |||
551df0cb77 | |||
87dd441a3a | |||
1eacab2169 | |||
b806097141 | |||
218f0a6661 | |||
47ee93b0bd | |||
1e5e3d57e2 | |||
023446ecf3 | |||
86e272856a | |||
257e621d40 | |||
113a061bee | |||
ad3befaaf5 | |||
176160d32f | |||
16790ee620 | |||
78b0bce9a1 | |||
2837cab5da | |||
2e99fa8251 | |||
fe9f380993 | |||
f8ecbc28e2 | |||
700318dc62 | |||
9d9010e45f | |||
aa6c5df0bc | |||
94764e5c7c | |||
31c8de1cca | |||
0d104a0fce | |||
3f1453f470 | |||
f4b8e5675d | |||
3b7a2cdbce | |||
203aa727a7 | |||
eaba772f21 | |||
9a920d1f93 | |||
5e683ba472 | |||
f6c6b026bb | |||
c695a1ffd2 | |||
91ce4d1721 | |||
3b1885859d | |||
2741aa8589 | |||
a43f99c600 | |||
90d64d257f | |||
f167f7b412 | |||
4af31ec9a6 | |||
cfc62a1c15 | |||
26deeb45a3 | |||
3fc145c254 | |||
a84f3a8b31 | |||
c81ff22c5b | |||
bad8ea47d5 | |||
b15c77ebc4 | |||
4b618b95e4 | |||
2988d3c76d | |||
e5ef0cad9a | |||
4f69b190bc | |||
7ae2a7341c | |||
6d5762a6c8 | |||
ebf82ac28c | |||
bd4c248292 | |||
e8c093c1d0 | |||
f0b74637dc | |||
b1bf7d4f40 | |||
aca707413c | |||
a8a1f5bd55 | |||
dc84ecc40b | |||
7483614b75 | |||
4820ac71a6 | |||
13c78e5aa2 | |||
5bb175fc90 | |||
f73273d71c | |||
4fd0116a0d | |||
ea2f2ecf96 | |||
4b459768a0 | |||
6d70978edc | |||
216a8aa3b2 | |||
a21c854790 | |||
70ab2c37c5 | |||
b4b6ba6d82 | |||
3b9f1db061 | |||
d344489c12 | |||
44d6b6ae9e | |||
8d9c2c4425 | |||
b22aac92ac | |||
932998f5cc | |||
86c3b0c8c2 | |||
e54280fbfc | |||
d18ee58ab9 | |||
63bc231243 | |||
68856e5e2f | |||
8a088fb99e | |||
20ad43b908 | |||
772e55d174 | |||
d160305868 | |||
9961b78b06 | |||
eb7b9d9dbf | |||
f5e418ace7 | |||
48d211b8b0 | |||
dbd91e7151 | |||
720becb5e8 | |||
e2cefc9b4f | |||
a0b3620b05 | |||
cd043d4461 | |||
5989528833 | |||
fd3daa4423 | |||
8dca36433c | |||
446ed17589 | |||
0be09555f1 | |||
c2517e7d5f | |||
5cbe879325 | |||
741a4444a9 | |||
7f7fafb857 | |||
db0c681bae | |||
46f7df232a | |||
285849e3a6 | |||
a589f6c60b | |||
3e0a78acf3 | |||
4860fd4529 | |||
b3a22f31f6 | |||
9452fabfb2 | |||
8f702828ca | |||
e09eec37bc | |||
fc7cc770d4 | |||
a2f59a28f7 | |||
5c962c03dd | |||
2d1727697d | |||
823da19745 | |||
1d314328f0 | |||
3aaf1d62f3 | |||
0e379558a1 | |||
d6bba0663a | |||
0b02eb456c | |||
df38794c7d | |||
6cdb6722d1 | |||
d106eb5b90 | |||
5e639bc0c1 | |||
49a6d2d5f1 | |||
f230ae6fd5 | |||
c8930781eb | |||
01461af333 | |||
c51bb6789c | |||
af65485ba7 | |||
f2e1591826 | |||
2f20257070 | |||
794c0f64a9 | |||
731e0e5321 | |||
89d0758713 | |||
879d5e8799 | |||
88f6c18665 | |||
aa1ce97748 | |||
c084f7f731 | |||
0d1f83ba4b | |||
922f9fd4d5 | |||
4b99d8cb91 | |||
41fc0dcb62 | |||
d1df0d20f9 | |||
1b7f6ea1e7 | |||
71602e0f1b | |||
407f53872a | |||
687cd2e205 | |||
198c416bd8 | |||
6cb9c3b81f | |||
2a67308e29 | |||
42cf847a63 | |||
c4275f0d27 | |||
ecf8abc518 | |||
5b88df508e | |||
fcedff95e8 | |||
e9ada44509 | |||
110bf6b778 | |||
22ebd2658f | |||
7a5889bc5a | |||
ad0d311f8a | |||
6214c38da9 | |||
1c604de158 | |||
64df159057 | |||
01a4052828 | |||
51581d14f8 | |||
fcc520e49a | |||
1541bce952 | |||
7dbefae1e3 | |||
8fdf860c17 | |||
2102e0da6b | |||
89b9b61840 | |||
7f26c75610 | |||
cdeb07f0fd | |||
cb45a10bcd | |||
7eb2d71009 | |||
976dc1f4bc | |||
1290edd58a | |||
341c244965 | |||
d962e46ed1 | |||
90514e03d1 | |||
200e98c211 | |||
bc845324df | |||
6a141694da | |||
dc2b63abdf | |||
4ab7ca0e83 | |||
0038b3848a | |||
88646a63a1 | |||
b12738cfe9 | |||
7aa6cc9b04 | |||
ee3a49cfba | |||
0353fbb5df | |||
92c0a2cdc1 | |||
aa02a7fdd8 | |||
77de82aaa4 | |||
0227254a65 | |||
03a01166ba | |||
d23c250ad5 | |||
081278dfd6 | |||
5676b204dd | |||
8c86348119 | |||
a7ae552ba7 | |||
757b2b502a | |||
adfd4da24c | |||
a79661c6dc | |||
851f979039 | |||
f858f64b1f | |||
9f8095c069 | |||
fa44e95c91 | |||
0ab541627b | |||
16698f714b | |||
931021fe57 | |||
4c9531bdf3 | |||
0a78107525 | |||
a9553af635 | |||
838ed1cd32 | |||
cc54c41e30 | |||
63db43cc7a | |||
4562b278a8 | |||
a57e522a67 | |||
91c5d0c042 | |||
007fec21fc | |||
a6b4069172 | |||
d7bc6a6999 | |||
9f62149b94 | |||
f25f454bd4 | |||
885f243afc | |||
ec87bf3dd5 | |||
ef965aa3f3 | |||
fc09d77e89 | |||
056180e6c8 | |||
3c149d8a43 | |||
b4dcdbf00d | |||
32b7bd366f | |||
00e2845f0f | |||
c92ef54466 | |||
28782ff99d | |||
b489515f4d | |||
54889813ce | |||
4bce66d5ff | |||
66e6ea56b8 | |||
6044b80362 | |||
be75e738b1 | |||
56fceb1928 | |||
9dbc8b2dd0 | |||
80c6aaf1fd | |||
bdc5599b73 | |||
73384aec21 | |||
0013236e5d | |||
9e5f9a8a10 | |||
c38b0b883d | |||
98285b4b18 | |||
4fc8f06791 | |||
c31cadb54f | |||
41c4a5b60d | |||
faa3cd3b71 | |||
2ab24c4f49 | |||
9885fb4159 | |||
66f55e3e6a | |||
a6218a20ae | |||
2364777838 | |||
aeaac743ff | |||
5099192c44 | |||
d8695da1d1 | |||
28197b2435 | |||
8d2a0b43ff | |||
634201244c | |||
3d90b03d7b | |||
81643e6d70 | |||
5aea8dd75b | |||
77eb37934f | |||
5b6adc6d96 | |||
d53df8a002 | |||
ca9fa329d1 | |||
51dbb2e06d | |||
aecbd14761 | |||
0cca2ea24f | |||
481b0bf277 | |||
b073fd49ea | |||
be2ebdd395 | |||
320670f8fe | |||
daef43f504 | |||
b120c32cad | |||
35fcc351a0 | |||
5b19dd23d9 | |||
d08cfda796 | |||
a9e552ab18 | |||
6cb1102bdb | |||
969adaefdf | |||
a67ccfdf3a | |||
ccd6f13793 | |||
f496cd320d | |||
9f4184208e | |||
bb89ef9fc0 | |||
70bee7d405 | |||
abbebad669 | |||
1bcf43baac | |||
9716fb3b36 | |||
ba30cef987 | |||
41bdc90f46 | |||
3bd4cf94cc | |||
f5ff3e8e19 | |||
02e0271e44 | |||
ce0315a10f | |||
7ac441e473 | |||
adf0c389c5 | |||
8cfe3e1ec0 | |||
4eda438f6f | |||
713acc408b | |||
a7d6930905 | |||
f0e804afd5 | |||
28c004aa2c | |||
78fe4259a9 | |||
312c2d1d8e | |||
ca78cb5aca | |||
456541e921 | |||
44c353fafd | |||
23fcf7920e | |||
d2b1ecc885 | |||
65b1d09d55 | |||
ab727e428b | |||
93a8633f18 | |||
cfc7314bd1 | |||
93978ec38a | |||
ff9414a6ba | |||
0542e2179f | |||
7d5395c12b | |||
3e6c05fe13 | |||
f4cab080a6 | |||
36715f571c | |||
e923a3ed6a | |||
bc02031793 | |||
dc64e139b9 | |||
5cf1b0b138 | |||
afb4133bd2 | |||
86b916b008 | |||
6faa87302c | |||
f4ff30e99d | |||
ab696f6a23 | |||
d89f5ca48e | |||
7e93811fbc | |||
0bf4f3f48a | |||
82df524e09 | |||
8e2c41e7f7 | |||
103dddba2f | |||
faf148d297 | |||
133ab98260 | |||
b489d699ce | |||
afb09c914d | |||
b64cd2a3e3 | |||
1fcc5f73ac | |||
32cf5a29ce | |||
e0c327bae2 | |||
c82a382b0b | |||
eb149030eb | |||
fd032165d7 | |||
d912c94034 | |||
563492f1e5 | |||
38ab541f4a | |||
af38196a6b | |||
e9104a0a32 | |||
70229f07c8 | |||
ee7d291442 | |||
29824d05ab | |||
76a2343639 | |||
10882bcbce | |||
a32236c80c | |||
3b2b3aeea9 | |||
39ed133f9f | |||
fd598f060c | |||
99b45d2aa0 | |||
57898d8a90 | |||
82fb5f0bef | |||
6b7841fefc | |||
834504aec0 | |||
26a9974667 | |||
28962bce99 | |||
6dc08bf45e | |||
087ae64899 | |||
3db25153e5 | |||
3c304c89d4 | |||
b0c0490e85 | |||
3b1cd4c4b4 | |||
c2afdbb1fb | |||
6476827d3a | |||
c10469ddb6 | |||
1e366dae3e | |||
187c713de5 | |||
ff440c1d9d | |||
2a3f9b32ff | |||
f346805c0c | |||
ef1ac8a0cb | |||
edfcdb171c | |||
3c91a9a551 | |||
bc4f4ee829 | |||
61fe422a88 | |||
57ed96622b | |||
b3c0d43890 | |||
0d0e900158 | |||
4536dfccd0 | |||
06c414a753 | |||
3c84075d2d | |||
4969abeaab | |||
e5dfde88fd | |||
7c7fba4e57 | |||
5d5d115608 | |||
7086009f93 | |||
d0b44c380f | |||
beae843766 | |||
5132a106a1 | |||
136efd6b53 | |||
4b78ef31b6 | |||
ea0c6d8c40 | |||
3def42abd8 | |||
a2bff68c1a | |||
aee49bb3cd | |||
49e4cc3daf | |||
15cce89a45 | |||
e425f70ef9 | |||
4fdbfd6048 | |||
270da98c46 | |||
e857ca4d7d | |||
ab2cf69e8d | |||
8e6d1ff0dc | |||
168fe0aa28 | |||
608c5bad24 | |||
7d36d664a7 | |||
225ae6fd25 | |||
2f9f6a1f21 | |||
984dc7c1ed | |||
1373637da1 | |||
1df68d342a | |||
b8e6db0feb | |||
c701f8bf36 | |||
4ddf008be2 | |||
2f5e61bacb | |||
1c0a5cd136 | |||
76b9178b16 | |||
a5e98cf46d | |||
5012cc3a32 | |||
28bd9e183e | |||
3a4a150ef0 | |||
02c655ff1a | |||
79efded841 | |||
f7efde11d9 | |||
e62b89a2ed | |||
bd7b285bae | |||
038e03a4e4 | |||
597144b0b9 | |||
837c1041c7 | |||
a56c46b6f1 | |||
df7a32e3d0 | |||
49bee2ebc5 | |||
a3944a7083 | |||
efba662ca6 | |||
e923d51b8f | |||
eeb0c70ea2 | |||
313c362461 | |||
c620626515 | |||
44b6843de7 | |||
c1ce4e4ca9 | |||
a3f8686fbf | |||
25f75d4d03 | |||
7e63e32960 | |||
1e11578ef0 | |||
f8d0f5265f | |||
1207a058d0 | |||
d61566787e | |||
c08f4599f2 | |||
bb5823c775 | |||
792225eaff | |||
a8680887d8 | |||
5b93d6ab91 | |||
5c762b71dd | |||
c30f17fafb | |||
34e02aba42 | |||
03bb95539b | |||
d81c0e8bba | |||
c112877a4a | |||
e8e32e0ba1 | |||
b31f36d68c | |||
ee09e50e7f | |||
374c2782ad | |||
566c4a53c5 | |||
5b9524e1ba | |||
31607bf9cd | |||
5a10de1b9f | |||
3b7e6afb55 | |||
0add4d735c | |||
3794ffc952 | |||
329bd4a1bb | |||
3b1358b62f | |||
c862b1bc6b | |||
e92d137676 | |||
b3d6c6a9a0 | |||
498c2b298c | |||
0e4e6dfada | |||
47d780b8ce | |||
0daa0e170a | |||
0d7d3ce802 | |||
71740805a7 | |||
e77291a6f3 | |||
716c8e22b0 | |||
f853790016 | |||
2b036449be | |||
0efa011e09 | |||
17c8c6f945 | |||
b3e2280bb9 | |||
1eee0029a8 | |||
59f58c15f7 | |||
361193099f | |||
7ff4a2a708 | |||
1aad66bdaa | |||
e65bad16cc | |||
ab92c814c3 | |||
0ad9499b93 | |||
7aa5753ed2 | |||
658f316511 | |||
89ee2cf576 | |||
bd1a371c62 | |||
8bd4f5d93e | |||
f713828406 | |||
3069bf4f4a | |||
6b1b42b928 | |||
e8cc7f9cee | |||
3a25137ee4 | |||
c765f277a3 | |||
9242f2f1d4 | |||
b0a417f342 | |||
75e7b1e3da | |||
4ff67ec2ee | |||
0f4c0beffd | |||
3bcc1c0560 | |||
f8dee1b402 | |||
7fa3a1d23e | |||
28a8df2f0a | |||
6fa00c61d2 | |||
726fcf015a | |||
c9b2d3ae1a | |||
2aeef09316 | |||
51767725b2 | |||
efbfa81fa7 | |||
f5ec14c54c | |||
127d3d028e | |||
1095874e7e | |||
33860bc3b7 | |||
e39aabbfe6 | |||
995d1a07d4 | |||
f6b06d6e5d | |||
19b6620a92 | |||
9c4660d3d6 | |||
75464a1baa | |||
2f73fa55ae | |||
45c45e11dd | |||
6e126c96a9 | |||
2c5c79d68e | |||
c2df51aa95 | |||
dcb00b2e54 | |||
da036dcc3e | |||
f9eab6e0de | |||
6a128d4ec7 | |||
5efe67f375 | |||
3af8fa194c | |||
0d09c64dde | |||
84c1dda39d | |||
dc636d190d | |||
2bcdd8844c | |||
0a4bde1f2f | |||
ee3f93c029 | |||
2658c5c545 | |||
27c7ab6e00 | |||
67e25f8724 | |||
12fb509d84 | |||
a2f46029c7 | |||
62a8f1d707 | |||
56777af8e4 | |||
9205b640a4 | |||
f2a786ecbf | |||
13ce0ebb87 | |||
bcc131e866 | |||
529c8f0eb1 | |||
2cb32edaa9 | |||
5a1d3609a9 | |||
1e3f05db8f | |||
a776ec9718 | |||
522e79f2e0 | |||
9ad8b74111 | |||
73dcdb27f6 | |||
6b7cc0022b | |||
9c27183876 | |||
25f8789aa5 | |||
3455082458 | |||
b7b23cd4a8 | |||
f0210453a6 | |||
615fe095e1 | |||
80d0f9c49d | |||
c9f9d39b54 | |||
0cc3132f5a | |||
38b6e8decd | |||
d48008339e | |||
54b97ed8e1 | |||
d301859bbd | |||
facfb4b615 | |||
42fd7dea78 | |||
62a70c300d | |||
c53be51460 | |||
f51eb46c69 | |||
7a3ce9bb1d | |||
2f9af6a707 | |||
f204344102 | |||
22f20f0c29 | |||
18844d60b5 | |||
3d02b19fbd | |||
bd63da0a0e | |||
f9be3ad3fd | |||
d781a6164a | |||
b18ec00a7a | |||
82a0f678fb | |||
5fcaedb880 | |||
2606c92ef9 | |||
ae47bb3594 | |||
636a9df177 | |||
f190d5f496 | |||
3c76b3548d | |||
a58d2b6137 | |||
08a0ff7091 | |||
e3095be85c | |||
9e1eb25232 | |||
71b069d3e1 | |||
e5bb96bc3b | |||
2924ed31f3 | |||
9b6b35d9b7 | |||
2cc4a467a6 | |||
1fc25148da | |||
07784c8990 | |||
f376c6a728 | |||
5c5e51095c | |||
cdaa96df63 | |||
246286f0eb | |||
6bf6b40495 | |||
f118d7e067 | |||
025835c5b2 | |||
36c1f93ceb | |||
b0e0c5eba0 | |||
daf126a638 | |||
7ac09d7b7c | |||
5af63c74e0 | |||
4510bbccca | |||
ae4a237e58 | |||
9bc9b36645 | |||
22b84fe543 | |||
3d731cc861 | |||
14f9f85c4b | |||
b5b7ec0162 | |||
3415812b06 | |||
ef381e17bb | |||
e174ccbd8e | |||
1e47f9b3ff | |||
2d068bd45b | |||
d92ad5640a | |||
64688b3786 | |||
fb7e6df790 | |||
c5a32fd4fa | |||
a273c46559 | |||
9e093d5ff3 | |||
41fc51ebcf | |||
4da6e1ea9c | |||
67c71130df | |||
9ccaea2afc | |||
fea9ffc46a | |||
229130ed25 | |||
5344abc008 | |||
86bcecf840 | |||
4128bdc859 | |||
907482c8ac | |||
774a255f2e | |||
98e69e63d2 | |||
f091f370d0 | |||
ad20d72a39 | |||
f0ddea821c | |||
73286dc8bf | |||
4e84999f20 | |||
411a118148 | |||
240b02e175 | |||
a463ae821e | |||
6d135beb21 | |||
6008f528d0 | |||
1dc857a4b2 | |||
4f19749252 | |||
79a143b32f | |||
5f109e8589 | |||
9423310816 | |||
68102fced8 | |||
1eb7ce5cdb | |||
4884b324e6 | |||
78bede1ffb | |||
b59fe77ec7 | |||
45330a5e47 | |||
794fce7bff | |||
e08b6b3ec7 | |||
8dcb3e0c41 | |||
c62d2f56d8 | |||
c318373b88 | |||
3090751dfc | |||
519b1cb5c9 | |||
e62157e896 | |||
c2ffcc4bd1 | |||
09ca5d14c9 | |||
2f561c77f5 | |||
8d710c5130 | |||
fcfb39c5de | |||
85c3d8aa52 | |||
aa4d9882d2 | |||
49aee6d02c | |||
7a0f86a04f | |||
a4a48be923 | |||
8788485924 | |||
616ed8f73c | |||
ea37fd821d | |||
62eee9c69e | |||
b5b89990eb | |||
9b03b0a1b2 | |||
f365de636f | |||
ee5a60e1c5 | |||
5e7b26791b | |||
b3a21d5a50 | |||
48b470140b | |||
89ce4e74fe | |||
69acdd437e | |||
b3776598d8 | |||
5d0ac3e3e6 | |||
fecf3d6fc1 | |||
d8f3421608 | |||
e8639517da | |||
d450b971f9 | |||
8f43698a60 | |||
3b60432687 | |||
d487791b03 | |||
91d8198d17 | |||
fa0cc2dc13 | |||
14ae01a6c9 | |||
f5f4438b43 | |||
b6e91291fb | |||
b41bf58658 | |||
a3e3bebed7 | |||
11309ee99c | |||
9c8a654079 | |||
2e00740515 | |||
b52d500fbc | |||
d91d321129 | |||
60480a1e2f | |||
65b821b192 | |||
433ac8c38a | |||
70e9b1e936 | |||
61dbcfa44a | |||
916dd3b7c5 | |||
b0c31500fc | |||
7be275b692 | |||
4b9e81fc89 | |||
51a37de885 | |||
d893e83622 | |||
33945a3115 | |||
afa86d8a45 | |||
cb5e57e2dd | |||
a8e3269ad6 | |||
2cd8675734 | |||
3916c54501 | |||
a17bb54d8f | |||
aa129dd7e8 | |||
510df4729c | |||
d25a859985 | |||
3b64735058 | |||
30dae0205e | |||
87a56d2bc9 | |||
26f060f66b | |||
c35befbf38 | |||
2fa5808e3f | |||
44c0dd0762 | |||
1bb9348a90 | |||
9141f5ef94 | |||
51d1785576 | |||
4f7f7538f7 | |||
1ae761311e | |||
7e1c94ab9c | |||
0a1beb688c | |||
5dd4dc2862 | |||
a576c7ae4b | |||
6c7db3d956 | |||
9fcbc83ebc | |||
cd158d4cde | |||
49a016b53d | |||
5039528b56 | |||
77e951e933 | |||
b032ceb5d4 | |||
914eab12f7 | |||
0dec761e21 | |||
5a23417499 | |||
cd5605bb86 | |||
0e5609d40e | |||
9d966a28d3 | |||
e7f2ab9138 | |||
9628da2d17 | |||
026f54dcf7 | |||
3cdf14d4c5 | |||
4ffbddf21f | |||
13217f072b | |||
0959e1501f | |||
58d039a70d | |||
d8e25a0863 | |||
e0cc7faea1 | |||
61b383f422 | |||
f8f33d35e0 | |||
57e8e5c965 | |||
ecc8bc8910 | |||
0a63e69e04 | |||
16755b26e2 | |||
85d51ab228 | |||
92f253adb2 | |||
222f2913c1 | |||
878b1873cd | |||
96f64c629e | |||
58a1f9081c | |||
e4c2abb1d9 | |||
d0240bd9d0 | |||
6e3f4e5e45 | |||
844a9022fb | |||
45877b3154 | |||
6120f6590b | |||
ba4ba685f9 | |||
276c87af68 | |||
a50f63840f | |||
54d5cec582 | |||
fc686aaca7 | |||
7370ef8c5e | |||
fc242f6e1f | |||
a0adfb5e8e | |||
c52d09d5b1 | |||
498f0d8539 | |||
278391d961 | |||
531bd6ddc7 | |||
d40dd3e4da | |||
07a0c82790 | |||
59ca4b9fe4 | |||
0694cc4916 | |||
38c76754ef | |||
9e2cbe3362 | |||
ced0c29c56 | |||
7d67c9e2e7 | |||
67d4a1b3fc | |||
45e0feab4e | |||
7a6e6eb5e2 | |||
9ec95679e1 | |||
57d253aeda | |||
fd8360deb1 | |||
9b7e516a56 | |||
b255be93fa | |||
218eb97241 | |||
2341b99379 | |||
1d5795d134 | |||
05c95dfdc6 | |||
27f3ef5f7a | |||
fe82516f9f | |||
415c0b86ba | |||
eded5558b2 | |||
f06355b0bb | |||
b0c5f59c07 | |||
e76558b0cc | |||
f9cc12ae0f | |||
23f9a22edc | |||
8e6efe4d87 | |||
a18d9a1f87 | |||
4e5e55c21a | |||
8ae9888959 | |||
cf9ddd293d | |||
466fb601d6 | |||
ebe7087bff | |||
72f18759ba | |||
92ec908303 | |||
e0058c1125 | |||
b4951c058b | |||
a71a96894d | |||
ea43080548 | |||
e78b96a657 | |||
8a4794fc51 | |||
535f8088d7 | |||
fbe8ec1fe7 | |||
a55453e634 | |||
5a6b62e77c | |||
63fab07047 | |||
c00fc6f8bb | |||
0cfeee13ee | |||
cf8a6a042e | |||
45ae086974 | |||
8ffdfa72e3 | |||
4fb138c42e | |||
640c7d748a | |||
c94bc59d7e | |||
b220885f42 | |||
1c2d36d8a3 | |||
0408c9d66a | |||
749764f35b | |||
a31db33e93 | |||
01c4f5abcd | |||
63f65bac3e | |||
a20c871ece | |||
649fb6e401 | |||
e48630da72 | |||
68d783145b | |||
32486b5beb | |||
a716ec61b9 | |||
c059924a8f | |||
3ef031b2fe | |||
58c07e7f8c | |||
7e120fc441 | |||
87902de010 | |||
1718fe3d74 | |||
82322ddab6 | |||
3d1854ab95 | |||
995d72b8c1 | |||
0c612f08c7 | |||
9b08f48dbd | |||
303c3ce89e | |||
8f56753a2f | |||
4fded5bd0e | |||
3abfe8aa22 | |||
0ccf4cf785 | |||
d8ff939409 | |||
9047dc8163 | |||
600aa223c2 | |||
f0e63025b0 | |||
082ad84914 | |||
6d52c5b2f0 | |||
21b4d60101 | |||
a4f8be7811 | |||
f0d028d3a4 | |||
9d47ee52b4 | |||
ddbd336387 | |||
0d01e4854b | |||
955302fd95 | |||
7cc1a358f5 | |||
99da69c85f | |||
222063b19d | |||
085d3b9d94 | |||
a30206a665 | |||
e63fdf2b22 | |||
b5d52b6b45 | |||
40993a0d25 | |||
855a251489 | |||
1228c2948d | |||
98fc24cbdf | |||
d6338af766 | |||
3889d956d9 | |||
5c62fbb6a8 | |||
8f76ec97c0 | |||
92ef1faa97 | |||
1e1821f002 | |||
60347a5483 | |||
b14cca2ad9 | |||
adacc7977d | |||
a7a4984175 | |||
b44b04d25b | |||
656a851830 | |||
8d82e37ec0 | |||
2a4cd81c86 | |||
566a7c3039 | |||
9133f38138 | |||
802e925fd7 | |||
5caf523fd9 | |||
2210818114 | |||
f6eecb855e | |||
4eeeccb9cd | |||
16ab3e02a9 | |||
f948a03be2 | |||
cde8478388 | |||
8ed8abb9df | |||
a122d3d466 | |||
eb92e72e6c | |||
341046c96c | |||
3a934b7020 | |||
03ca1ff634 | |||
35c9a3c558 | |||
56c3a61d83 | |||
871222aebd | |||
d3145be744 | |||
8bfa43f9a7 | |||
65e32fecb1 | |||
ff389f1270 | |||
5b4eda670b | |||
edb8c99fbe | |||
eca49e3a03 | |||
83c1db8763 | |||
90d4c1d153 | |||
9021b2dba6 | |||
f980422c57 | |||
b342a86c15 | |||
fb2c402ae1 | |||
38820bc75c | |||
4e9bd1fef5 | |||
a00f5850ee | |||
433d9bbc6e | |||
4b819457c9 | |||
a2182e68a6 | |||
e9e03259c1 | |||
bb15f16d8c | |||
9af946a306 | |||
99705deb7d | |||
67577a3760 | |||
ce8e56ee18 | |||
770f29fd05 | |||
acd2a63879 | |||
6cc6addc2f | |||
e41a3822a6 | |||
c4b0c57059 | |||
007e647462 | |||
d4e80407e5 | |||
f6a8096720 | |||
891e0188dd | |||
079742b4d3 | |||
d0c73564b1 | |||
5a6a698e1d | |||
4eda149ffa | |||
ac84db2506 | |||
30755e31e7 | |||
bc35c9a598 | |||
c6b883289c | |||
58237bd67f | |||
991be8950e | |||
54370e228a | |||
f277ea134f | |||
68f4af7d2e | |||
59a127d022 | |||
6ddb3e722c | |||
a3821a0b33 | |||
51c237f9d8 | |||
d8354f6f02 | |||
25b2853b70 | |||
ed05999f63 | |||
4d22d80281 | |||
5178b3d59d | |||
b597a92487 | |||
1f6e00878d | |||
31224a8425 | |||
a58ae5eb2a | |||
d6fa9c0414 | |||
7b67ae6972 | |||
e34437b2d7 | |||
15208c7d3d | |||
e5adfaade0 | |||
d21c80b865 | |||
944df52e2a | |||
3ded98e5fa | |||
d5e5baa20f | |||
0fb086f241 | |||
aed0704404 | |||
072382fa61 | |||
ad11c5fb3f | |||
5664c37539 | |||
3e2250423c | |||
ea605b499c | |||
bb1ab428db | |||
f928b91e9d | |||
dec460ce52 | |||
daa3673c1c | |||
c2405bcae2 | |||
4ca9472e02 | |||
1c504471d3 | |||
dc88a86259 | |||
580ed1119a | |||
bad0663138 | |||
220ba0785c | |||
4afc4d0751 | |||
605f75b56f | |||
ad5cafbfed | |||
3db517548d | |||
816db7a0aa | |||
3fe497e129 | |||
21aafd603c | |||
0a44ff86ab | |||
d784d87880 | |||
7cde312f14 | |||
34db376ae5 | |||
38ddc71b83 | |||
ba2eb0d7ad | |||
32da07ccee | |||
d19f394630 | |||
ff479c865d | |||
ada30c2789 | |||
02335ee72d | |||
1e3e756c19 | |||
6a230fe803 | |||
e55a569629 | |||
962bad3cea | |||
8806fcd545 | |||
1e358e3ae8 | |||
7dc594ba4d | |||
bfb46cbfbe | |||
6d04a285dc | |||
1bd37d213a | |||
883a8109c8 | |||
a4e0f3f724 | |||
edc06a97d6 | |||
ae77fe5a69 | |||
394844062f | |||
ecd2b2f217 | |||
91282c8b6a | |||
fae694a102 | |||
d5a356902a | |||
405a71d3a4 | |||
d3b1096510 | |||
8d734941af | |||
a4e3c7c37c | |||
6508d497ce | |||
4873abe145 | |||
bd4b18541c | |||
3f21760d56 | |||
bc3a0ac6a3 | |||
d7d8f38fb7 | |||
ee305c9284 | |||
9ade00e27b | |||
085c376655 | |||
dd385ad05b | |||
aa92311d4e | |||
3d144e62c4 | |||
576dd011a1 | |||
6b14b20369 | |||
54afec58a3 | |||
92c2b1dd2d | |||
f757df5dfd | |||
12358476da | |||
2c62eeea3c | |||
d31da26a51 | |||
b8a1fc0126 | |||
f6eae91c7d | |||
d44428fa90 | |||
11c7fef80a | |||
b12bfcb03b | |||
7178b6c2c4 | |||
45d0d7c3d4 | |||
adb1038b26 | |||
2a3b03138b | |||
ec1023e790 | |||
cd7e64b2b3 | |||
ac8353a64f | |||
fea7cac206 | |||
46ced5c828 | |||
7e7440c431 | |||
2ae3f40971 | |||
a3ac2623d5 | |||
ac5cc7ddad | |||
014a25697d | |||
fc4013a43f | |||
2fcae719ad | |||
f98b615bf3 | |||
07abebfc46 | |||
5f0088594b | |||
8453828a65 | |||
63cbeca64e | |||
736f0f7560 | |||
fe3be8f18a | |||
6a2834f2b0 | |||
7e16afbdce | |||
1c7a9a4132 | |||
50169b9798 | |||
374ec6773f | |||
a044cb6cc8 | |||
ba3e805981 | |||
2f0e1afd16 | |||
8148210860 | |||
1628a31efa | |||
115e0142d9 | |||
beb49b24f6 | |||
c84012d655 | |||
d6705d5529 | |||
55a8941922 | |||
a3ca80d20d | |||
3577de04b8 | |||
e974e6b3c9 | |||
8db16ff306 | |||
a8cda248b4 | |||
69285b22d3 | |||
b9cc6c10af | |||
d02c5cb023 | |||
37a48489da | |||
302866ad73 | |||
0a83a86e65 | |||
4e86ecf807 | |||
6ca3579cc0 | |||
66a4b26811 | |||
78f27c0465 | |||
3ad883d7c7 | |||
fecd8ca54a | |||
13977d9338 | |||
5d5b827f1a | |||
2a6d6a7f69 | |||
dfdaceb410 | |||
f51a63e4ef | |||
ce86a43779 | |||
f55f4cb02a | |||
06bf03f075 | |||
eefc6d7c44 | |||
1f7035f18f | |||
71dc6a3828 | |||
5d1c625b74 | |||
c42d3c19e2 | |||
3a23dc242e | |||
1df1f88fe1 | |||
2174042994 | |||
5cc81a0179 | |||
6a047519f6 | |||
5404776f7a | |||
dff68a339a | |||
dde3e01a59 | |||
a26553c90a | |||
2a10b2275e | |||
ba9527abc0 | |||
6c726df9b9 | |||
24587148fd | |||
6762c2d08f | |||
3a998cf39c | |||
1237306ca8 | |||
3668627e03 | |||
a81f201fad | |||
91ba938953 | |||
4573f00a0d |
@ -2,3 +2,4 @@ target
|
||||
Dockerfile
|
||||
.dockerignore
|
||||
.gitignore
|
||||
**/.git
|
||||
|
3
.github/ISSUE_TEMPLATE/bug_report.md
vendored
3
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@ -23,7 +23,8 @@ A clear and concise description of what you expected to happen.
|
||||
**Screenshots**
|
||||
If applicable, add screenshots to help explain your problem.
|
||||
|
||||
**Meilisearch version:** [e.g. v0.20.0]
|
||||
**Meilisearch version:**
|
||||
[e.g. v0.20.0]
|
||||
|
||||
**Additional context**
|
||||
Additional information that may be relevant to the issue.
|
||||
|
34
.github/ISSUE_TEMPLATE/sprint_issue.md
vendored
Normal file
34
.github/ISSUE_TEMPLATE/sprint_issue.md
vendored
Normal file
@ -0,0 +1,34 @@
|
||||
---
|
||||
name: New sprint issue
|
||||
about: ⚠️ Should only be used by the engine team ⚠️
|
||||
title: ''
|
||||
labels: ''
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
Related product team resources: [roadmap card]() (_internal only_) and [PRD]() (_internal only_)
|
||||
Related product discussion:
|
||||
Related spec: WIP
|
||||
|
||||
## Motivation
|
||||
|
||||
<!---Copy/paste the information in the roadmap resources or briefly detail the product motivation. Ask product team if any hesitation.-->
|
||||
|
||||
## Usage
|
||||
|
||||
<!---Write a quick description of the usage if the usage has already been defined-->
|
||||
|
||||
Refer to the final spec to know the details and the final decisions about the usage.
|
||||
|
||||
## TODO
|
||||
|
||||
<!---Feel free to adapt this list with more technical/product steps-->
|
||||
|
||||
- [ ] Release a prototype
|
||||
- [ ] If prototype validated, merge changes into `main`
|
||||
- [ ] Update the spec
|
||||
|
||||
## Impacted teams
|
||||
|
||||
<!---Ping the related teams. Ask for the engine manager if any hesitation-->
|
1
.github/dependabot.yml
vendored
1
.github/dependabot.yml
vendored
@ -2,7 +2,6 @@
|
||||
|
||||
version: 2
|
||||
updates:
|
||||
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
|
10
.github/scripts/check-release.sh
vendored
10
.github/scripts/check-release.sh
vendored
@ -3,7 +3,7 @@
|
||||
# check_tag $current_tag $file_tag $file_name
|
||||
function check_tag {
|
||||
if [[ "$1" != "$2" ]]; then
|
||||
echo "Error: the current tag does not match the version in $3: found $2 - expected $1"
|
||||
echo "Error: the current tag does not match the version in Cargo.toml: found $2 - expected $1"
|
||||
ret=1
|
||||
fi
|
||||
}
|
||||
@ -11,12 +11,8 @@ function check_tag {
|
||||
ret=0
|
||||
current_tag=${GITHUB_REF#'refs/tags/v'}
|
||||
|
||||
toml_files='*/Cargo.toml'
|
||||
for toml_file in $toml_files;
|
||||
do
|
||||
file_tag="$(grep '^version = ' $toml_file | cut -d '=' -f 2 | tr -d '"' | tr -d ' ')"
|
||||
check_tag $current_tag $file_tag $toml_file
|
||||
done
|
||||
file_tag="$(grep '^version = ' Cargo.toml | cut -d '=' -f 2 | tr -d '"' | tr -d ' ')"
|
||||
check_tag $current_tag $file_tag
|
||||
|
||||
lock_file='Cargo.lock'
|
||||
lock_tag=$(grep -A 1 'name = "meilisearch-auth"' $lock_file | grep version | cut -d '=' -f 2 | tr -d '"' | tr -d ' ')
|
||||
|
77
.github/workflows/benchmarks-manual.yml
vendored
Normal file
77
.github/workflows/benchmarks-manual.yml
vendored
Normal file
@ -0,0 +1,77 @@
|
||||
name: Benchmarks (manual)
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
dataset_name:
|
||||
description: 'The name of the dataset used to benchmark (search_songs, search_wiki, search_geo or indexing)'
|
||||
required: false
|
||||
default: 'search_songs'
|
||||
|
||||
env:
|
||||
BENCH_NAME: ${{ github.event.inputs.dataset_name }}
|
||||
|
||||
jobs:
|
||||
benchmarks:
|
||||
name: Run and upload benchmarks
|
||||
runs-on: benchmarks
|
||||
timeout-minutes: 4320 # 72h
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
toolchain: stable
|
||||
override: true
|
||||
|
||||
# Set variables
|
||||
- name: Set current branch name
|
||||
shell: bash
|
||||
run: echo "name=$(echo ${GITHUB_REF#refs/heads/})" >> $GITHUB_OUTPUT
|
||||
id: current_branch
|
||||
- name: Set normalized current branch name # Replace `/` by `_` in branch name to avoid issues when pushing to S3
|
||||
shell: bash
|
||||
run: echo "name=$(echo ${GITHUB_REF#refs/heads/} | tr '/' '_')" >> $GITHUB_OUTPUT
|
||||
id: normalized_current_branch
|
||||
- name: Set shorter commit SHA
|
||||
shell: bash
|
||||
run: echo "short=$(echo $GITHUB_SHA | cut -c1-8)" >> $GITHUB_OUTPUT
|
||||
id: commit_sha
|
||||
- name: Set file basename with format "dataset_branch_commitSHA"
|
||||
shell: bash
|
||||
run: echo "basename=$(echo ${BENCH_NAME}_${{ steps.normalized_current_branch.outputs.name }}_${{ steps.commit_sha.outputs.short }})" >> $GITHUB_OUTPUT
|
||||
id: file
|
||||
|
||||
# Run benchmarks
|
||||
- name: Run benchmarks - Dataset ${BENCH_NAME} - Branch ${{ steps.current_branch.outputs.name }} - Commit ${{ steps.commit_sha.outputs.short }}
|
||||
run: |
|
||||
cd benchmarks
|
||||
cargo bench --bench ${BENCH_NAME} -- --save-baseline ${{ steps.file.outputs.basename }}
|
||||
|
||||
# Generate critcmp files
|
||||
- name: Install critcmp
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
tool: critcmp
|
||||
- name: Export cripcmp file
|
||||
run: |
|
||||
critcmp --export ${{ steps.file.outputs.basename }} > ${{ steps.file.outputs.basename }}.json
|
||||
|
||||
# Upload benchmarks
|
||||
- name: Upload ${{ steps.file.outputs.basename }}.json to DO Spaces # DigitalOcean Spaces = S3
|
||||
uses: BetaHuhn/do-spaces-action@v2
|
||||
with:
|
||||
access_key: ${{ secrets.DO_SPACES_ACCESS_KEY }}
|
||||
secret_key: ${{ secrets.DO_SPACES_SECRET_KEY }}
|
||||
space_name: ${{ secrets.DO_SPACES_SPACE_NAME }}
|
||||
space_region: ${{ secrets.DO_SPACES_SPACE_REGION }}
|
||||
source: ${{ steps.file.outputs.basename }}.json
|
||||
out_dir: critcmp_results
|
||||
|
||||
# Helper
|
||||
- name: 'README: compare with another benchmark'
|
||||
run: |
|
||||
echo "${{ steps.file.outputs.basename }}.json has just been pushed."
|
||||
echo 'How to compare this benchmark with another one?'
|
||||
echo ' - Check the available files with: ./benchmarks/scripts/list.sh'
|
||||
echo " - Run the following command: ./benchmaks/scipts/compare.sh <file-to-compare-with> ${{ steps.file.outputs.basename }}.json"
|
79
.github/workflows/benchmarks-push-indexing.yml
vendored
Normal file
79
.github/workflows/benchmarks-push-indexing.yml
vendored
Normal file
@ -0,0 +1,79 @@
|
||||
name: Benchmarks of indexing (push)
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
env:
|
||||
INFLUX_TOKEN: ${{ secrets.INFLUX_TOKEN }}
|
||||
BENCH_NAME: "indexing"
|
||||
|
||||
jobs:
|
||||
benchmarks:
|
||||
name: Run and upload benchmarks
|
||||
runs-on: benchmarks
|
||||
timeout-minutes: 4320 # 72h
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
toolchain: stable
|
||||
override: true
|
||||
|
||||
# Set variables
|
||||
- name: Set current branch name
|
||||
shell: bash
|
||||
run: echo "name=$(echo ${GITHUB_REF#refs/heads/})" >> $GITHUB_OUTPUT
|
||||
id: current_branch
|
||||
- name: Set normalized current branch name # Replace `/` by `_` in branch name to avoid issues when pushing to S3
|
||||
shell: bash
|
||||
run: echo "name=$(echo ${GITHUB_REF#refs/heads/} | tr '/' '_')" >> $GITHUB_OUTPUT
|
||||
id: normalized_current_branch
|
||||
- name: Set shorter commit SHA
|
||||
shell: bash
|
||||
run: echo "short=$(echo $GITHUB_SHA | cut -c1-8)" >> $GITHUB_OUTPUT
|
||||
id: commit_sha
|
||||
- name: Set file basename with format "dataset_branch_commitSHA"
|
||||
shell: bash
|
||||
run: echo "basename=$(echo ${BENCH_NAME}_${{ steps.normalized_current_branch.outputs.name }}_${{ steps.commit_sha.outputs.short }})" >> $GITHUB_OUTPUT
|
||||
id: file
|
||||
|
||||
# Run benchmarks
|
||||
- name: Run benchmarks - Dataset ${BENCH_NAME} - Branch ${{ steps.current_branch.outputs.name }} - Commit ${{ steps.commit_sha.outputs.short }}
|
||||
run: |
|
||||
cd benchmarks
|
||||
cargo bench --bench ${BENCH_NAME} -- --save-baseline ${{ steps.file.outputs.basename }}
|
||||
|
||||
# Generate critcmp files
|
||||
- name: Install critcmp
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
tool: critcmp
|
||||
- name: Export cripcmp file
|
||||
run: |
|
||||
critcmp --export ${{ steps.file.outputs.basename }} > ${{ steps.file.outputs.basename }}.json
|
||||
|
||||
# Upload benchmarks
|
||||
- name: Upload ${{ steps.file.outputs.basename }}.json to DO Spaces # DigitalOcean Spaces = S3
|
||||
uses: BetaHuhn/do-spaces-action@v2
|
||||
with:
|
||||
access_key: ${{ secrets.DO_SPACES_ACCESS_KEY }}
|
||||
secret_key: ${{ secrets.DO_SPACES_SECRET_KEY }}
|
||||
space_name: ${{ secrets.DO_SPACES_SPACE_NAME }}
|
||||
space_region: ${{ secrets.DO_SPACES_SPACE_REGION }}
|
||||
source: ${{ steps.file.outputs.basename }}.json
|
||||
out_dir: critcmp_results
|
||||
|
||||
# Upload benchmarks to influxdb
|
||||
- name: Upload ${{ steps.file.outputs.basename }}.json to influxDB
|
||||
run: telegraf --config https://eu-central-1-1.aws.cloud2.influxdata.com/api/v2/telegrafs/08b52e34a370b000 --once --debug
|
||||
|
||||
# Helper
|
||||
- name: 'README: compare with another benchmark'
|
||||
run: |
|
||||
echo "${{ steps.file.outputs.basename }}.json has just been pushed."
|
||||
echo 'How to compare this benchmark with another one?'
|
||||
echo ' - Check the available files with: ./benchmarks/scripts/list.sh'
|
||||
echo " - Run the following command: ./benchmaks/scipts/compare.sh <file-to-compare-with> ${{ steps.file.outputs.basename }}.json"
|
78
.github/workflows/benchmarks-push-search-geo.yml
vendored
Normal file
78
.github/workflows/benchmarks-push-search-geo.yml
vendored
Normal file
@ -0,0 +1,78 @@
|
||||
name: Benchmarks of search for geo (push)
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
env:
|
||||
BENCH_NAME: "search_geo"
|
||||
INFLUX_TOKEN: ${{ secrets.INFLUX_TOKEN }}
|
||||
|
||||
jobs:
|
||||
benchmarks:
|
||||
name: Run and upload benchmarks
|
||||
runs-on: benchmarks
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
toolchain: stable
|
||||
override: true
|
||||
|
||||
# Set variables
|
||||
- name: Set current branch name
|
||||
shell: bash
|
||||
run: echo "name=$(echo ${GITHUB_REF#refs/heads/})" >> $GITHUB_OUTPUT
|
||||
id: current_branch
|
||||
- name: Set normalized current branch name # Replace `/` by `_` in branch name to avoid issues when pushing to S3
|
||||
shell: bash
|
||||
run: echo "name=$(echo ${GITHUB_REF#refs/heads/} | tr '/' '_')" >> $GITHUB_OUTPUT
|
||||
id: normalized_current_branch
|
||||
- name: Set shorter commit SHA
|
||||
shell: bash
|
||||
run: echo "short=$(echo $GITHUB_SHA | cut -c1-8)" >> $GITHUB_OUTPUT
|
||||
id: commit_sha
|
||||
- name: Set file basename with format "dataset_branch_commitSHA"
|
||||
shell: bash
|
||||
run: echo "basename=$(echo ${BENCH_NAME}_${{ steps.normalized_current_branch.outputs.name }}_${{ steps.commit_sha.outputs.short }})" >> $GITHUB_OUTPUT
|
||||
id: file
|
||||
|
||||
# Run benchmarks
|
||||
- name: Run benchmarks - Dataset ${BENCH_NAME} - Branch ${{ steps.current_branch.outputs.name }} - Commit ${{ steps.commit_sha.outputs.short }}
|
||||
run: |
|
||||
cd benchmarks
|
||||
cargo bench --bench ${BENCH_NAME} -- --save-baseline ${{ steps.file.outputs.basename }}
|
||||
|
||||
# Generate critcmp files
|
||||
- name: Install critcmp
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
tool: critcmp
|
||||
- name: Export cripcmp file
|
||||
run: |
|
||||
critcmp --export ${{ steps.file.outputs.basename }} > ${{ steps.file.outputs.basename }}.json
|
||||
|
||||
# Upload benchmarks
|
||||
- name: Upload ${{ steps.file.outputs.basename }}.json to DO Spaces # DigitalOcean Spaces = S3
|
||||
uses: BetaHuhn/do-spaces-action@v2
|
||||
with:
|
||||
access_key: ${{ secrets.DO_SPACES_ACCESS_KEY }}
|
||||
secret_key: ${{ secrets.DO_SPACES_SECRET_KEY }}
|
||||
space_name: ${{ secrets.DO_SPACES_SPACE_NAME }}
|
||||
space_region: ${{ secrets.DO_SPACES_SPACE_REGION }}
|
||||
source: ${{ steps.file.outputs.basename }}.json
|
||||
out_dir: critcmp_results
|
||||
|
||||
# Upload benchmarks to influxdb
|
||||
- name: Upload ${{ steps.file.outputs.basename }}.json to influxDB
|
||||
run: telegraf --config https://eu-central-1-1.aws.cloud2.influxdata.com/api/v2/telegrafs/08b52e34a370b000 --once --debug
|
||||
|
||||
# Helper
|
||||
- name: 'README: compare with another benchmark'
|
||||
run: |
|
||||
echo "${{ steps.file.outputs.basename }}.json has just been pushed."
|
||||
echo 'How to compare this benchmark with another one?'
|
||||
echo ' - Check the available files with: ./benchmarks/scripts/list.sh'
|
||||
echo " - Run the following command: ./benchmaks/scipts/compare.sh <file-to-compare-with> ${{ steps.file.outputs.basename }}.json"
|
78
.github/workflows/benchmarks-push-search-songs.yml
vendored
Normal file
78
.github/workflows/benchmarks-push-search-songs.yml
vendored
Normal file
@ -0,0 +1,78 @@
|
||||
name: Benchmarks of search for songs (push)
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
env:
|
||||
BENCH_NAME: "search_songs"
|
||||
INFLUX_TOKEN: ${{ secrets.INFLUX_TOKEN }}
|
||||
|
||||
jobs:
|
||||
benchmarks:
|
||||
name: Run and upload benchmarks
|
||||
runs-on: benchmarks
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
toolchain: stable
|
||||
override: true
|
||||
|
||||
# Set variables
|
||||
- name: Set current branch name
|
||||
shell: bash
|
||||
run: echo "name=$(echo ${GITHUB_REF#refs/heads/})" >> $GITHUB_OUTPUT
|
||||
id: current_branch
|
||||
- name: Set normalized current branch name # Replace `/` by `_` in branch name to avoid issues when pushing to S3
|
||||
shell: bash
|
||||
run: echo "name=$(echo ${GITHUB_REF#refs/heads/} | tr '/' '_')" >> $GITHUB_OUTPUT
|
||||
id: normalized_current_branch
|
||||
- name: Set shorter commit SHA
|
||||
shell: bash
|
||||
run: echo "short=$(echo $GITHUB_SHA | cut -c1-8)" >> $GITHUB_OUTPUT
|
||||
id: commit_sha
|
||||
- name: Set file basename with format "dataset_branch_commitSHA"
|
||||
shell: bash
|
||||
run: echo "basename=$(echo ${BENCH_NAME}_${{ steps.normalized_current_branch.outputs.name }}_${{ steps.commit_sha.outputs.short }})" >> $GITHUB_OUTPUT
|
||||
id: file
|
||||
|
||||
# Run benchmarks
|
||||
- name: Run benchmarks - Dataset ${BENCH_NAME} - Branch ${{ steps.current_branch.outputs.name }} - Commit ${{ steps.commit_sha.outputs.short }}
|
||||
run: |
|
||||
cd benchmarks
|
||||
cargo bench --bench ${BENCH_NAME} -- --save-baseline ${{ steps.file.outputs.basename }}
|
||||
|
||||
# Generate critcmp files
|
||||
- name: Install critcmp
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
tool: critcmp
|
||||
- name: Export cripcmp file
|
||||
run: |
|
||||
critcmp --export ${{ steps.file.outputs.basename }} > ${{ steps.file.outputs.basename }}.json
|
||||
|
||||
# Upload benchmarks
|
||||
- name: Upload ${{ steps.file.outputs.basename }}.json to DO Spaces # DigitalOcean Spaces = S3
|
||||
uses: BetaHuhn/do-spaces-action@v2
|
||||
with:
|
||||
access_key: ${{ secrets.DO_SPACES_ACCESS_KEY }}
|
||||
secret_key: ${{ secrets.DO_SPACES_SECRET_KEY }}
|
||||
space_name: ${{ secrets.DO_SPACES_SPACE_NAME }}
|
||||
space_region: ${{ secrets.DO_SPACES_SPACE_REGION }}
|
||||
source: ${{ steps.file.outputs.basename }}.json
|
||||
out_dir: critcmp_results
|
||||
|
||||
# Upload benchmarks to influxdb
|
||||
- name: Upload ${{ steps.file.outputs.basename }}.json to influxDB
|
||||
run: telegraf --config https://eu-central-1-1.aws.cloud2.influxdata.com/api/v2/telegrafs/08b52e34a370b000 --once --debug
|
||||
|
||||
# Helper
|
||||
- name: 'README: compare with another benchmark'
|
||||
run: |
|
||||
echo "${{ steps.file.outputs.basename }}.json has just been pushed."
|
||||
echo 'How to compare this benchmark with another one?'
|
||||
echo ' - Check the available files with: ./benchmarks/scripts/list.sh'
|
||||
echo " - Run the following command: ./benchmaks/scipts/compare.sh <file-to-compare-with> ${{ steps.file.outputs.basename }}.json"
|
78
.github/workflows/benchmarks-push-search-wiki.yml
vendored
Normal file
78
.github/workflows/benchmarks-push-search-wiki.yml
vendored
Normal file
@ -0,0 +1,78 @@
|
||||
name: Benchmarks of search for Wikipedia articles (push)
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
env:
|
||||
BENCH_NAME: "search_wiki"
|
||||
INFLUX_TOKEN: ${{ secrets.INFLUX_TOKEN }}
|
||||
|
||||
jobs:
|
||||
benchmarks:
|
||||
name: Run and upload benchmarks
|
||||
runs-on: benchmarks
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
toolchain: stable
|
||||
override: true
|
||||
|
||||
# Set variables
|
||||
- name: Set current branch name
|
||||
shell: bash
|
||||
run: echo "name=$(echo ${GITHUB_REF#refs/heads/})" >> $GITHUB_OUTPUT
|
||||
id: current_branch
|
||||
- name: Set normalized current branch name # Replace `/` by `_` in branch name to avoid issues when pushing to S3
|
||||
shell: bash
|
||||
run: echo "name=$(echo ${GITHUB_REF#refs/heads/} | tr '/' '_')" >> $GITHUB_OUTPUT
|
||||
id: normalized_current_branch
|
||||
- name: Set shorter commit SHA
|
||||
shell: bash
|
||||
run: echo "short=$(echo $GITHUB_SHA | cut -c1-8)" >> $GITHUB_OUTPUT
|
||||
id: commit_sha
|
||||
- name: Set file basename with format "dataset_branch_commitSHA"
|
||||
shell: bash
|
||||
run: echo "basename=$(echo ${BENCH_NAME}_${{ steps.normalized_current_branch.outputs.name }}_${{ steps.commit_sha.outputs.short }})" >> $GITHUB_OUTPUT
|
||||
id: file
|
||||
|
||||
# Run benchmarks
|
||||
- name: Run benchmarks - Dataset ${BENCH_NAME} - Branch ${{ steps.current_branch.outputs.name }} - Commit ${{ steps.commit_sha.outputs.short }}
|
||||
run: |
|
||||
cd benchmarks
|
||||
cargo bench --bench ${BENCH_NAME} -- --save-baseline ${{ steps.file.outputs.basename }}
|
||||
|
||||
# Generate critcmp files
|
||||
- name: Install critcmp
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
tool: critcmp
|
||||
- name: Export cripcmp file
|
||||
run: |
|
||||
critcmp --export ${{ steps.file.outputs.basename }} > ${{ steps.file.outputs.basename }}.json
|
||||
|
||||
# Upload benchmarks
|
||||
- name: Upload ${{ steps.file.outputs.basename }}.json to DO Spaces # DigitalOcean Spaces = S3
|
||||
uses: BetaHuhn/do-spaces-action@v2
|
||||
with:
|
||||
access_key: ${{ secrets.DO_SPACES_ACCESS_KEY }}
|
||||
secret_key: ${{ secrets.DO_SPACES_SECRET_KEY }}
|
||||
space_name: ${{ secrets.DO_SPACES_SPACE_NAME }}
|
||||
space_region: ${{ secrets.DO_SPACES_SPACE_REGION }}
|
||||
source: ${{ steps.file.outputs.basename }}.json
|
||||
out_dir: critcmp_results
|
||||
|
||||
# Upload benchmarks to influxdb
|
||||
- name: Upload ${{ steps.file.outputs.basename }}.json to influxDB
|
||||
run: telegraf --config https://eu-central-1-1.aws.cloud2.influxdata.com/api/v2/telegrafs/08b52e34a370b000 --once --debug
|
||||
|
||||
# Helper
|
||||
- name: 'README: compare with another benchmark'
|
||||
run: |
|
||||
echo "${{ steps.file.outputs.basename }}.json has just been pushed."
|
||||
echo 'How to compare this benchmark with another one?'
|
||||
echo ' - Check the available files with: ./benchmarks/scripts/list.sh'
|
||||
echo " - Run the following command: ./benchmaks/scipts/compare.sh <file-to-compare-with> ${{ steps.file.outputs.basename }}.json"
|
23
.github/workflows/create-issue-dependencies.yml
vendored
23
.github/workflows/create-issue-dependencies.yml
vendored
@ -1,23 +0,0 @@
|
||||
name: Create issue to upgrade dependencies
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 0 1 */3 *'
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
create-issue:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Create an issue
|
||||
uses: actions-ecosystem/action-create-issue@v1
|
||||
with:
|
||||
github_token: ${{ secrets.MEILI_BOT_GH_PAT }}
|
||||
title: Upgrade dependencies
|
||||
body: |
|
||||
We need to update the dependencies of the Meilisearch repository, and, if possible, the dependencies of all the engine-team repositories that Meilisearch depends on (milli, charabia, heed...).
|
||||
|
||||
⚠️ This issue should only be done at the beginning of the sprint!
|
||||
labels: |
|
||||
dependencies
|
||||
maintenance
|
24
.github/workflows/dependency-issue.yml
vendored
Normal file
24
.github/workflows/dependency-issue.yml
vendored
Normal file
@ -0,0 +1,24 @@
|
||||
name: Create issue to upgrade dependencies
|
||||
|
||||
on:
|
||||
schedule:
|
||||
# Run the first of the month, every 3 month
|
||||
- cron: '0 0 1 */3 *'
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
create-issue:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
ISSUE_TEMPLATE: issue-template.md
|
||||
GH_TOKEN: ${{ secrets.MEILI_BOT_GH_PAT }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Download the issue template
|
||||
run: curl -s https://raw.githubusercontent.com/meilisearch/engine-team/main/issue-templates/dependency-issue.md > $ISSUE_TEMPLATE
|
||||
- name: Create issue
|
||||
run: |
|
||||
gh issue create \
|
||||
--title 'Upgrade dependencies' \
|
||||
--label 'dependencies,maintenance' \
|
||||
--body-file $ISSUE_TEMPLATE
|
@ -22,5 +22,11 @@ jobs:
|
||||
override: true
|
||||
- name: Install cargo-flaky
|
||||
run: cargo install cargo-flaky
|
||||
- name: Run cargo flaky 100 times
|
||||
run: cargo flaky -i 100 --release
|
||||
- name: Run cargo flaky in the dumps
|
||||
run: cd dump; cargo flaky -i 100 --release
|
||||
- name: Run cargo flaky in the index-scheduler
|
||||
run: cd index-scheduler; cargo flaky -i 100 --release
|
||||
- name: Run cargo flaky in the auth
|
||||
run: cd meilisearch-auth; cargo flaky -i 100 --release
|
||||
- name: Run cargo flaky in meilisearch
|
||||
run: cd meilisearch; cargo flaky -i 100 --release
|
@ -1,4 +1,4 @@
|
||||
name: Publish to APT repository & Homebrew
|
||||
name: Publish to APT & Homebrew
|
||||
|
||||
on:
|
||||
release:
|
||||
@ -35,7 +35,7 @@ jobs:
|
||||
- name: Build deb package
|
||||
run: cargo deb -p meilisearch -o target/debian/meilisearch.deb
|
||||
- name: Upload debian pkg to release
|
||||
uses: svenstaro/upload-release-action@2.3.0
|
||||
uses: svenstaro/upload-release-action@2.5.0
|
||||
with:
|
||||
repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
|
||||
file: target/debian/meilisearch.deb
|
52
.github/workflows/publish-binaries.yml
vendored
52
.github/workflows/publish-binaries.yml
vendored
@ -1,3 +1,5 @@
|
||||
name: Publish binaries to GitHub release
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
@ -5,8 +7,6 @@ on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
name: Publish binaries to release
|
||||
|
||||
jobs:
|
||||
check-version:
|
||||
name: Check the version validity
|
||||
@ -54,7 +54,7 @@ jobs:
|
||||
# No need to upload binaries for dry run (cron)
|
||||
- name: Upload binaries to release
|
||||
if: github.event_name == 'release'
|
||||
uses: svenstaro/upload-release-action@2.3.0
|
||||
uses: svenstaro/upload-release-action@2.5.0
|
||||
with:
|
||||
repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
|
||||
file: target/release/meilisearch
|
||||
@ -87,7 +87,7 @@ jobs:
|
||||
# No need to upload binaries for dry run (cron)
|
||||
- name: Upload binaries to release
|
||||
if: github.event_name == 'release'
|
||||
uses: svenstaro/upload-release-action@2.3.0
|
||||
uses: svenstaro/upload-release-action@2.5.0
|
||||
with:
|
||||
repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
|
||||
file: target/release/${{ matrix.artifact_name }}
|
||||
@ -96,14 +96,12 @@ jobs:
|
||||
|
||||
publish-macos-apple-silicon:
|
||||
name: Publish binary for macOS silicon
|
||||
runs-on: ${{ matrix.os }}
|
||||
runs-on: macos-12
|
||||
needs: check-version
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: macos-12
|
||||
target: aarch64-apple-darwin
|
||||
- target: aarch64-apple-darwin
|
||||
asset_name: meilisearch-macos-apple-silicon
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
@ -123,7 +121,7 @@ jobs:
|
||||
- name: Upload the binary to release
|
||||
# No need to upload binaries for dry run (cron)
|
||||
if: github.event_name == 'release'
|
||||
uses: svenstaro/upload-release-action@2.3.0
|
||||
uses: svenstaro/upload-release-action@2.5.0
|
||||
with:
|
||||
repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
|
||||
file: target/${{ matrix.target }}/release/meilisearch
|
||||
@ -132,21 +130,29 @@ jobs:
|
||||
|
||||
publish-aarch64:
|
||||
name: Publish binary for aarch64
|
||||
runs-on: ${{ matrix.os }}
|
||||
runs-on: ubuntu-latest
|
||||
needs: check-version
|
||||
container:
|
||||
# Use ubuntu-18.04 to compile with glibc 2.27
|
||||
image: ubuntu:18.04
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- build: aarch64
|
||||
os: ubuntu-18.04
|
||||
target: aarch64-unknown-linux-gnu
|
||||
linker: gcc-aarch64-linux-gnu
|
||||
use-cross: true
|
||||
- target: aarch64-unknown-linux-gnu
|
||||
asset_name: meilisearch-linux-aarch64
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Install needed dependencies
|
||||
run: |
|
||||
apt-get update -y && apt upgrade -y
|
||||
apt-get install -y curl build-essential gcc-aarch64-linux-gnu
|
||||
- name: Set up Docker for cross compilation
|
||||
run: |
|
||||
apt-get install -y curl apt-transport-https ca-certificates software-properties-common
|
||||
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
|
||||
add-apt-repository "deb [arch=$(dpkg --print-architecture)] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
|
||||
apt-get update -y && apt-get install -y docker-ce
|
||||
- name: Installing Rust toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
@ -154,15 +160,7 @@ jobs:
|
||||
profile: minimal
|
||||
target: ${{ matrix.target }}
|
||||
override: true
|
||||
- name: APT update
|
||||
run: |
|
||||
sudo apt update
|
||||
- name: Install target specific tools
|
||||
if: matrix.use-cross
|
||||
run: |
|
||||
sudo apt-get install -y ${{ matrix.linker }}
|
||||
- name: Configure target aarch64 GNU
|
||||
if: matrix.target == 'aarch64-unknown-linux-gnu'
|
||||
## Environment variable is not passed using env:
|
||||
## LD gold won't work with MUSL
|
||||
# env:
|
||||
@ -176,14 +174,16 @@ jobs:
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: build
|
||||
use-cross: ${{ matrix.use-cross }}
|
||||
use-cross: true
|
||||
args: --release --target ${{ matrix.target }}
|
||||
env:
|
||||
CROSS_DOCKER_IN_DOCKER: true
|
||||
- name: List target output files
|
||||
run: ls -lR ./target
|
||||
- name: Upload the binary to release
|
||||
# No need to upload binaries for dry run (cron)
|
||||
if: github.event_name == 'release'
|
||||
uses: svenstaro/upload-release-action@2.3.0
|
||||
uses: svenstaro/upload-release-action@2.5.0
|
||||
with:
|
||||
repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
|
||||
file: target/${{ matrix.target }}/release/meilisearch
|
||||
|
18
.github/workflows/publish-docker-images.yml
vendored
18
.github/workflows/publish-docker-images.yml
vendored
@ -1,4 +1,5 @@
|
||||
---
|
||||
name: Publish images to Docker Hub
|
||||
|
||||
on:
|
||||
push:
|
||||
# Will run for every tag pushed except `latest`
|
||||
@ -12,8 +13,6 @@ on:
|
||||
- cron: '0 23 * * *' # Every day at 11:00pm
|
||||
workflow_dispatch:
|
||||
|
||||
name: Publish tagged images to Docker Hub
|
||||
|
||||
jobs:
|
||||
docker:
|
||||
runs-on: docker
|
||||
@ -52,9 +51,6 @@ jobs:
|
||||
- name: Set build-args for Docker buildx
|
||||
id: build-metadata
|
||||
run: |
|
||||
# Define ownership
|
||||
git config --global --add safe.directory /home/meili/actions-runner/_work/meilisearch/meilisearch
|
||||
|
||||
# Extract commit date
|
||||
commit_date=$(git show -s --format=%cd --date=iso-strict ${{ github.sha }})
|
||||
|
||||
@ -62,9 +58,13 @@ jobs:
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
with:
|
||||
platforms: linux/amd64,linux/arm64
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
with:
|
||||
platforms: linux/amd64,linux/arm64
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
@ -87,14 +87,18 @@ jobs:
|
||||
type=raw,value=latest,enable=${{ steps.check-tag-format.outputs.stable == 'true' && steps.check-tag-format.outputs.latest == 'true' }}
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v3
|
||||
uses: docker/build-push-action@v4
|
||||
with:
|
||||
push: true
|
||||
platforms: linux/amd64,linux/arm64
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
builder: ${{ steps.buildx.outputs.name }}
|
||||
build-args: |
|
||||
COMMIT_SHA=${{ github.sha }}
|
||||
COMMIT_DATE=${{ steps.build-metadata.outputs.date }}
|
||||
GIT_TAG=${{ github.ref_name }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
# /!\ Don't touch this without checking with Cloud team
|
||||
- name: Send CI information to Cloud team
|
||||
|
127
.github/workflows/rust.yml
vendored
127
.github/workflows/rust.yml
vendored
@ -1,127 +0,0 @@
|
||||
name: Rust
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
push:
|
||||
# trying and staging branches are for Bors config
|
||||
branches:
|
||||
- trying
|
||||
- staging
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
RUST_BACKTRACE: 1
|
||||
RUSTFLAGS: "-D warnings"
|
||||
|
||||
jobs:
|
||||
test-linux:
|
||||
name: Tests on ubuntu-18.04
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
# Use ubuntu-18.04 to compile with glibc 2.27, which are the production expectations
|
||||
image: ubuntu:18.04
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install needed dependencies
|
||||
run: |
|
||||
apt-get update && apt-get install -y curl
|
||||
apt-get install build-essential -y
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
override: true
|
||||
- name: Cache dependencies
|
||||
uses: Swatinem/rust-cache@v2.2.0
|
||||
- name: Run cargo check without any default features
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: build
|
||||
args: --locked --release --no-default-features
|
||||
- name: Run cargo test
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: test
|
||||
args: --locked --release
|
||||
|
||||
test-others:
|
||||
name: Tests on ${{ matrix.os }}
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [macos-12, windows-2022]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Cache dependencies
|
||||
uses: Swatinem/rust-cache@v2.2.0
|
||||
- name: Run cargo check without any default features
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: build
|
||||
args: --locked --release --no-default-features
|
||||
- name: Run cargo test
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: test
|
||||
args: --locked --release
|
||||
|
||||
# We run tests in debug also, to make sure that the debug_assertions are hit
|
||||
test-debug:
|
||||
name: Run tests in debug
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
# Use ubuntu-18.04 to compile with glibc 2.27, which are the production expectations
|
||||
image: ubuntu:18.04
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install needed dependencies
|
||||
run: |
|
||||
apt-get update && apt-get install -y curl
|
||||
apt-get install build-essential -y
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
override: true
|
||||
- name: Cache dependencies
|
||||
uses: Swatinem/rust-cache@v2.2.0
|
||||
- name: Run tests in debug
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: test
|
||||
args: --locked
|
||||
|
||||
clippy:
|
||||
name: Run Clippy
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
toolchain: stable
|
||||
override: true
|
||||
components: clippy
|
||||
- name: Cache dependencies
|
||||
uses: Swatinem/rust-cache@v2.2.0
|
||||
- name: Run cargo clippy
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: clippy
|
||||
args: --all-targets -- --deny warnings --allow clippy::uninlined_format_args
|
||||
|
||||
fmt:
|
||||
name: Run Rustfmt
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
toolchain: nightly
|
||||
override: true
|
||||
components: rustfmt
|
||||
- name: Cache dependencies
|
||||
uses: Swatinem/rust-cache@v2.2.0
|
||||
- name: Run cargo fmt
|
||||
run: cargo fmt --all -- --check
|
200
.github/workflows/sdks-tests.yml
vendored
Normal file
200
.github/workflows/sdks-tests.yml
vendored
Normal file
@ -0,0 +1,200 @@
|
||||
# If any test fails, the engine team should ensure the "breaking" changes are expected and contact the integration team
|
||||
name: SDKs tests
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: "0 6 * * MON" # Every Monday at 6:00AM
|
||||
|
||||
env:
|
||||
MEILI_MASTER_KEY: 'masterKey'
|
||||
MEILI_NO_ANALYTICS: 'true'
|
||||
|
||||
jobs:
|
||||
|
||||
meilisearch-js-tests:
|
||||
name: JS SDK tests
|
||||
runs-on: ubuntu-latest
|
||||
services:
|
||||
meilisearch:
|
||||
image: getmeili/meilisearch:nightly
|
||||
env:
|
||||
MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
|
||||
MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
|
||||
ports:
|
||||
- '7700:7700'
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
repository: meilisearch/meilisearch-js
|
||||
- name: Setup node
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
cache: 'yarn'
|
||||
- name: Install dependencies
|
||||
run: yarn --dev
|
||||
- name: Run tests
|
||||
run: yarn test
|
||||
- name: Build project
|
||||
run: yarn build
|
||||
- name: Run ESM env
|
||||
run: yarn test:env:esm
|
||||
- name: Run Node.js env
|
||||
run: yarn test:env:nodejs
|
||||
- name: Run node typescript env
|
||||
run: yarn test:env:node-ts
|
||||
- name: Run Browser env
|
||||
run: yarn test:env:browser
|
||||
|
||||
instant-meilisearch-tests:
|
||||
name: instant-meilisearch tests
|
||||
runs-on: ubuntu-latest
|
||||
services:
|
||||
meilisearch:
|
||||
image: getmeili/meilisearch:nightly
|
||||
env:
|
||||
MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
|
||||
MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
|
||||
ports:
|
||||
- '7700:7700'
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
repository: meilisearch/instant-meilisearch
|
||||
- name: Setup node
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
cache: yarn
|
||||
- name: Install dependencies
|
||||
run: yarn install
|
||||
- name: Run tests
|
||||
run: yarn test
|
||||
- name: Build all the playgrounds and the packages
|
||||
run: yarn build
|
||||
|
||||
meilisearch-php-tests:
|
||||
name: PHP SDK tests
|
||||
runs-on: ubuntu-latest
|
||||
services:
|
||||
meilisearch:
|
||||
image: getmeili/meilisearch:nightly
|
||||
env:
|
||||
MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
|
||||
MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
|
||||
ports:
|
||||
- '7700:7700'
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
repository: meilisearch/meilisearch-php
|
||||
- name: Install PHP
|
||||
uses: shivammathur/setup-php@v2
|
||||
with:
|
||||
coverage: none
|
||||
- name: Validate composer.json and composer.lock
|
||||
run: composer validate
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
composer remove --dev friendsofphp/php-cs-fixer --no-update --no-interaction
|
||||
composer update --prefer-dist --no-progress
|
||||
- name: Run test suite - default HTTP client (Guzzle 7)
|
||||
run: |
|
||||
sh scripts/tests.sh
|
||||
composer remove --dev guzzlehttp/guzzle http-interop/http-factory-guzzle
|
||||
|
||||
meilisearch-python-tests:
|
||||
name: Python SDK tests
|
||||
runs-on: ubuntu-latest
|
||||
services:
|
||||
meilisearch:
|
||||
image: getmeili/meilisearch:nightly
|
||||
env:
|
||||
MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
|
||||
MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
|
||||
ports:
|
||||
- '7700:7700'
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
repository: meilisearch/meilisearch-python
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
- name: Install pipenv
|
||||
uses: dschep/install-pipenv-action@v1
|
||||
- name: Install dependencies
|
||||
run: pipenv install --dev --python=${{ matrix.python-version }}
|
||||
- name: Test with pytest
|
||||
run: pipenv run pytest
|
||||
|
||||
meilisearch-go-tests:
|
||||
name: Go SDK tests
|
||||
runs-on: ubuntu-latest
|
||||
services:
|
||||
meilisearch:
|
||||
image: getmeili/meilisearch:nightly
|
||||
env:
|
||||
MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
|
||||
MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
|
||||
ports:
|
||||
- '7700:7700'
|
||||
steps:
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: stable
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
repository: meilisearch/meilisearch-go
|
||||
- name: Get dependencies
|
||||
run: |
|
||||
go get -v -t -d ./...
|
||||
if [ -f Gopkg.toml ]; then
|
||||
curl https://raw.githubusercontent.com/golang/dep/master/install.sh | sh
|
||||
dep ensure
|
||||
fi
|
||||
- name: Run integration tests
|
||||
run: go test -v ./...
|
||||
|
||||
meilisearch-ruby-tests:
|
||||
name: Ruby SDK tests
|
||||
runs-on: ubuntu-latest
|
||||
services:
|
||||
meilisearch:
|
||||
image: getmeili/meilisearch:nightly
|
||||
env:
|
||||
MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
|
||||
MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
|
||||
ports:
|
||||
- '7700:7700'
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
repository: meilisearch/meilisearch-ruby
|
||||
- name: Set up Ruby 3
|
||||
uses: ruby/setup-ruby@v1
|
||||
with:
|
||||
ruby-version: 3
|
||||
- name: Install ruby dependencies
|
||||
run: bundle install --with test
|
||||
- name: Run test suite
|
||||
run: bundle exec rspec
|
||||
|
||||
meilisearch-rust-tests:
|
||||
name: Rust SDK tests
|
||||
runs-on: ubuntu-latest
|
||||
services:
|
||||
meilisearch:
|
||||
image: getmeili/meilisearch:nightly
|
||||
env:
|
||||
MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
|
||||
MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
|
||||
ports:
|
||||
- '7700:7700'
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
repository: meilisearch/meilisearch-rust
|
||||
- name: Build
|
||||
run: cargo build --verbose
|
||||
- name: Run tests
|
||||
run: cargo test --verbose
|
171
.github/workflows/test-suite.yml
vendored
Normal file
171
.github/workflows/test-suite.yml
vendored
Normal file
@ -0,0 +1,171 @@
|
||||
name: Test suite
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
# Everyday at 5:00am
|
||||
- cron: '0 5 * * *'
|
||||
pull_request:
|
||||
push:
|
||||
# trying and staging branches are for Bors config
|
||||
branches:
|
||||
- trying
|
||||
- staging
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
RUST_BACKTRACE: 1
|
||||
RUSTFLAGS: "-D warnings"
|
||||
|
||||
jobs:
|
||||
test-linux:
|
||||
name: Tests on ubuntu-18.04
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
# Use ubuntu-18.04 to compile with glibc 2.27, which are the production expectations
|
||||
image: ubuntu:18.04
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install needed dependencies
|
||||
run: |
|
||||
apt-get update && apt-get install -y curl
|
||||
apt-get install build-essential -y
|
||||
- name: Run test with Rust stable
|
||||
if: github.event_name != 'schedule'
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
override: true
|
||||
- name: Run test with Rust nightly
|
||||
if: github.event_name == 'schedule'
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: nightly
|
||||
override: true
|
||||
- name: Cache dependencies
|
||||
uses: Swatinem/rust-cache@v2.2.1
|
||||
- name: Run cargo check without any default features
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: build
|
||||
args: --locked --release --no-default-features --all
|
||||
- name: Run cargo test
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: test
|
||||
args: --locked --release --all
|
||||
|
||||
test-others:
|
||||
name: Tests on ${{ matrix.os }}
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [macos-12, windows-2022]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Cache dependencies
|
||||
uses: Swatinem/rust-cache@v2.2.1
|
||||
- name: Run cargo check without any default features
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: build
|
||||
args: --locked --release --no-default-features --all
|
||||
- name: Run cargo test
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: test
|
||||
args: --locked --release --all
|
||||
|
||||
test-all-features:
|
||||
name: Tests all features on cron schedule only
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
# Use ubuntu-18.04 to compile with glibc 2.27, which are the production expectations
|
||||
image: ubuntu:18.04
|
||||
if: github.event_name == 'schedule'
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install needed dependencies
|
||||
run: |
|
||||
apt-get update
|
||||
apt-get install --assume-yes build-essential curl
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
override: true
|
||||
- name: Run cargo build with all features
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: build
|
||||
args: --workspace --locked --release --all-features
|
||||
- name: Run cargo test with all features
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: test
|
||||
args: --workspace --locked --release --all-features
|
||||
|
||||
# We run tests in debug also, to make sure that the debug_assertions are hit
|
||||
test-debug:
|
||||
name: Run tests in debug
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
# Use ubuntu-18.04 to compile with glibc 2.27, which are the production expectations
|
||||
image: ubuntu:18.04
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install needed dependencies
|
||||
run: |
|
||||
apt-get update && apt-get install -y curl
|
||||
apt-get install build-essential -y
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
override: true
|
||||
- name: Cache dependencies
|
||||
uses: Swatinem/rust-cache@v2.2.1
|
||||
- name: Run tests in debug
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: test
|
||||
args: --locked --all
|
||||
|
||||
clippy:
|
||||
name: Run Clippy
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
toolchain: 1.69.0
|
||||
override: true
|
||||
components: clippy
|
||||
- name: Cache dependencies
|
||||
uses: Swatinem/rust-cache@v2.2.1
|
||||
- name: Run cargo clippy
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: clippy
|
||||
args: --all-targets -- --deny warnings
|
||||
|
||||
fmt:
|
||||
name: Run Rustfmt
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
toolchain: nightly
|
||||
override: true
|
||||
components: rustfmt
|
||||
- name: Cache dependencies
|
||||
uses: Swatinem/rust-cache@v2.2.1
|
||||
- name: Run cargo fmt
|
||||
# Since we never ran the `build.rs` script in the benchmark directory we are missing one auto-generated import file.
|
||||
# Since we want to trigger (and fail) this action as fast as possible, instead of building the benchmark crate
|
||||
# we are going to create an empty file where rustfmt expects it.
|
||||
run: |
|
||||
echo -ne "\n" > benchmarks/benches/datasets_paths.rs
|
||||
cargo fmt --all -- --check
|
15
.github/workflows/update-cargo-toml-version.yml
vendored
15
.github/workflows/update-cargo-toml-version.yml
vendored
@ -1,4 +1,4 @@
|
||||
name: Update Meilisearch version in all Cargo.toml files
|
||||
name: Update Meilisearch version in Cargo.toml
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
@ -13,9 +13,8 @@ env:
|
||||
GH_TOKEN: ${{ secrets.MEILI_BOT_GH_PAT }}
|
||||
|
||||
jobs:
|
||||
|
||||
update-version-cargo-toml:
|
||||
name: Update version in Cargo.toml files
|
||||
name: Update version in Cargo.toml
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
@ -26,23 +25,23 @@ jobs:
|
||||
override: true
|
||||
- name: Install sd
|
||||
run: cargo install sd
|
||||
- name: Update Cargo.toml files
|
||||
- name: Update Cargo.toml file
|
||||
run: |
|
||||
raw_new_version=$(echo $NEW_VERSION | cut -d 'v' -f 2)
|
||||
new_string="version = \"$raw_new_version\""
|
||||
sd '^version = "\d+.\d+.\w+"$' "$new_string" */Cargo.toml
|
||||
sd '^version = "\d+.\d+.\w+"$' "$new_string" Cargo.toml
|
||||
- name: Build Meilisearch to update Cargo.lock
|
||||
run: cargo build
|
||||
- name: Commit and push the changes to the ${{ env.NEW_BRANCH }} branch
|
||||
uses: EndBug/add-and-commit@v9
|
||||
with:
|
||||
message: "Update version for the next release (${{ env.NEW_VERSION }}) in Cargo.toml files"
|
||||
message: "Update version for the next release (${{ env.NEW_VERSION }}) in Cargo.toml"
|
||||
new_branch: ${{ env.NEW_BRANCH }}
|
||||
- name: Create the PR pointing to ${{ github.ref_name }}
|
||||
run: |
|
||||
gh pr create \
|
||||
--title "Update version for the next release ($NEW_VERSION) in Cargo.toml files" \
|
||||
--body '⚠️ This PR is automatically generated. Check the new version is the expected one before merging.' \
|
||||
--title "Update version for the next release ($NEW_VERSION) in Cargo.toml" \
|
||||
--body '⚠️ This PR is automatically generated. Check the new version is the expected one and Cargo.lock has been updated before merging.' \
|
||||
--label 'skip changelog' \
|
||||
--milestone $NEW_VERSION \
|
||||
--base $GITHUB_REF_NAME
|
||||
|
6
.gitignore
vendored
6
.gitignore
vendored
@ -1,3 +1,5 @@
|
||||
.idea/
|
||||
.vscode/
|
||||
/target
|
||||
**/*.csv
|
||||
**/*.json_lines
|
||||
@ -8,9 +10,11 @@
|
||||
/snapshots
|
||||
/dumps
|
||||
|
||||
|
||||
# Snapshots
|
||||
## ... large
|
||||
*.full.snap
|
||||
## ... unreviewed
|
||||
*.snap.new
|
||||
|
||||
# Fuzzcheck data for the facet indexing fuzz test
|
||||
milli/fuzz/update::facet::incremental::fuzz::fuzz/
|
||||
|
@ -18,9 +18,9 @@ If Meilisearch does not offer optimized support for your language, please consid
|
||||
|
||||
## Assumptions
|
||||
|
||||
1. **You're familiar with [GitHub](https://github.com) and the [Pull Requests](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests)(PR) workflow.**
|
||||
2. **You've read the Meilisearch [documentation](https://docs.meilisearch.com).**
|
||||
3. **You know about the [Meilisearch community](https://docs.meilisearch.com/learn/what_is_meilisearch/contact.html).
|
||||
1. **You're familiar with [GitHub](https://github.com) and the [Pull Requests (PR)](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests) workflow.**
|
||||
2. **You've read the Meilisearch [documentation](https://www.meilisearch.com/docs).**
|
||||
3. **You know about the [Meilisearch community on Discord](https://discord.meilisearch.com).
|
||||
Please use this for help.**
|
||||
|
||||
## How to Contribute
|
||||
@ -52,6 +52,23 @@ cargo test
|
||||
|
||||
This command will be triggered to each PR as a requirement for merging it.
|
||||
|
||||
#### Snapshot-based tests
|
||||
|
||||
We are using [insta](https://insta.rs) to perform snapshot-based testing.
|
||||
We recommend using the insta tooling (such as `cargo-insta`) to update the snapshots if they change following a PR.
|
||||
|
||||
New tests should use insta where possible rather than manual `assert` statements.
|
||||
|
||||
Furthermore, we provide some macros on top of insta, notably a way to use snapshot hashes instead of inline snapshots, saving a lot of space in the repository.
|
||||
|
||||
To effectively debug snapshot-based hashes, we recommend you export the `MEILI_TEST_FULL_SNAPS` environment variable so that snapshot are fully created locally:
|
||||
|
||||
```
|
||||
export MEILI_TEST_FULL_SNAPS=true # add this to your .bashrc, .zshrc, ...
|
||||
```
|
||||
|
||||
#### Test troubleshooting
|
||||
|
||||
If you get a "Too many open files" error you might want to increase the open file limit using this command:
|
||||
|
||||
```bash
|
||||
@ -99,6 +116,14 @@ _[Read more about this](https://github.com/meilisearch/integration-guides/blob/m
|
||||
|
||||
The full Meilisearch release process is described in [this guide](https://github.com/meilisearch/engine-team/blob/main/resources/meilisearch-release.md). Please follow it carefully before doing any release.
|
||||
|
||||
### How to publish a prototype
|
||||
|
||||
Depending on the developed feature, you might need to provide a prototyped version of Meilisearch to make it easier to test by the users.
|
||||
|
||||
This happens in two steps:
|
||||
- [Release the prototype](https://github.com/meilisearch/engine-team/blob/main/resources/prototypes.md#how-to-publish-a-prototype)
|
||||
- [Communicate about it](https://github.com/meilisearch/engine-team/blob/main/resources/prototypes.md#communication)
|
||||
|
||||
### Release assets
|
||||
|
||||
For each release, the following assets are created:
|
||||
|
1537
Cargo.lock
generated
1537
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
39
Cargo.toml
39
Cargo.toml
@ -9,13 +9,50 @@ members = [
|
||||
"dump",
|
||||
"file-store",
|
||||
"permissive-json-pointer",
|
||||
"milli",
|
||||
"filter-parser",
|
||||
"flatten-serde-json",
|
||||
"json-depth-checker",
|
||||
"benchmarks"
|
||||
]
|
||||
|
||||
[workspace.package]
|
||||
version = "1.2.0"
|
||||
authors = ["Quentin de Quelen <quentin@dequelen.me>", "Clément Renault <clement@meilisearch.com>"]
|
||||
description = "Meilisearch HTTP server"
|
||||
homepage = "https://meilisearch.com"
|
||||
readme = "README.md"
|
||||
edition = "2021"
|
||||
license = "MIT"
|
||||
|
||||
[profile.release]
|
||||
codegen-units = 1
|
||||
|
||||
[profile.dev.package.flate2]
|
||||
opt-level = 3
|
||||
|
||||
[profile.dev.package.milli]
|
||||
[profile.dev.package.grenad]
|
||||
opt-level = 3
|
||||
[profile.dev.package.roaring]
|
||||
opt-level = 3
|
||||
|
||||
[profile.dev.package.lindera-ipadic-builder]
|
||||
opt-level = 3
|
||||
[profile.dev.package.encoding]
|
||||
opt-level = 3
|
||||
[profile.dev.package.yada]
|
||||
opt-level = 3
|
||||
|
||||
[profile.release.package.lindera-ipadic-builder]
|
||||
opt-level = 3
|
||||
[profile.release.package.encoding]
|
||||
opt-level = 3
|
||||
[profile.release.package.yada]
|
||||
opt-level = 3
|
||||
|
||||
[profile.bench.package.lindera-ipadic-builder]
|
||||
opt-level = 3
|
||||
[profile.bench.package.encoding]
|
||||
opt-level = 3
|
||||
[profile.bench.package.yada]
|
||||
opt-level = 3
|
||||
|
@ -1,3 +1,4 @@
|
||||
# syntax=docker/dockerfile:1.4
|
||||
# Compile
|
||||
FROM rust:alpine3.16 AS compiler
|
||||
|
||||
@ -7,10 +8,11 @@ WORKDIR /meilisearch
|
||||
|
||||
ARG COMMIT_SHA
|
||||
ARG COMMIT_DATE
|
||||
ENV VERGEN_GIT_SHA=${COMMIT_SHA} VERGEN_GIT_COMMIT_TIMESTAMP=${COMMIT_DATE}
|
||||
ARG GIT_TAG
|
||||
ENV VERGEN_GIT_SHA=${COMMIT_SHA} VERGEN_GIT_COMMIT_TIMESTAMP=${COMMIT_DATE} VERGEN_GIT_SEMVER_LIGHTWEIGHT=${GIT_TAG}
|
||||
ENV RUSTFLAGS="-C target-feature=-crt-static"
|
||||
|
||||
COPY . .
|
||||
COPY --link . .
|
||||
RUN set -eux; \
|
||||
apkArch="$(apk --print-arch)"; \
|
||||
if [ "$apkArch" = "aarch64" ]; then \
|
||||
@ -29,7 +31,7 @@ RUN apk update --quiet \
|
||||
|
||||
# add meilisearch to the `/bin` so you can run it from anywhere and it's easy
|
||||
# to find.
|
||||
COPY --from=compiler /meilisearch/target/release/meilisearch /bin/meilisearch
|
||||
COPY --from=compiler --link /meilisearch/target/release/meilisearch /bin/meilisearch
|
||||
# To stay compatible with the older version of the container (pre v0.27.0) we're
|
||||
# going to symlink the meilisearch binary in the path to `/meilisearch`
|
||||
RUN ln -s /bin/meilisearch /meilisearch
|
||||
|
58
README.md
58
README.md
@ -7,16 +7,15 @@
|
||||
<a href="https://www.meilisearch.com">Website</a> |
|
||||
<a href="https://roadmap.meilisearch.com/tabs/1-under-consideration">Roadmap</a> |
|
||||
<a href="https://blog.meilisearch.com">Blog</a> |
|
||||
<a href="https://docs.meilisearch.com">Documentation</a> |
|
||||
<a href="https://docs.meilisearch.com/faq/">FAQ</a> |
|
||||
<a href="https://www.meilisearch.com/docs">Documentation</a> |
|
||||
<a href="https://www.meilisearch.com/docs/faq">FAQ</a> |
|
||||
<a href="https://discord.meilisearch.com">Discord</a>
|
||||
</h4>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://github.com/meilisearch/meilisearch/actions"><img src="https://github.com/meilisearch/meilisearch/workflows/Cargo%20test/badge.svg" alt="Build Status"></a>
|
||||
<a href="https://deps.rs/repo/github/meilisearch/meilisearch"><img src="https://deps.rs/repo/github/meilisearch/meilisearch/status.svg" alt="Dependency status"></a>
|
||||
<a href="https://github.com/meilisearch/meilisearch/blob/main/LICENSE"><img src="https://img.shields.io/badge/license-MIT-informational" alt="License"></a>
|
||||
<a href="https://app.bors.tech/repositories/26457"><img src="https://bors.tech/images/badge_small.svg" alt="Bors enabled"></a>
|
||||
<a href="https://ms-bors.herokuapp.com/repositories/52"><img src="https://bors.tech/images/badge_small.svg" alt="Bors enabled"></a>
|
||||
</p>
|
||||
|
||||
<p align="center">⚡ A lightning-fast search engine that fits effortlessly into your apps, websites, and workflow 🔍</p>
|
||||
@ -37,27 +36,27 @@ Meilisearch helps you shape a delightful search experience in a snap, offering f
|
||||
## ✨ Features
|
||||
|
||||
- **Search-as-you-type:** find search results in less than 50 milliseconds
|
||||
- **[Typo tolerance](https://docs.meilisearch.com/learn/getting_started/customizing_relevancy.html#typo-tolerance):** get relevant matches even when queries contain typos and misspellings
|
||||
- **[Filtering and faceted search](https://docs.meilisearch.com/learn/advanced/filtering_and_faceted_search.html):** enhance your user's search experience with custom filters and build a faceted search interface in a few lines of code
|
||||
- **[Sorting](https://docs.meilisearch.com/learn/advanced/sorting.html):** sort results based on price, date, or pretty much anything else your users need
|
||||
- **[Synonym support](https://docs.meilisearch.com/learn/getting_started/customizing_relevancy.html#synonyms):** configure synonyms to include more relevant content in your search results
|
||||
- **[Geosearch](https://docs.meilisearch.com/learn/advanced/geosearch.html):** filter and sort documents based on geographic data
|
||||
- **[Extensive language support](https://docs.meilisearch.com/learn/what_is_meilisearch/language.html):** search datasets in any language, with optimized support for Chinese, Japanese, Hebrew, and languages using the Latin alphabet
|
||||
- **[Security management](https://docs.meilisearch.com/learn/security/master_api_keys.html):** control which users can access what data with API keys that allow fine-grained permissions handling
|
||||
- **[Multi-Tenancy](https://docs.meilisearch.com/learn/security/tenant_tokens.html):** personalize search results for any number of application tenants
|
||||
- **[Typo tolerance](https://www.meilisearch.com/docs/learn/getting_started/customizing_relevancy#typo-tolerance):** get relevant matches even when queries contain typos and misspellings
|
||||
- **[Filtering](https://www.meilisearch.com/docs/learn/advanced/filtering) and [faceted search](https://www.meilisearch.com/docs/learn/advanced/faceted_search):** enhance your user's search experience with custom filters and build a faceted search interface in a few lines of code
|
||||
- **[Sorting](https://www.meilisearch.com/docs/learn/advanced/sorting):** sort results based on price, date, or pretty much anything else your users need
|
||||
- **[Synonym support](https://www.meilisearch.com/docs/learn/getting_started/customizing_relevancy#synonyms):** configure synonyms to include more relevant content in your search results
|
||||
- **[Geosearch](https://www.meilisearch.com/docs/learn/advanced/geosearch):** filter and sort documents based on geographic data
|
||||
- **[Extensive language support](https://www.meilisearch.com/docs/learn/what_is_meilisearch/language):** search datasets in any language, with optimized support for Chinese, Japanese, Hebrew, and languages using the Latin alphabet
|
||||
- **[Security management](https://www.meilisearch.com/docs/learn/security/master_api_keys):** control which users can access what data with API keys that allow fine-grained permissions handling
|
||||
- **[Multi-Tenancy](https://www.meilisearch.com/docs/learn/security/tenant_tokens):** personalize search results for any number of application tenants
|
||||
- **Highly Customizable:** customize Meilisearch to your specific needs or use our out-of-the-box and hassle-free presets
|
||||
- **[RESTful API](https://docs.meilisearch.com/reference/api/overview.html):** integrate Meilisearch in your technical stack with our plugins and SDKs
|
||||
- **[RESTful API](https://www.meilisearch.com/docs/reference/api/overview):** integrate Meilisearch in your technical stack with our plugins and SDKs
|
||||
- **Easy to install, deploy, and maintain**
|
||||
|
||||
## 📖 Documentation
|
||||
|
||||
You can consult Meilisearch's documentation at [https://docs.meilisearch.com](https://docs.meilisearch.com/).
|
||||
You can consult Meilisearch's documentation at [https://www.meilisearch.com/docs](https://www.meilisearch.com/docs/).
|
||||
|
||||
## 🚀 Getting started
|
||||
|
||||
For basic instructions on how to set up Meilisearch, add documents to an index, and search for documents, take a look at our [Quick Start](https://docs.meilisearch.com/learn/getting_started/quick_start.html) guide.
|
||||
For basic instructions on how to set up Meilisearch, add documents to an index, and search for documents, take a look at our [Quick Start](https://www.meilisearch.com/docs/learn/getting_started/quick_start) guide.
|
||||
|
||||
You may also want to check out [Meilisearch 101](https://docs.meilisearch.com/learn/getting_started/filtering_and_sorting.html) for an introduction to some of Meilisearch's most popular features.
|
||||
You may also want to check out [Meilisearch 101](https://www.meilisearch.com/docs/learn/getting_started/filtering_and_sorting) for an introduction to some of Meilisearch's most popular features.
|
||||
|
||||
## ☁️ Meilisearch cloud
|
||||
|
||||
@ -67,25 +66,25 @@ Let us manage your infrastructure so you can focus on integrating a great search
|
||||
|
||||
Install one of our SDKs in your project for seamless integration between Meilisearch and your favorite language or framework!
|
||||
|
||||
Take a look at the complete [Meilisearch integration list](https://docs.meilisearch.com/learn/what_is_meilisearch/sdks.html).
|
||||
Take a look at the complete [Meilisearch integration list](https://www.meilisearch.com/docs/learn/what_is_meilisearch/sdks).
|
||||
|
||||

|
||||
[](https://www.meilisearch.com/docs/learn/what_is_meilisearch/sdks)
|
||||
|
||||
## ⚙️ Advanced usage
|
||||
|
||||
Experienced users will want to keep our [API Reference](https://docs.meilisearch.com/reference/api) close at hand.
|
||||
Experienced users will want to keep our [API Reference](https://www.meilisearch.com/docs/reference/api/overview) close at hand.
|
||||
|
||||
We also offer a wide range of dedicated guides to all Meilisearch features, such as [filtering](https://docs.meilisearch.com/learn/advanced/filtering_and_faceted_search.html), [sorting](https://docs.meilisearch.com/learn/advanced/sorting.html), [geosearch](https://docs.meilisearch.com/learn/advanced/geosearch.html), [API keys](https://docs.meilisearch.com/learn/security/master_api_keys.html), and [tenant tokens](https://docs.meilisearch.com/learn/security/tenant_tokens.html).
|
||||
We also offer a wide range of dedicated guides to all Meilisearch features, such as [filtering](https://www.meilisearch.com/docs/learn/advanced/filtering), [sorting](https://www.meilisearch.com/docs/learn/advanced/sorting), [geosearch](https://www.meilisearch.com/docs/learn/advanced/geosearch), [API keys](https://www.meilisearch.com/docs/learn/security/master_api_keys), and [tenant tokens](https://www.meilisearch.com/docs/learn/security/tenant_tokens).
|
||||
|
||||
Finally, for more in-depth information, refer to our articles explaining fundamental Meilisearch concepts such as [documents](https://docs.meilisearch.com/learn/core_concepts/documents.html) and [indexes](https://docs.meilisearch.com/learn/core_concepts/indexes.html).
|
||||
Finally, for more in-depth information, refer to our articles explaining fundamental Meilisearch concepts such as [documents](https://www.meilisearch.com/docs/learn/core_concepts/documents) and [indexes](https://www.meilisearch.com/docs/learn/core_concepts/indexes).
|
||||
|
||||
## 📊 Telemetry
|
||||
|
||||
Meilisearch collects **anonymized** data from users to help us improve our product. You can [deactivate this](https://docs.meilisearch.com/learn/what_is_meilisearch/telemetry.html#how-to-disable-data-collection) whenever you want.
|
||||
Meilisearch collects **anonymized** data from users to help us improve our product. You can [deactivate this](https://www.meilisearch.com/docs/learn/what_is_meilisearch/telemetry#how-to-disable-data-collection) whenever you want.
|
||||
|
||||
To request deletion of collected data, please write to us at [privacy@meilisearch.com](mailto:privacy@meilisearch.com). Don't forget to include your `Instance UID` in the message, as this helps us quickly find and delete your data.
|
||||
|
||||
If you want to know more about the kind of data we collect and what we use it for, check the [telemetry section](https://docs.meilisearch.com/learn/what_is_meilisearch/telemetry.html) of our documentation.
|
||||
If you want to know more about the kind of data we collect and what we use it for, check the [telemetry section](https://www.meilisearch.com/docs/learn/what_is_meilisearch/telemetry) of our documentation.
|
||||
|
||||
## 📫 Get in touch!
|
||||
|
||||
@ -98,6 +97,17 @@ Meilisearch is a search engine created by [Meili](https://www.welcometothejungle
|
||||
- For feature requests, please visit our [product repository](https://github.com/meilisearch/product/discussions)
|
||||
- Found a bug? Open an [issue](https://github.com/meilisearch/meilisearch/issues)!
|
||||
- Want to be part of our Discord community? [Join us!](https://discord.gg/meilisearch)
|
||||
- For everything else, please check [this page listing some of the other places where you can find us](https://docs.meilisearch.com/learn/what_is_meilisearch/contact.html)
|
||||
|
||||
Thank you for your support!
|
||||
|
||||
## 👩💻 Contributing
|
||||
|
||||
Meilisearch is, and will always be, open-source! If you want to contribute to the project, please take a look at [our contribution guidelines](CONTRIBUTING.md).
|
||||
|
||||
## 📦 Versioning
|
||||
|
||||
Meilisearch releases and their associated binaries are available [in this GitHub page](https://github.com/meilisearch/meilisearch/releases).
|
||||
|
||||
The binaries are versioned following [SemVer conventions](https://semver.org/). To know more, read our [versioning policy](https://github.com/meilisearch/engine-team/blob/main/resources/versioning-policy.md).
|
||||
|
||||
Differently from the binaries, crates in this repository are not currently available on [crates.io](https://crates.io/) and do not follow [SemVer conventions](https://semver.org).
|
||||
|
6
assets/milli-logo.svg
Normal file
6
assets/milli-logo.svg
Normal file
@ -0,0 +1,6 @@
|
||||
<svg width="277" height="236" viewBox="0 0 277 236" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M213.085 190L242.907 86H276.196L246.375 190H213.085Z" fill="#494949"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M0 190L29.8215 86H63.1111L33.2896 190H0Z" fill="#494949"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M124.986 0L57.5772 235.083L60.7752 236H90.6038L158.276 0H124.986Z" fill="#494949"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M195.273 0L127.601 236H160.891L228.563 0H195.273Z" fill="#494949"/>
|
||||
</svg>
|
After Width: | Height: | Size: 585 B |
1
benchmarks/.gitignore
vendored
Normal file
1
benchmarks/.gitignore
vendored
Normal file
@ -0,0 +1 @@
|
||||
benches/datasets_paths.rs
|
50
benchmarks/Cargo.toml
Normal file
50
benchmarks/Cargo.toml
Normal file
@ -0,0 +1,50 @@
|
||||
[package]
|
||||
name = "benchmarks"
|
||||
publish = false
|
||||
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
description.workspace = true
|
||||
homepage.workspace = true
|
||||
readme.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1.0.70"
|
||||
csv = "1.2.1"
|
||||
milli = { path = "../milli" }
|
||||
mimalloc = { version = "0.1.36", default-features = false }
|
||||
serde_json = { version = "1.0.95", features = ["preserve_order"] }
|
||||
|
||||
[dev-dependencies]
|
||||
criterion = { version = "0.4.0", features = ["html_reports"] }
|
||||
rand = "0.8.5"
|
||||
rand_chacha = "0.3.1"
|
||||
roaring = "0.10.1"
|
||||
|
||||
[build-dependencies]
|
||||
anyhow = "1.0.70"
|
||||
bytes = "1.4.0"
|
||||
convert_case = "0.6.0"
|
||||
flate2 = "1.0.25"
|
||||
reqwest = { version = "0.11.16", features = ["blocking", "rustls-tls"], default-features = false }
|
||||
|
||||
[features]
|
||||
default = ["milli/all-tokenizations"]
|
||||
|
||||
[[bench]]
|
||||
name = "search_songs"
|
||||
harness = false
|
||||
|
||||
[[bench]]
|
||||
name = "search_wiki"
|
||||
harness = false
|
||||
|
||||
[[bench]]
|
||||
name = "search_geo"
|
||||
harness = false
|
||||
|
||||
[[bench]]
|
||||
name = "indexing"
|
||||
harness = false
|
138
benchmarks/README.md
Normal file
138
benchmarks/README.md
Normal file
@ -0,0 +1,138 @@
|
||||
Benchmarks
|
||||
==========
|
||||
|
||||
## TOC
|
||||
|
||||
- [Run the benchmarks](#run-the-benchmarks)
|
||||
- [Comparison between benchmarks](#comparison-between-benchmarks)
|
||||
- [Datasets](#datasets)
|
||||
|
||||
## Run the benchmarks
|
||||
|
||||
### On our private server
|
||||
|
||||
The Meili team has self-hosted his own GitHub runner to run benchmarks on our dedicated bare metal server.
|
||||
|
||||
To trigger the benchmark workflow:
|
||||
- Go to the `Actions` tab of this repository.
|
||||
- Select the `Benchmarks` workflow on the left.
|
||||
- Click on `Run workflow` in the blue banner.
|
||||
- Select the branch on which you want to run the benchmarks and select the dataset you want (default: `songs`).
|
||||
- Finally, click on `Run workflow`.
|
||||
|
||||
This GitHub workflow will run the benchmarks and push the `critcmp` report to a DigitalOcean Space (= S3).
|
||||
|
||||
The name of the uploaded file is displayed in the workflow.
|
||||
|
||||
_[More about critcmp](https://github.com/BurntSushi/critcmp)._
|
||||
|
||||
💡 To compare the just-uploaded benchmark with another one, check out the [next section](#comparison-between-benchmarks).
|
||||
|
||||
### On your machine
|
||||
|
||||
To run all the benchmarks (~5h):
|
||||
|
||||
```bash
|
||||
cargo bench
|
||||
```
|
||||
|
||||
To run only the `search_songs` (~1h), `search_wiki` (~3h), `search_geo` (~20m) or `indexing` (~2h) benchmark:
|
||||
|
||||
```bash
|
||||
cargo bench --bench <dataset name>
|
||||
```
|
||||
|
||||
By default, the benchmarks will be downloaded and uncompressed automatically in the target directory.<br>
|
||||
If you don't want to download the datasets every time you update something on the code, you can specify a custom directory with the environment variable `MILLI_BENCH_DATASETS_PATH`:
|
||||
|
||||
```bash
|
||||
mkdir ~/datasets
|
||||
MILLI_BENCH_DATASETS_PATH=~/datasets cargo bench --bench search_songs # the four datasets are downloaded
|
||||
touch build.rs
|
||||
MILLI_BENCH_DATASETS_PATH=~/datasets cargo bench --bench songs # the code is compiled again but the datasets are not downloaded
|
||||
```
|
||||
|
||||
## Comparison between benchmarks
|
||||
|
||||
The benchmark reports we push are generated with `critcmp`. Thus, we use `critcmp` to show the result of a benchmark, or compare results between multiple benchmarks.
|
||||
|
||||
We provide a script to download and display the comparison report.
|
||||
|
||||
Requirements:
|
||||
- `grep`
|
||||
- `curl`
|
||||
- [`critcmp`](https://github.com/BurntSushi/critcmp)
|
||||
|
||||
List the available file in the DO Space:
|
||||
|
||||
```bash
|
||||
./benchmarks/script/list.sh
|
||||
```
|
||||
```bash
|
||||
songs_main_09a4321.json
|
||||
songs_geosearch_24ec456.json
|
||||
search_songs_main_cb45a10b.json
|
||||
```
|
||||
|
||||
Run the comparison script:
|
||||
|
||||
```bash
|
||||
# we get the result of ONE benchmark, this give you an idea of how much time an operation took
|
||||
./benchmarks/scripts/compare.sh son songs_geosearch_24ec456.json
|
||||
# we compare two benchmarks
|
||||
./benchmarks/scripts/compare.sh songs_main_09a4321.json songs_geosearch_24ec456.json
|
||||
# we compare three benchmarks
|
||||
./benchmarks/scripts/compare.sh songs_main_09a4321.json songs_geosearch_24ec456.json search_songs_main_cb45a10b.json
|
||||
```
|
||||
|
||||
## Datasets
|
||||
|
||||
The benchmarks uses the following datasets:
|
||||
- `smol-songs`
|
||||
- `smol-wiki`
|
||||
- `movies`
|
||||
- `smol-all-countries`
|
||||
|
||||
### Songs
|
||||
|
||||
`smol-songs` is a subset of the [`songs.csv` dataset](https://milli-benchmarks.fra1.digitaloceanspaces.com/datasets/songs.csv.gz).
|
||||
|
||||
It was generated with this command:
|
||||
|
||||
```bash
|
||||
xsv sample --seed 42 1000000 songs.csv -o smol-songs.csv
|
||||
```
|
||||
|
||||
_[Download the generated `smol-songs` dataset](https://milli-benchmarks.fra1.digitaloceanspaces.com/datasets/smol-songs.csv.gz)._
|
||||
|
||||
### Wiki
|
||||
|
||||
`smol-wiki` is a subset of the [`wikipedia-articles.csv` dataset](https://milli-benchmarks.fra1.digitaloceanspaces.com/datasets/wiki-articles.csv.gz).
|
||||
|
||||
It was generated with the following command:
|
||||
|
||||
```bash
|
||||
xsv sample --seed 42 500000 wiki-articles.csv -o smol-wiki-articles.csv
|
||||
```
|
||||
|
||||
_[Download the `smol-wiki` dataset](https://milli-benchmarks.fra1.digitaloceanspaces.com/datasets/smol-wiki-articles.csv.gz)._
|
||||
|
||||
### Movies
|
||||
|
||||
`movies` is a really small dataset we uses as our example in the [getting started](https://www.meilisearch.com/docs/learn/getting_started/quick_start)
|
||||
|
||||
_[Download the `movies` dataset](https://www.meilisearch.com/movies.json)._
|
||||
|
||||
|
||||
### All Countries
|
||||
|
||||
`smol-all-countries` is a subset of the [`all-countries.csv` dataset](https://milli-benchmarks.fra1.digitaloceanspaces.com/datasets/all-countries.csv.gz)
|
||||
It has been converted to jsonlines and then edited so it matches our format for the `_geo` field.
|
||||
|
||||
It was generated with the following command:
|
||||
```bash
|
||||
bat all-countries.csv.gz | gunzip | xsv sample --seed 42 1000000 | csv2json-lite | sd '"latitude":"(.*?)","longitude":"(.*?)"' '"_geo": { "lat": $1, "lng": $2 }' | sd '\[|\]|,$' '' | gzip > smol-all-countries.jsonl.gz
|
||||
```
|
||||
|
||||
_[Download the `smol-all-countries` dataset](https://milli-benchmarks.fra1.digitaloceanspaces.com/datasets/smol-all-countries.jsonl.gz)._
|
||||
|
1380
benchmarks/benches/indexing.rs
Normal file
1380
benchmarks/benches/indexing.rs
Normal file
File diff suppressed because it is too large
Load Diff
122
benchmarks/benches/search_geo.rs
Normal file
122
benchmarks/benches/search_geo.rs
Normal file
@ -0,0 +1,122 @@
|
||||
mod datasets_paths;
|
||||
mod utils;
|
||||
|
||||
use criterion::{criterion_group, criterion_main};
|
||||
use milli::update::Settings;
|
||||
use utils::Conf;
|
||||
|
||||
#[global_allocator]
|
||||
static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
|
||||
|
||||
fn base_conf(builder: &mut Settings) {
|
||||
let displayed_fields =
|
||||
["geonameid", "name", "asciiname", "alternatenames", "_geo", "population"]
|
||||
.iter()
|
||||
.map(|s| s.to_string())
|
||||
.collect();
|
||||
builder.set_displayed_fields(displayed_fields);
|
||||
|
||||
let searchable_fields =
|
||||
["name", "alternatenames", "elevation"].iter().map(|s| s.to_string()).collect();
|
||||
builder.set_searchable_fields(searchable_fields);
|
||||
|
||||
let filterable_fields =
|
||||
["_geo", "population", "elevation"].iter().map(|s| s.to_string()).collect();
|
||||
builder.set_filterable_fields(filterable_fields);
|
||||
|
||||
let sortable_fields =
|
||||
["_geo", "population", "elevation"].iter().map(|s| s.to_string()).collect();
|
||||
builder.set_sortable_fields(sortable_fields);
|
||||
}
|
||||
|
||||
#[rustfmt::skip]
|
||||
const BASE_CONF: Conf = Conf {
|
||||
dataset: datasets_paths::SMOL_ALL_COUNTRIES,
|
||||
dataset_format: "jsonl",
|
||||
queries: &[
|
||||
"",
|
||||
],
|
||||
configure: base_conf,
|
||||
primary_key: Some("geonameid"),
|
||||
..Conf::BASE
|
||||
};
|
||||
|
||||
fn bench_geo(c: &mut criterion::Criterion) {
|
||||
#[rustfmt::skip]
|
||||
let confs = &[
|
||||
// A basic placeholder with no geo
|
||||
utils::Conf {
|
||||
group_name: "placeholder with no geo",
|
||||
..BASE_CONF
|
||||
},
|
||||
// Medium aglomeration: probably the most common usecase
|
||||
utils::Conf {
|
||||
group_name: "asc sort from Lille",
|
||||
sort: Some(vec!["_geoPoint(50.62999333378238, 3.086269263384099):asc"]),
|
||||
..BASE_CONF
|
||||
},
|
||||
utils::Conf {
|
||||
group_name: "desc sort from Lille",
|
||||
sort: Some(vec!["_geoPoint(50.62999333378238, 3.086269263384099):desc"]),
|
||||
..BASE_CONF
|
||||
},
|
||||
// Big agglomeration: a lot of documents close to our point
|
||||
utils::Conf {
|
||||
group_name: "asc sort from Tokyo",
|
||||
sort: Some(vec!["_geoPoint(35.749512532692144, 139.61664952543356):asc"]),
|
||||
..BASE_CONF
|
||||
},
|
||||
utils::Conf {
|
||||
group_name: "desc sort from Tokyo",
|
||||
sort: Some(vec!["_geoPoint(35.749512532692144, 139.61664952543356):desc"]),
|
||||
..BASE_CONF
|
||||
},
|
||||
// The furthest point from any civilization
|
||||
utils::Conf {
|
||||
group_name: "asc sort from Point Nemo",
|
||||
sort: Some(vec!["_geoPoint(-48.87561645055408, -123.39275749319793):asc"]),
|
||||
..BASE_CONF
|
||||
},
|
||||
utils::Conf {
|
||||
group_name: "desc sort from Point Nemo",
|
||||
sort: Some(vec!["_geoPoint(-48.87561645055408, -123.39275749319793):desc"]),
|
||||
..BASE_CONF
|
||||
},
|
||||
// Filters
|
||||
utils::Conf {
|
||||
group_name: "filter of 100km from Lille",
|
||||
filter: Some("_geoRadius(50.62999333378238, 3.086269263384099, 100000)"),
|
||||
..BASE_CONF
|
||||
},
|
||||
utils::Conf {
|
||||
group_name: "filter of 1km from Lille",
|
||||
filter: Some("_geoRadius(50.62999333378238, 3.086269263384099, 1000)"),
|
||||
..BASE_CONF
|
||||
},
|
||||
utils::Conf {
|
||||
group_name: "filter of 100km from Tokyo",
|
||||
filter: Some("_geoRadius(35.749512532692144, 139.61664952543356, 100000)"),
|
||||
..BASE_CONF
|
||||
},
|
||||
utils::Conf {
|
||||
group_name: "filter of 1km from Tokyo",
|
||||
filter: Some("_geoRadius(35.749512532692144, 139.61664952543356, 1000)"),
|
||||
..BASE_CONF
|
||||
},
|
||||
utils::Conf {
|
||||
group_name: "filter of 100km from Point Nemo",
|
||||
filter: Some("_geoRadius(-48.87561645055408, -123.39275749319793, 100000)"),
|
||||
..BASE_CONF
|
||||
},
|
||||
utils::Conf {
|
||||
group_name: "filter of 1km from Point Nemo",
|
||||
filter: Some("_geoRadius(-48.87561645055408, -123.39275749319793, 1000)"),
|
||||
..BASE_CONF
|
||||
},
|
||||
];
|
||||
|
||||
utils::run_benches(c, confs);
|
||||
}
|
||||
|
||||
criterion_group!(benches, bench_geo);
|
||||
criterion_main!(benches);
|
196
benchmarks/benches/search_songs.rs
Normal file
196
benchmarks/benches/search_songs.rs
Normal file
@ -0,0 +1,196 @@
|
||||
mod datasets_paths;
|
||||
mod utils;
|
||||
|
||||
use criterion::{criterion_group, criterion_main};
|
||||
use milli::update::Settings;
|
||||
use utils::Conf;
|
||||
|
||||
#[global_allocator]
|
||||
static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
|
||||
|
||||
fn base_conf(builder: &mut Settings) {
|
||||
let displayed_fields =
|
||||
["id", "title", "album", "artist", "genre", "country", "released", "duration"]
|
||||
.iter()
|
||||
.map(|s| s.to_string())
|
||||
.collect();
|
||||
builder.set_displayed_fields(displayed_fields);
|
||||
|
||||
let searchable_fields = ["title", "album", "artist"].iter().map(|s| s.to_string()).collect();
|
||||
builder.set_searchable_fields(searchable_fields);
|
||||
|
||||
let faceted_fields = ["released-timestamp", "duration-float", "genre", "country", "artist"]
|
||||
.iter()
|
||||
.map(|s| s.to_string())
|
||||
.collect();
|
||||
builder.set_filterable_fields(faceted_fields);
|
||||
}
|
||||
|
||||
/// Shared configuration for every songs benchmark.
///
/// Individual benches override fields with struct-update syntax
/// (`..BASE_CONF`); anything not set here comes from `Conf::BASE`.
#[rustfmt::skip]
const BASE_CONF: Conf = Conf {
    dataset: datasets_paths::SMOL_SONGS,
    // The trailing space in each query disables prefix search on the last
    // word; the number is the candidate count observed on this dataset.
    queries: &[
        "john ",             // 9097
        "david ",            // 4794
        "charles ",          // 1957
        "david bowie ",      // 1200
        "michael jackson ",  // 600
        "thelonious monk ",  // 303
        "charles mingus ",   // 142
        "marcus miller ",    // 60
        "tamo ",             // 13
        "Notstandskomitee ", // 4
    ],
    configure: base_conf,
    primary_key: Some("id"),
    ..Conf::BASE
};
|
||||
|
||||
fn bench_songs(c: &mut criterion::Criterion) {
|
||||
let default_criterion: Vec<String> =
|
||||
milli::default_criteria().iter().map(|criteria| criteria.to_string()).collect();
|
||||
let default_criterion = default_criterion.iter().map(|s| s.as_str());
|
||||
let asc_default: Vec<&str> =
|
||||
std::iter::once("released-timestamp:asc").chain(default_criterion.clone()).collect();
|
||||
let desc_default: Vec<&str> =
|
||||
std::iter::once("released-timestamp:desc").chain(default_criterion.clone()).collect();
|
||||
|
||||
let basic_with_quote: Vec<String> = BASE_CONF
|
||||
.queries
|
||||
.iter()
|
||||
.map(|s| {
|
||||
s.trim().split(' ').map(|s| format!(r#""{}""#, s)).collect::<Vec<String>>().join(" ")
|
||||
})
|
||||
.collect();
|
||||
let basic_with_quote: &[&str] =
|
||||
&basic_with_quote.iter().map(|s| s.as_str()).collect::<Vec<&str>>();
|
||||
|
||||
#[rustfmt::skip]
|
||||
let confs = &[
|
||||
/* first we bench each criterion alone */
|
||||
utils::Conf {
|
||||
group_name: "proximity",
|
||||
queries: &[
|
||||
"black saint sinner lady ",
|
||||
"les dangeureuses 1960 ",
|
||||
"The Disneyland Sing-Along Chorus ",
|
||||
"Under Great Northern Lights ",
|
||||
"7000 Danses Un Jour Dans Notre Vie ",
|
||||
],
|
||||
criterion: Some(&["proximity"]),
|
||||
optional_words: false,
|
||||
..BASE_CONF
|
||||
},
|
||||
utils::Conf {
|
||||
group_name: "typo",
|
||||
queries: &[
|
||||
"mongus ",
|
||||
"thelonius monk ",
|
||||
"Disnaylande ",
|
||||
"the white striper ",
|
||||
"indochie ",
|
||||
"indochien ",
|
||||
"klub des loopers ",
|
||||
"fear of the duck ",
|
||||
"michel depech ",
|
||||
"stromal ",
|
||||
"dire straights ",
|
||||
"Arethla Franklin ",
|
||||
],
|
||||
criterion: Some(&["typo"]),
|
||||
optional_words: false,
|
||||
..BASE_CONF
|
||||
},
|
||||
utils::Conf {
|
||||
group_name: "words",
|
||||
queries: &[
|
||||
"the black saint and the sinner lady and the good doggo ", // four words to pop
|
||||
"les liaisons dangeureuses 1793 ", // one word to pop
|
||||
"The Disneyland Children's Sing-Alone song ", // two words to pop
|
||||
"seven nation mummy ", // one word to pop
|
||||
"7000 Danses / Le Baiser / je me trompe de mots ", // four words to pop
|
||||
"Bring Your Daughter To The Slaughter but now this is not part of the title ", // nine words to pop
|
||||
"whathavenotnsuchforth and a good amount of words to pop to match the first one ", // 13
|
||||
],
|
||||
criterion: Some(&["words"]),
|
||||
..BASE_CONF
|
||||
},
|
||||
utils::Conf {
|
||||
group_name: "asc",
|
||||
criterion: Some(&["released-timestamp:desc"]),
|
||||
..BASE_CONF
|
||||
},
|
||||
utils::Conf {
|
||||
group_name: "desc",
|
||||
criterion: Some(&["released-timestamp:desc"]),
|
||||
..BASE_CONF
|
||||
},
|
||||
|
||||
/* then we bench the asc and desc criterion on top of the default criterion */
|
||||
utils::Conf {
|
||||
group_name: "asc + default",
|
||||
criterion: Some(&asc_default[..]),
|
||||
..BASE_CONF
|
||||
},
|
||||
utils::Conf {
|
||||
group_name: "desc + default",
|
||||
criterion: Some(&desc_default[..]),
|
||||
..BASE_CONF
|
||||
},
|
||||
|
||||
/* we bench the filters with the default request */
|
||||
utils::Conf {
|
||||
group_name: "basic filter: <=",
|
||||
filter: Some("released-timestamp <= 946728000"), // year 2000
|
||||
..BASE_CONF
|
||||
},
|
||||
utils::Conf {
|
||||
group_name: "basic filter: TO",
|
||||
filter: Some("released-timestamp 946728000 TO 1262347200"), // year 2000 to 2010
|
||||
..BASE_CONF
|
||||
},
|
||||
utils::Conf {
|
||||
group_name: "big filter",
|
||||
filter: Some("released-timestamp != 1262347200 AND (NOT (released-timestamp = 946728000)) AND (duration-float = 1 OR (duration-float 1.1 TO 1.5 AND released-timestamp > 315576000))"),
|
||||
..BASE_CONF
|
||||
},
|
||||
|
||||
/* the we bench some global / normal search with all the default criterion in the default
|
||||
* order */
|
||||
utils::Conf {
|
||||
group_name: "basic placeholder",
|
||||
queries: &[""],
|
||||
..BASE_CONF
|
||||
},
|
||||
utils::Conf {
|
||||
group_name: "basic without quote",
|
||||
queries: &BASE_CONF
|
||||
.queries
|
||||
.iter()
|
||||
.map(|s| s.trim()) // we remove the space at the end of each request
|
||||
.collect::<Vec<&str>>(),
|
||||
..BASE_CONF
|
||||
},
|
||||
utils::Conf {
|
||||
group_name: "basic with quote",
|
||||
queries: basic_with_quote,
|
||||
..BASE_CONF
|
||||
},
|
||||
utils::Conf {
|
||||
group_name: "prefix search",
|
||||
queries: &[
|
||||
"s", // 500k+ results
|
||||
"a", //
|
||||
"b", //
|
||||
"i", //
|
||||
"x", // only 7k results
|
||||
],
|
||||
..BASE_CONF
|
||||
},
|
||||
];
|
||||
|
||||
utils::run_benches(c, confs);
|
||||
}
|
||||
|
||||
criterion_group!(benches, bench_songs);
|
||||
criterion_main!(benches);
|
129
benchmarks/benches/search_wiki.rs
Normal file
129
benchmarks/benches/search_wiki.rs
Normal file
@ -0,0 +1,129 @@
|
||||
mod datasets_paths;
|
||||
mod utils;
|
||||
|
||||
use criterion::{criterion_group, criterion_main};
|
||||
use milli::update::Settings;
|
||||
use utils::Conf;
|
||||
|
||||
#[global_allocator]
|
||||
static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
|
||||
|
||||
fn base_conf(builder: &mut Settings) {
|
||||
let displayed_fields = ["title", "body", "url"].iter().map(|s| s.to_string()).collect();
|
||||
builder.set_displayed_fields(displayed_fields);
|
||||
|
||||
let searchable_fields = ["title", "body"].iter().map(|s| s.to_string()).collect();
|
||||
builder.set_searchable_fields(searchable_fields);
|
||||
}
|
||||
|
||||
/// Shared configuration for every wiki-articles benchmark; individual
/// benches override fields with `..BASE_CONF`.
#[rustfmt::skip]
const BASE_CONF: Conf = Conf {
    dataset: datasets_paths::SMOL_WIKI_ARTICLES,
    // The trailing space in each query disables prefix search on the last
    // word; the number is the candidate count observed on this dataset.
    queries: &[
        "mingus ",        // 46 candidates
        "miles davis ",   // 159
        "rock and roll ", // 1007
        "machine ",       // 3448
        "spain ",         // 7002
        "japan ",         // 10.593
        "france ",        // 17.616
        "film ",          // 24.959
    ],
    configure: base_conf,
    ..Conf::BASE
};
|
||||
|
||||
/// Benchmarks search over the wiki-articles dataset: proximity, typo and
/// words criteria in isolation, then plain / quoted / prefix searches with
/// the default ranking rules.
///
/// NOTE(review): named `bench_songs` although it benches the wiki dataset
/// (likely copied from search_songs.rs); renaming would also require
/// updating the `criterion_group!` invocation below.
fn bench_songs(c: &mut criterion::Criterion) {
    // Same queries as BASE_CONF but with every word double-quoted.
    let basic_with_quote: Vec<String> = BASE_CONF
        .queries
        .iter()
        .map(|s| {
            s.trim().split(' ').map(|s| format!(r#""{}""#, s)).collect::<Vec<String>>().join(" ")
        })
        .collect();
    let basic_with_quote: &[&str] =
        &basic_with_quote.iter().map(|s| s.as_str()).collect::<Vec<&str>>();

    #[rustfmt::skip]
    let confs = &[
        /* first we bench each criterion alone */
        utils::Conf {
            group_name: "proximity",
            queries: &[
                "herald sings ",
                "april paris ",
                "tea two ",
                "diesel engine ",
            ],
            criterion: Some(&["proximity"]),
            optional_words: false,
            ..BASE_CONF
        },
        utils::Conf {
            group_name: "typo",
            queries: &[
                "migrosoft ",
                "linax ",
                "Disnaylande ",
                "phytogropher ",
                "nympalidea ",
                "aritmetric ",
                "the fronce ",
                "sisan ",
            ],
            criterion: Some(&["typo"]),
            optional_words: false,
            ..BASE_CONF
        },
        utils::Conf {
            group_name: "words",
            queries: &[
                "the black saint and the sinner lady and the good doggo ", // four words to pop, 27 results
                "Kameya Tokujirō mingus monk ", // two words to pop, 55
                "Ulrich Hensel meilisearch milli ", // two words to pop, 306
                "Idaho Bellevue pizza ", // one word to pop, 800
                "Abraham machin ", // one word to pop, 1141
            ],
            criterion: Some(&["words"]),
            ..BASE_CONF
        },
        /* then we bench some global / normal search with all the default criterion in the default
         * order */
        utils::Conf {
            group_name: "basic placeholder",
            queries: &[""],
            ..BASE_CONF
        },
        utils::Conf {
            group_name: "basic without quote",
            queries: &BASE_CONF
                .queries
                .iter()
                .map(|s| s.trim()) // we remove the space at the end of each request
                .collect::<Vec<&str>>(),
            ..BASE_CONF
        },
        utils::Conf {
            group_name: "basic with quote",
            queries: basic_with_quote,
            ..BASE_CONF
        },
        utils::Conf {
            group_name: "prefix search",
            queries: &[
                "t", // 453k results
                "c", // 405k
                "g", // 318k
                "j", // 227k
                "q", // 71k
                "x", // 17k
            ],
            ..BASE_CONF
        },
    ];

    utils::run_benches(c, confs);
}
|
||||
|
||||
criterion_group!(benches, bench_songs);
|
||||
criterion_main!(benches);
|
256
benchmarks/benches/utils.rs
Normal file
256
benchmarks/benches/utils.rs
Normal file
@ -0,0 +1,256 @@
|
||||
#![allow(dead_code)]
|
||||
|
||||
use std::fs::{create_dir_all, remove_dir_all, File};
|
||||
use std::io::{self, BufRead, BufReader, Cursor, Read, Seek};
|
||||
use std::num::ParseFloatError;
|
||||
use std::path::Path;
|
||||
use std::str::FromStr;
|
||||
|
||||
use criterion::BenchmarkId;
|
||||
use milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader};
|
||||
use milli::heed::EnvOpenOptions;
|
||||
use milli::update::{
|
||||
IndexDocuments, IndexDocumentsConfig, IndexDocumentsMethod, IndexerConfig, Settings,
|
||||
};
|
||||
use milli::{Criterion, Filter, Index, Object, TermsMatchingStrategy};
|
||||
use serde_json::Value;
|
||||
|
||||
/// Description of a single benchmark group: dataset, index settings, and
/// the search parameters shared by every query in the group.
pub struct Conf<'a> {
    /// where we are going to create our database.mmdb directory
    /// each benchmark will first try to delete it and then recreate it
    pub database_name: &'a str,
    /// the dataset to be used, it must be an uncompressed file whose format
    /// matches `dataset_format`
    pub dataset: &'a str,
    /// The format of the dataset: "csv", "json" or "jsonl"
    pub dataset_format: &'a str,
    /// Name under which criterion reports the benchmark group.
    pub group_name: &'a str,
    /// The search queries benched for this configuration.
    pub queries: &'a [&'a str],
    /// here you can change which criterion are used and in which order.
    /// - if you specify something all the base configuration will be thrown out
    /// - if you don't specify anything (None) the default configuration will be kept
    pub criterion: Option<&'a [&'a str]>,
    /// the last chance to configure your database as you want
    pub configure: fn(&mut Settings),
    /// Optional filter expression applied to every query of the group.
    pub filter: Option<&'a str>,
    /// Optional sort criteria applied to every query of the group.
    pub sort: Option<Vec<&'a str>>,
    /// enable or disable the optional words on the query
    pub optional_words: bool,
    /// primary key, if there is None we'll auto-generate docids for every documents
    pub primary_key: Option<&'a str>,
}
|
||||
|
||||
impl Conf<'_> {
    /// Sensible defaults every benchmark configuration starts from;
    /// override individual fields with struct-update syntax (`..Conf::BASE`).
    pub const BASE: Self = Conf {
        database_name: "benches.mmdb",
        dataset_format: "csv",
        dataset: "",
        group_name: "",
        queries: &[],
        criterion: None,
        configure: |_| (),
        filter: None,
        sort: None,
        optional_words: true,
        primary_key: None,
    };
}
|
||||
|
||||
/// Creates a fresh index for `conf`: wipes any previous database directory,
/// applies the settings, then indexes the whole dataset.
///
/// Panics on any failure — acceptable in benchmark setup code.
pub fn base_setup(conf: &Conf) -> Index {
    // Start from a clean slate; a missing directory is not an error.
    match remove_dir_all(conf.database_name) {
        Ok(_) => (),
        Err(e) if e.kind() == std::io::ErrorKind::NotFound => (),
        Err(e) => panic!("{}", e),
    }
    create_dir_all(conf.database_name).unwrap();

    let mut options = EnvOpenOptions::new();
    options.map_size(100 * 1024 * 1024 * 1024); // 100 GB
    options.max_readers(10);
    let index = Index::new(options, conf.database_name).unwrap();

    // First write transaction: settings only.
    let config = IndexerConfig::default();
    let mut wtxn = index.write_txn().unwrap();
    let mut builder = Settings::new(&mut wtxn, &index, &config);

    if let Some(primary_key) = conf.primary_key {
        builder.set_primary_key(primary_key.to_string());
    }

    // A custom criterion list replaces the whole base configuration.
    if let Some(criterion) = conf.criterion {
        builder.reset_filterable_fields();
        builder.reset_criteria();
        builder.reset_stop_words();

        let criterion = criterion.iter().map(|s| Criterion::from_str(s).unwrap()).collect();
        builder.set_criteria(criterion);
    }

    // Last-chance hook for per-benchmark settings tweaks.
    (conf.configure)(&mut builder);

    builder.execute(|_| (), || false).unwrap();
    wtxn.commit().unwrap();

    // Second write transaction: index the dataset documents.
    let config = IndexerConfig::default();
    let mut wtxn = index.write_txn().unwrap();
    let indexing_config = IndexDocumentsConfig {
        autogenerate_docids: conf.primary_key.is_none(),
        update_method: IndexDocumentsMethod::ReplaceDocuments,
        ..Default::default()
    };
    let builder =
        IndexDocuments::new(&mut wtxn, &index, &config, indexing_config, |_| (), || false).unwrap();
    let documents = documents_from(conf.dataset, conf.dataset_format);
    let (builder, user_error) = builder.add_documents(documents).unwrap();
    user_error.unwrap();
    builder.execute().unwrap();
    wtxn.commit().unwrap();

    index
}
|
||||
|
||||
/// Runs every configuration: builds an index per `Conf`, benches each of its
/// queries in a criterion group, then closes the index before the next one.
pub fn run_benches(c: &mut criterion::Criterion, confs: &[Conf]) {
    for conf in confs {
        let index = base_setup(conf);

        // Group name, e.g. "smol-songs.csv: typo".
        let file_name = Path::new(conf.dataset).file_name().and_then(|f| f.to_str()).unwrap();
        let name = format!("{}: {}", file_name, conf.group_name);
        let mut group = c.benchmark_group(&name);

        for &query in conf.queries {
            group.bench_with_input(BenchmarkId::from_parameter(query), &query, |b, &query| {
                b.iter(|| {
                    // Each measured iteration runs a complete search,
                    // including opening the read transaction.
                    let rtxn = index.read_txn().unwrap();
                    let mut search = index.search(&rtxn);
                    search.query(query).terms_matching_strategy(TermsMatchingStrategy::default());
                    if let Some(filter) = conf.filter {
                        let filter = Filter::from_str(filter).unwrap().unwrap();
                        search.filter(filter);
                    }
                    if let Some(sort) = &conf.sort {
                        let sort = sort.iter().map(|sort| sort.parse().unwrap()).collect();
                        search.sort_criteria(sort);
                    }
                    let _ids = search.execute().unwrap();
                });
            });
        }
        group.finish();

        // Release the environment before the next configuration rebuilds it.
        index.prepare_for_closing().wait();
    }
}
|
||||
|
||||
pub fn documents_from(filename: &str, filetype: &str) -> DocumentsBatchReader<impl BufRead + Seek> {
|
||||
let reader = File::open(filename)
|
||||
.unwrap_or_else(|_| panic!("could not find the dataset in: {}", filename));
|
||||
let reader = BufReader::new(reader);
|
||||
let documents = match filetype {
|
||||
"csv" => documents_from_csv(reader).unwrap(),
|
||||
"json" => documents_from_json(reader).unwrap(),
|
||||
"jsonl" => documents_from_jsonl(reader).unwrap(),
|
||||
otherwise => panic!("invalid update format {:?}", otherwise),
|
||||
};
|
||||
DocumentsBatchReader::from_reader(Cursor::new(documents)).unwrap()
|
||||
}
|
||||
|
||||
fn documents_from_jsonl(reader: impl BufRead) -> anyhow::Result<Vec<u8>> {
|
||||
let mut documents = DocumentsBatchBuilder::new(Vec::new());
|
||||
|
||||
for result in serde_json::Deserializer::from_reader(reader).into_iter::<Object>() {
|
||||
let object = result?;
|
||||
documents.append_json_object(&object)?;
|
||||
}
|
||||
|
||||
documents.into_inner().map_err(Into::into)
|
||||
}
|
||||
|
||||
fn documents_from_json(reader: impl BufRead) -> anyhow::Result<Vec<u8>> {
|
||||
let mut documents = DocumentsBatchBuilder::new(Vec::new());
|
||||
|
||||
documents.append_json_array(reader)?;
|
||||
|
||||
documents.into_inner().map_err(Into::into)
|
||||
}
|
||||
|
||||
fn documents_from_csv(reader: impl BufRead) -> anyhow::Result<Vec<u8>> {
|
||||
let csv = csv::Reader::from_reader(reader);
|
||||
|
||||
let mut documents = DocumentsBatchBuilder::new(Vec::new());
|
||||
documents.append_csv(csv)?;
|
||||
|
||||
documents.into_inner().map_err(Into::into)
|
||||
}
|
||||
|
||||
/// The two column types supported by the CSV header syntax `name:type`.
// Derives added so the type can be compared, copied and printed in
// diagnostics/tests; previously it supported none of these.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum AllowedType {
    String,
    Number,
}

/// Parses a CSV header cell of the form `name[:type]`.
///
/// The suffix is taken after the *last* `:` so field names may themselves
/// contain colons. An unknown suffix falls back to treating the whole cell
/// as a string-typed field name.
fn parse_csv_header(header: &str) -> (String, AllowedType) {
    // if there are several separators we only split on the last one.
    match header.rsplit_once(':') {
        Some((field_name, field_type)) => match field_type {
            "string" => (field_name.to_string(), AllowedType::String),
            "number" => (field_name.to_string(), AllowedType::Number),
            // we may return an error in this case.
            _otherwise => (header.to_string(), AllowedType::String),
        },
        None => (header.to_string(), AllowedType::String),
    }
}
|
||||
|
||||
/// Streaming deserializer turning CSV records into JSON objects, using the
/// typed header line (`name:type`, see `parse_csv_header`) to decide
/// between string and number values.
///
/// NOTE(review): nothing in the code visible here uses this type (the file
/// is `#![allow(dead_code)]` and `documents_from_csv` goes through milli's
/// `DocumentsBatchBuilder::append_csv` instead) — confirm before removing.
struct CSVDocumentDeserializer<R>
where
    R: Read,
{
    documents: csv::StringRecordsIntoIter<R>,
    headers: Vec<(String, AllowedType)>,
}
|
||||
|
||||
impl<R: Read> CSVDocumentDeserializer<R> {
    /// Builds a deserializer from a raw CSV reader, parsing the header line
    /// into `(field_name, type)` pairs used for every subsequent record.
    fn from_reader(reader: R) -> io::Result<Self> {
        let mut records = csv::Reader::from_reader(reader);

        let headers = records.headers()?.into_iter().map(parse_csv_header).collect();

        Ok(Self { documents: records.into_records(), headers })
    }
}
|
||||
|
||||
impl<R: Read> Iterator for CSVDocumentDeserializer<R> {
    type Item = anyhow::Result<Object>;

    /// Yields the next CSV record as a JSON object, or an error when the
    /// record is malformed or a number-typed column fails to parse as f64.
    fn next(&mut self) -> Option<Self::Item> {
        let csv_document = self.documents.next()?;

        match csv_document {
            Ok(csv_document) => {
                let mut document = Object::new();

                // Pair each cell with its header; extra cells beyond the
                // header count are silently dropped by `zip`.
                for ((field_name, field_type), value) in
                    self.headers.iter().zip(csv_document.into_iter())
                {
                    let parsed_value: Result<Value, ParseFloatError> = match field_type {
                        AllowedType::Number => {
                            value.parse::<f64>().map(Value::from).map_err(Into::into)
                        }
                        AllowedType::String => Ok(Value::String(value.to_string())),
                    };

                    match parsed_value {
                        // `drop` discards the previous value, if any, that
                        // `insert` returns for a duplicated field name.
                        Ok(value) => drop(document.insert(field_name.to_string(), value)),
                        Err(_e) => {
                            return Some(Err(anyhow::anyhow!(
                                "Value '{}' is not a valid number",
                                value
                            )))
                        }
                    }
                }

                Some(Ok(document))
            }
            Err(e) => Some(Err(anyhow::anyhow!("Error parsing csv document: {}", e))),
        }
    }
}
|
115
benchmarks/build.rs
Normal file
115
benchmarks/build.rs
Normal file
@ -0,0 +1,115 @@
|
||||
use std::fs::File;
|
||||
use std::io::{Cursor, Read, Seek, Write};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::{env, fs};
|
||||
|
||||
use bytes::Bytes;
|
||||
use convert_case::{Case, Casing};
|
||||
use flate2::read::GzDecoder;
|
||||
use reqwest::IntoUrl;
|
||||
|
||||
const BASE_URL: &str = "https://milli-benchmarks.fra1.digitaloceanspaces.com/datasets";
|
||||
|
||||
const DATASET_SONGS: (&str, &str) = ("smol-songs", "csv");
|
||||
const DATASET_SONGS_1_2: (&str, &str) = ("smol-songs-1_2", "csv");
|
||||
const DATASET_SONGS_3_4: (&str, &str) = ("smol-songs-3_4", "csv");
|
||||
const DATASET_SONGS_4_4: (&str, &str) = ("smol-songs-4_4", "csv");
|
||||
const DATASET_WIKI: (&str, &str) = ("smol-wiki-articles", "csv");
|
||||
const DATASET_WIKI_1_2: (&str, &str) = ("smol-wiki-articles-1_2", "csv");
|
||||
const DATASET_WIKI_3_4: (&str, &str) = ("smol-wiki-articles-3_4", "csv");
|
||||
const DATASET_WIKI_4_4: (&str, &str) = ("smol-wiki-articles-4_4", "csv");
|
||||
const DATASET_MOVIES: (&str, &str) = ("movies", "json");
|
||||
const DATASET_MOVIES_1_2: (&str, &str) = ("movies-1_2", "json");
|
||||
const DATASET_MOVIES_3_4: (&str, &str) = ("movies-3_4", "json");
|
||||
const DATASET_MOVIES_4_4: (&str, &str) = ("movies-4_4", "json");
|
||||
const DATASET_NESTED_MOVIES: (&str, &str) = ("nested_movies", "json");
|
||||
const DATASET_GEO: (&str, &str) = ("smol-all-countries", "jsonl");
|
||||
|
||||
const ALL_DATASETS: &[(&str, &str)] = &[
|
||||
DATASET_SONGS,
|
||||
DATASET_SONGS_1_2,
|
||||
DATASET_SONGS_3_4,
|
||||
DATASET_SONGS_4_4,
|
||||
DATASET_WIKI,
|
||||
DATASET_WIKI_1_2,
|
||||
DATASET_WIKI_3_4,
|
||||
DATASET_WIKI_4_4,
|
||||
DATASET_MOVIES,
|
||||
DATASET_MOVIES_1_2,
|
||||
DATASET_MOVIES_3_4,
|
||||
DATASET_MOVIES_4_4,
|
||||
DATASET_NESTED_MOVIES,
|
||||
DATASET_GEO,
|
||||
];
|
||||
|
||||
/// The name of the environment variable used to select the path
|
||||
/// of the directory containing the datasets
|
||||
const BASE_DATASETS_PATH_KEY: &str = "MILLI_BENCH_DATASETS_PATH";
|
||||
|
||||
/// Build script: generates `benches/datasets_paths.rs` with one path
/// constant per dataset, and downloads + gunzips any dataset that is not
/// already present on disk.
fn main() -> anyhow::Result<()> {
    // Datasets live either in the user-chosen directory
    // (MILLI_BENCH_DATASETS_PATH) or in cargo's OUT_DIR.
    let out_dir = PathBuf::from(env::var(BASE_DATASETS_PATH_KEY).unwrap_or(env::var("OUT_DIR")?));

    let benches_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR")?).join("benches");
    let mut manifest_paths_file = File::create(benches_dir.join("datasets_paths.rs"))?;
    write!(
        manifest_paths_file,
        r#"//! This file is generated by the build script.
//! Do not modify by hand, use the build.rs file.
#![allow(dead_code)]
"#
    )?;
    writeln!(manifest_paths_file)?;

    for (dataset, extension) in ALL_DATASETS {
        let out_path = out_dir.join(dataset);
        let out_file = out_path.with_extension(extension);

        // Always (re)emit the constant, even when the file already exists.
        writeln!(
            &mut manifest_paths_file,
            r#"pub const {}: &str = {:?};"#,
            dataset.to_case(Case::ScreamingSnake),
            out_file.display(),
        )?;

        if out_file.exists() {
            eprintln!(
                "The dataset {} already exists on the file system and will not be downloaded again",
                out_path.display(),
            );
            continue;
        }
        let url = format!("{}/{}.{}.gz", BASE_URL, dataset, extension);
        eprintln!("downloading: {}", url);
        // Network fetches are flaky; retry before giving up on the build.
        let bytes = retry(|| download_dataset(url.clone()), 10)?;
        eprintln!("{} downloaded successfully", url);
        eprintln!("uncompressing in {}", out_file.display());
        uncompress_in_file(bytes, &out_file)?;
    }

    Ok(())
}
|
||||
|
||||
/// Calls `fun` until it succeeds, making at most `times + 1` attempts
/// (`times` retries after the first call), and returns the first `Ok` or
/// the result of the final attempt.
fn retry<Ok, Err>(fun: impl Fn() -> Result<Ok, Err>, times: usize) -> Result<Ok, Err> {
    let mut attempts = 0;
    loop {
        let result = fun();
        if result.is_ok() || attempts >= times {
            return result;
        }
        attempts += 1;
    }
}
|
||||
|
||||
fn download_dataset<U: IntoUrl>(url: U) -> anyhow::Result<Cursor<Bytes>> {
|
||||
let bytes =
|
||||
reqwest::blocking::Client::builder().timeout(None).build()?.get(url).send()?.bytes()?;
|
||||
Ok(Cursor::new(bytes))
|
||||
}
|
||||
|
||||
fn uncompress_in_file<R: Read + Seek, P: AsRef<Path>>(bytes: R, path: P) -> anyhow::Result<()> {
|
||||
let path = path.as_ref();
|
||||
let mut gz = GzDecoder::new(bytes);
|
||||
let mut dataset = Vec::new();
|
||||
gz.read_to_end(&mut dataset)?;
|
||||
|
||||
fs::write(path, dataset)?;
|
||||
Ok(())
|
||||
}
|
38
benchmarks/scripts/compare.sh
Executable file
38
benchmarks/scripts/compare.sh
Executable file
@ -0,0 +1,38 @@
|
||||
#!/usr/bin/env bash

# Downloads benchmark result files from the S3 bucket (unless already cached
# in /tmp) and compares them with critcmp.
#
# Requirements:
# - critcmp. See: https://github.com/BurntSushi/critcmp
# - curl

# Usage
# $ bash compare.sh json_file1 json_file2
# ex: bash compare.sh songs_main_09a4321.json songs_geosearch_24ec456.json

# Checking that critcmp is installed
command -v critcmp > /dev/null 2>&1
if [[ "$?" -ne 0 ]]; then
    echo 'You must install critcmp to make this script work.'
    echo 'See: https://github.com/BurntSushi/critcmp'
    echo ' $ cargo install critcmp'
    exit 1
fi

s3_url='https://milli-benchmarks.fra1.digitaloceanspaces.com/critcmp_results'

# Fetch each requested result file into /tmp, skipping files already there.
for file in $@
do
    file_s3_url="$s3_url/$file"
    file_local_path="/tmp/$file"

    if [[ ! -f $file_local_path ]]; then
        curl $file_s3_url --output $file_local_path --silent
        if [[ "$?" -ne 0 ]]; then
            echo 'curl command failed.'
            exit 1
        fi
    fi
done

# Prefix every argument with /tmp/ (the leading space makes the first
# argument match the sed pattern too) and hand the list to critcmp.
path_list=$(echo " $@" | sed 's/ / \/tmp\//g')

critcmp $path_list
|
14
benchmarks/scripts/list.sh
Executable file
14
benchmarks/scripts/list.sh
Executable file
@ -0,0 +1,14 @@
|
||||
#!/usr/bin/env bash

# Lists the benchmark result files stored under critcmp_results/ in the S3
# bucket, optionally narrowed by each pattern given as an argument.
#
# Requirements:
# - curl
# - grep

# Extract the object keys from the bucket's XML listing, keep only the
# critcmp_results/ ones, and strip that prefix.
res=$(curl -s https://milli-benchmarks.fra1.digitaloceanspaces.com | grep -o '<Key>[^<]\+' | cut -c 5- | grep critcmp_results/ | cut -c 18-)

# Every extra argument filters the listing further.
for pattern in "$@"
do
    res=$(echo "$res" | grep $pattern)
done

echo "$res"
|
5
benchmarks/src/lib.rs
Normal file
5
benchmarks/src/lib.rs
Normal file
@ -0,0 +1,5 @@
|
||||
//! This library is only used to isolate the benchmarks
|
||||
//! from the original milli library.
|
||||
//!
|
||||
//! It does not include functions interesting for milli library
//! users, only functions useful for milli contributors.
|
61
config.toml
61
config.toml
@ -1,43 +1,43 @@
|
||||
# This file shows the default configuration of Meilisearch.
|
||||
# All variables are defined here: https://docs.meilisearch.com/learn/configuration/instance_options.html#environment-variables
|
||||
# All variables are defined here: https://www.meilisearch.com/docs/learn/configuration/instance_options#environment-variables
|
||||
|
||||
db_path = "./data.ms"
|
||||
# Designates the location where database files will be created and retrieved.
|
||||
# https://docs.meilisearch.com/learn/configuration/instance_options.html#database-path
|
||||
# https://www.meilisearch.com/docs/learn/configuration/instance_options#database-path
|
||||
|
||||
env = "development"
|
||||
# Configures the instance's environment. Value must be either `production` or `development`.
|
||||
# https://docs.meilisearch.com/learn/configuration/instance_options.html#environment
|
||||
# https://www.meilisearch.com/docs/learn/configuration/instance_options#environment
|
||||
|
||||
http_addr = "localhost:7700"
|
||||
# The address on which the HTTP server will listen.
|
||||
|
||||
# master_key = "YOUR_MASTER_KEY_VALUE"
|
||||
# Sets the instance's master key, automatically protecting all routes except GET /health.
|
||||
# https://docs.meilisearch.com/learn/configuration/instance_options.html#master-key
|
||||
# https://www.meilisearch.com/docs/learn/configuration/instance_options#master-key
|
||||
|
||||
# no_analytics = true
|
||||
# Deactivates Meilisearch's built-in telemetry when provided.
|
||||
# Meilisearch automatically collects data from all instances that do not opt out using this flag.
|
||||
# All gathered data is used solely for the purpose of improving Meilisearch, and can be deleted at any time.
|
||||
# https://docs.meilisearch.com/learn/configuration/instance_options.html#disable-analytics
|
||||
# https://www.meilisearch.com/docs/learn/configuration/instance_options#disable-analytics
|
||||
|
||||
http_payload_size_limit = "100 MB"
|
||||
# Sets the maximum size of accepted payloads.
|
||||
# https://docs.meilisearch.com/learn/configuration/instance_options.html#payload-limit-size
|
||||
# https://www.meilisearch.com/docs/learn/configuration/instance_options#payload-limit-size
|
||||
|
||||
log_level = "INFO"
|
||||
# Defines how much detail should be present in Meilisearch's logs.
|
||||
# Meilisearch currently supports six log levels, listed in order of increasing verbosity: `OFF`, `ERROR`, `WARN`, `INFO`, `DEBUG`, `TRACE`
|
||||
# https://docs.meilisearch.com/learn/configuration/instance_options.html#log-level
|
||||
# https://www.meilisearch.com/docs/learn/configuration/instance_options#log-level
|
||||
|
||||
# max_indexing_memory = "2 GiB"
|
||||
# Sets the maximum amount of RAM Meilisearch can use when indexing.
|
||||
# https://docs.meilisearch.com/learn/configuration/instance_options.html#max-indexing-memory
|
||||
# https://www.meilisearch.com/docs/learn/configuration/instance_options#max-indexing-memory
|
||||
|
||||
# max_indexing_threads = 4
|
||||
# Sets the maximum number of threads Meilisearch can use during indexing.
|
||||
# https://docs.meilisearch.com/learn/configuration/instance_options.html#max-indexing-threads
|
||||
# https://www.meilisearch.com/docs/learn/configuration/instance_options#max-indexing-threads
|
||||
|
||||
#############
|
||||
### DUMPS ###
|
||||
@ -45,19 +45,19 @@ log_level = "INFO"
|
||||
|
||||
dump_dir = "dumps/"
|
||||
# Sets the directory where Meilisearch will create dump files.
|
||||
# https://docs.meilisearch.com/learn/configuration/instance_options.html#dumps-destination
|
||||
# https://www.meilisearch.com/docs/learn/configuration/instance_options#dump-directory
|
||||
|
||||
# import_dump = "./path/to/my/file.dump"
|
||||
# Imports the dump file located at the specified path. Path must point to a .dump file.
|
||||
# https://docs.meilisearch.com/learn/configuration/instance_options.html#import-dump
|
||||
# https://www.meilisearch.com/docs/learn/configuration/instance_options#import-dump
|
||||
|
||||
ignore_missing_dump = false
|
||||
# Prevents Meilisearch from throwing an error when `import_dump` does not point to a valid dump file.
|
||||
# https://docs.meilisearch.com/learn/configuration/instance_options.html#ignore-missing-dump
|
||||
# https://www.meilisearch.com/docs/learn/configuration/instance_options#ignore-missing-dump
|
||||
|
||||
ignore_dump_if_db_exists = false
|
||||
# Prevents a Meilisearch instance with an existing database from throwing an error when using `import_dump`.
|
||||
# https://docs.meilisearch.com/learn/configuration/instance_options.html#ignore-dump-if-db-exists
|
||||
# https://www.meilisearch.com/docs/learn/configuration/instance_options#ignore-dump-if-db-exists
|
||||
|
||||
|
||||
#################
|
||||
@ -68,23 +68,23 @@ schedule_snapshot = false
|
||||
# Enables scheduled snapshots when true, disable when false (the default).
|
||||
# If the value is given as an integer, then enables the scheduled snapshot with the passed value as the interval
|
||||
# between each snapshot, in seconds.
|
||||
# https://docs.meilisearch.com/learn/configuration/instance_options.html#schedule-snapshot-creation
|
||||
# https://www.meilisearch.com/docs/learn/configuration/instance_options#schedule-snapshot-creation
|
||||
|
||||
snapshot_dir = "snapshots/"
|
||||
# Sets the directory where Meilisearch will store snapshots.
|
||||
# https://docs.meilisearch.com/learn/configuration/instance_options.html#snapshot-destination
|
||||
# https://www.meilisearch.com/docs/learn/configuration/instance_options#snapshot-destination
|
||||
|
||||
# import_snapshot = "./path/to/my/snapshot"
|
||||
# Launches Meilisearch after importing a previously-generated snapshot at the given filepath.
|
||||
# https://docs.meilisearch.com/learn/configuration/instance_options.html#import-snapshot
|
||||
# https://www.meilisearch.com/docs/learn/configuration/instance_options#import-snapshot
|
||||
|
||||
ignore_missing_snapshot = false
|
||||
# Prevents a Meilisearch instance from throwing an error when `import_snapshot` does not point to a valid snapshot file.
|
||||
# https://docs.meilisearch.com/learn/configuration/instance_options.html#ignore-missing-snapshot
|
||||
# https://www.meilisearch.com/docs/learn/configuration/instance_options#ignore-missing-snapshot
|
||||
|
||||
ignore_snapshot_if_db_exists = false
|
||||
# Prevents a Meilisearch instance with an existing database from throwing an error when using `import_snapshot`.
|
||||
# https://docs.meilisearch.com/learn/configuration/instance_options.html#ignore-snapshot-if-db-exists
|
||||
# https://www.meilisearch.com/docs/learn/configuration/instance_options#ignore-snapshot-if-db-exists
|
||||
|
||||
|
||||
###########
|
||||
@ -93,28 +93,39 @@ ignore_snapshot_if_db_exists = false
|
||||
|
||||
# ssl_auth_path = "./path/to/root"
|
||||
# Enables client authentication in the specified path.
|
||||
# https://docs.meilisearch.com/learn/configuration/instance_options.html#ssl-authentication-path
|
||||
# https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-authentication-path
|
||||
|
||||
# ssl_cert_path = "./path/to/certfile"
|
||||
# Sets the server's SSL certificates.
|
||||
# https://docs.meilisearch.com/learn/configuration/instance_options.html#ssl-certificates-path
|
||||
# https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-certificates-path
|
||||
|
||||
# ssl_key_path = "./path/to/private-key"
|
||||
# Sets the server's SSL key files.
|
||||
# https://docs.meilisearch.com/learn/configuration/instance_options.html#ssl-key-path
|
||||
# https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-key-path
|
||||
|
||||
# ssl_ocsp_path = "./path/to/ocsp-file"
|
||||
# Sets the server's OCSP file.
|
||||
# https://docs.meilisearch.com/learn/configuration/instance_options.html#ssl-ocsp-path
|
||||
# https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-ocsp-path
|
||||
|
||||
ssl_require_auth = false
|
||||
# Makes SSL authentication mandatory.
|
||||
# https://docs.meilisearch.com/learn/configuration/instance_options.html#ssl-require-auth
|
||||
# https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-require-auth
|
||||
|
||||
ssl_resumption = false
|
||||
# Activates SSL session resumption.
|
||||
# https://docs.meilisearch.com/learn/configuration/instance_options.html#ssl-resumption
|
||||
# https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-resumption
|
||||
|
||||
ssl_tickets = false
|
||||
# Activates SSL tickets.
|
||||
# https://docs.meilisearch.com/learn/configuration/instance_options.html#ssl-tickets
|
||||
# https://www.meilisearch.com/docs/learn/configuration/instance_options#ssl-tickets
|
||||
|
||||
#############################
|
||||
### Experimental features ###
|
||||
#############################
|
||||
|
||||
experimental_enable_metrics = false
|
||||
# Experimental metrics feature. For more information, see: <https://github.com/meilisearch/meilisearch/discussions/3518>
|
||||
# Enables the Prometheus metrics on the `GET /metrics` endpoint.
|
||||
|
||||
experimental_reduce_indexing_memory_usage = false
|
||||
# Experimental RAM reduction during indexing, do not use in production, see: <https://github.com/meilisearch/product/discussions/652>
|
||||
|
@ -103,7 +103,7 @@ not_available_failure_usage() {
|
||||
printf "$RED%s\n$DEFAULT" 'ERROR: Meilisearch binary is not available for your OS distribution or your architecture yet.'
|
||||
echo ''
|
||||
echo 'However, you can easily compile the binary from the source files.'
|
||||
echo 'Follow the steps at the page ("Source" tab): https://docs.meilisearch.com/learn/getting_started/installation.html'
|
||||
echo 'Follow the steps at the page ("Source" tab): https://www.meilisearch.com/docs/learn/getting_started/installation'
|
||||
}
|
||||
|
||||
fetch_release_failure_usage() {
|
||||
|
@ -1,25 +1,32 @@
|
||||
[package]
|
||||
name = "dump"
|
||||
version = "1.0.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
description.workspace = true
|
||||
edition.workspace = true
|
||||
homepage.workspace = true
|
||||
readme.workspace = true
|
||||
license.workspace = true
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1.0.65"
|
||||
flate2 = "1.0.22"
|
||||
http = "0.2.8"
|
||||
anyhow = "1.0.70"
|
||||
flate2 = "1.0.25"
|
||||
http = "0.2.9"
|
||||
log = "0.4.17"
|
||||
meilisearch-auth = { path = "../meilisearch-auth" }
|
||||
meilisearch-types = { path = "../meilisearch-types" }
|
||||
once_cell = "1.15.0"
|
||||
regex = "1.6.0"
|
||||
roaring = { version = "0.10.0", features = ["serde"] }
|
||||
serde = { version = "1.0.136", features = ["derive"] }
|
||||
serde_json = { version = "1.0.85", features = ["preserve_order"] }
|
||||
once_cell = "1.17.1"
|
||||
regex = "1.7.3"
|
||||
roaring = { version = "0.10.1", features = ["serde"] }
|
||||
serde = { version = "1.0.160", features = ["derive"] }
|
||||
serde_json = { version = "1.0.95", features = ["preserve_order"] }
|
||||
tar = "0.4.38"
|
||||
tempfile = "3.3.0"
|
||||
thiserror = "1.0.30"
|
||||
time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
|
||||
uuid = { version = "1.1.2", features = ["serde", "v4"] }
|
||||
tempfile = "3.5.0"
|
||||
thiserror = "1.0.40"
|
||||
time = { version = "0.3.20", features = ["serde-well-known", "formatting", "parsing", "macros"] }
|
||||
uuid = { version = "1.3.1", features = ["serde", "v4"] }
|
||||
|
||||
[dev-dependencies]
|
||||
big_s = "1.0.2"
|
||||
|
@ -101,6 +101,9 @@ pub enum KindDump {
|
||||
documents_ids: Vec<String>,
|
||||
},
|
||||
DocumentClear,
|
||||
DocumentDeletionByFilter {
|
||||
filter: serde_json::Value,
|
||||
},
|
||||
Settings {
|
||||
settings: Box<meilisearch_types::settings::Settings<Unchecked>>,
|
||||
is_deletion: bool,
|
||||
@ -166,6 +169,9 @@ impl From<KindWithContent> for KindDump {
|
||||
KindWithContent::DocumentDeletion { documents_ids, .. } => {
|
||||
KindDump::DocumentDeletion { documents_ids }
|
||||
}
|
||||
KindWithContent::DocumentDeletionByFilter { filter_expr, .. } => {
|
||||
KindDump::DocumentDeletionByFilter { filter: filter_expr }
|
||||
}
|
||||
KindWithContent::DocumentClear { .. } => KindDump::DocumentClear,
|
||||
KindWithContent::SettingsUpdate {
|
||||
new_settings,
|
||||
@ -203,12 +209,11 @@ pub(crate) mod test {
|
||||
|
||||
use big_s::S;
|
||||
use maplit::btreeset;
|
||||
use meilisearch_types::index_uid::IndexUid;
|
||||
use meilisearch_types::index_uid_pattern::IndexUidPattern;
|
||||
use meilisearch_types::keys::{Action, Key};
|
||||
use meilisearch_types::milli::update::Setting;
|
||||
use meilisearch_types::milli::{self};
|
||||
use meilisearch_types::settings::{Checked, Settings};
|
||||
use meilisearch_types::star_or::StarOr;
|
||||
use meilisearch_types::tasks::{Details, Status};
|
||||
use serde_json::{json, Map, Value};
|
||||
use time::macros::datetime;
|
||||
@ -341,7 +346,7 @@ pub(crate) mod test {
|
||||
name: Some(S("doggos_key")),
|
||||
uid: Uuid::from_str("9f8a34da-b6b2-42f0-939b-dbd4c3448655").unwrap(),
|
||||
actions: vec![Action::DocumentsAll],
|
||||
indexes: vec![StarOr::Other(IndexUid::from_str("doggos").unwrap())],
|
||||
indexes: vec![IndexUidPattern::from_str("doggos").unwrap()],
|
||||
expires_at: Some(datetime!(4130-03-14 12:21 UTC)),
|
||||
created_at: datetime!(1960-11-15 0:00 UTC),
|
||||
updated_at: datetime!(2022-11-10 0:00 UTC),
|
||||
@ -351,7 +356,7 @@ pub(crate) mod test {
|
||||
name: Some(S("master_key")),
|
||||
uid: Uuid::from_str("4622f717-1c00-47bb-a494-39d76a49b591").unwrap(),
|
||||
actions: vec![Action::All],
|
||||
indexes: vec![StarOr::Star],
|
||||
indexes: vec![IndexUidPattern::all()],
|
||||
expires_at: None,
|
||||
created_at: datetime!(0000-01-01 00:01 UTC),
|
||||
updated_at: datetime!(1964-05-04 17:25 UTC),
|
||||
|
@ -25,7 +25,6 @@ impl CompatV2ToV3 {
|
||||
CompatV2ToV3::Compat(compat) => compat.index_uuid(),
|
||||
};
|
||||
v2_uuids
|
||||
.into_iter()
|
||||
.into_iter()
|
||||
.map(|index| v3::meta::IndexUuid { uid: index.uid, uuid: index.uuid })
|
||||
.collect()
|
||||
|
@ -181,10 +181,8 @@ impl CompatV5ToV6 {
|
||||
.indexes
|
||||
.into_iter()
|
||||
.map(|index| match index {
|
||||
v5::StarOr::Star => v6::StarOr::Star,
|
||||
v5::StarOr::Other(uid) => {
|
||||
v6::StarOr::Other(v6::IndexUid::new_unchecked(uid.as_str()))
|
||||
}
|
||||
v5::StarOr::Star => v6::IndexUidPattern::all(),
|
||||
v5::StarOr::Other(uid) => v6::IndexUidPattern::new_unchecked(uid.as_str()),
|
||||
})
|
||||
.collect(),
|
||||
expires_at: key.expires_at,
|
||||
@ -360,6 +358,7 @@ impl<T> From<v5::Settings<T>> for v6::Settings<v6::Unchecked> {
|
||||
faceting: match settings.faceting {
|
||||
v5::Setting::Set(faceting) => v6::Setting::Set(v6::FacetingSettings {
|
||||
max_values_per_facet: faceting.max_values_per_facet.into(),
|
||||
sort_facet_values_by: v6::Setting::NotSet,
|
||||
}),
|
||||
v5::Setting::Reset => v6::Setting::Reset,
|
||||
v5::Setting::NotSet => v6::Setting::NotSet,
|
||||
|
@ -112,8 +112,11 @@ impl V3Reader {
|
||||
pub fn indexes(&self) -> Result<impl Iterator<Item = Result<V3IndexReader>> + '_> {
|
||||
Ok(self.index_uuid.iter().map(|index| -> Result<_> {
|
||||
V3IndexReader::new(
|
||||
index.uid.clone(),
|
||||
&self.dump.path().join("indexes").join(index.uuid.to_string()),
|
||||
index,
|
||||
BufReader::new(
|
||||
File::open(self.dump.path().join("updates").join("data.jsonl")).unwrap(),
|
||||
),
|
||||
)
|
||||
}))
|
||||
}
|
||||
@ -155,16 +158,42 @@ pub struct V3IndexReader {
|
||||
}
|
||||
|
||||
impl V3IndexReader {
|
||||
pub fn new(name: String, path: &Path) -> Result<Self> {
|
||||
pub fn new(path: &Path, index_uuid: &IndexUuid, tasks: BufReader<File>) -> Result<Self> {
|
||||
let meta = File::open(path.join("meta.json"))?;
|
||||
let meta: DumpMeta = serde_json::from_reader(meta)?;
|
||||
|
||||
let mut created_at = None;
|
||||
let mut updated_at = None;
|
||||
|
||||
for line in tasks.lines() {
|
||||
let task: Task = serde_json::from_str(&line?)?;
|
||||
|
||||
if !(task.uuid == index_uuid.uuid && task.is_finished()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let new_created_at = match task.update.meta() {
|
||||
Kind::DocumentAddition { .. } | Kind::Settings(_) => task.update.finished_at(),
|
||||
_ => None,
|
||||
};
|
||||
let new_updated_at = task.update.finished_at();
|
||||
|
||||
if created_at.is_none() || created_at > new_created_at {
|
||||
created_at = new_created_at;
|
||||
}
|
||||
|
||||
if updated_at.is_none() || updated_at < new_updated_at {
|
||||
updated_at = new_updated_at;
|
||||
}
|
||||
}
|
||||
|
||||
let current_time = OffsetDateTime::now_utc();
|
||||
|
||||
let metadata = IndexMetadata {
|
||||
uid: name,
|
||||
uid: index_uuid.uid.clone(),
|
||||
primary_key: meta.primary_key,
|
||||
// FIXME: Iterate over the whole task queue to find the creation and last update date.
|
||||
created_at: OffsetDateTime::now_utc(),
|
||||
updated_at: OffsetDateTime::now_utc(),
|
||||
created_at: created_at.unwrap_or(current_time),
|
||||
updated_at: updated_at.unwrap_or(current_time),
|
||||
};
|
||||
|
||||
let ret = V3IndexReader {
|
||||
@ -263,12 +292,12 @@ pub(crate) mod test {
|
||||
assert!(indexes.is_empty());
|
||||
|
||||
// products
|
||||
insta::assert_json_snapshot!(products.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
|
||||
insta::assert_json_snapshot!(products.metadata(), @r###"
|
||||
{
|
||||
"uid": "products",
|
||||
"primaryKey": "sku",
|
||||
"createdAt": "[now]",
|
||||
"updatedAt": "[now]"
|
||||
"createdAt": "2022-10-07T11:38:54.74389899Z",
|
||||
"updatedAt": "2022-10-07T11:38:55.963185778Z"
|
||||
}
|
||||
"###);
|
||||
|
||||
@ -278,12 +307,12 @@ pub(crate) mod test {
|
||||
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
|
||||
|
||||
// movies
|
||||
insta::assert_json_snapshot!(movies.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
|
||||
insta::assert_json_snapshot!(movies.metadata(), @r###"
|
||||
{
|
||||
"uid": "movies",
|
||||
"primaryKey": "id",
|
||||
"createdAt": "[now]",
|
||||
"updatedAt": "[now]"
|
||||
"createdAt": "2022-10-07T11:38:54.026649575Z",
|
||||
"updatedAt": "2022-10-07T11:39:04.188852537Z"
|
||||
}
|
||||
"###);
|
||||
|
||||
@ -308,12 +337,12 @@ pub(crate) mod test {
|
||||
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d751713988987e9331980363e24189ce");
|
||||
|
||||
// spells
|
||||
insta::assert_json_snapshot!(spells.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
|
||||
insta::assert_json_snapshot!(spells.metadata(), @r###"
|
||||
{
|
||||
"uid": "dnd_spells",
|
||||
"primaryKey": "index",
|
||||
"createdAt": "[now]",
|
||||
"updatedAt": "[now]"
|
||||
"createdAt": "2022-10-07T11:38:56.265951133Z",
|
||||
"updatedAt": "2022-10-07T11:38:56.521004328Z"
|
||||
}
|
||||
"###);
|
||||
|
||||
|
@ -74,6 +74,26 @@ impl UpdateStatus {
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn enqueued_at(&self) -> Option<OffsetDateTime> {
|
||||
match self {
|
||||
UpdateStatus::Processing(u) => Some(u.from.enqueued_at),
|
||||
UpdateStatus::Enqueued(u) => Some(u.enqueued_at),
|
||||
UpdateStatus::Processed(u) => Some(u.from.from.enqueued_at),
|
||||
UpdateStatus::Aborted(u) => Some(u.from.enqueued_at),
|
||||
UpdateStatus::Failed(u) => Some(u.from.from.enqueued_at),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn finished_at(&self) -> Option<OffsetDateTime> {
|
||||
match self {
|
||||
UpdateStatus::Processing(_) => None,
|
||||
UpdateStatus::Enqueued(_) => None,
|
||||
UpdateStatus::Processed(u) => Some(u.processed_at),
|
||||
UpdateStatus::Aborted(_) => None,
|
||||
UpdateStatus::Failed(u) => Some(u.failed_at),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, Clone)]
|
||||
|
@ -34,8 +34,7 @@ pub type PaginationSettings = meilisearch_types::settings::PaginationSettings;
|
||||
|
||||
// everything related to the api keys
|
||||
pub type Action = meilisearch_types::keys::Action;
|
||||
pub type StarOr<T> = meilisearch_types::star_or::StarOr<T>;
|
||||
pub type IndexUid = meilisearch_types::index_uid::IndexUid;
|
||||
pub type IndexUidPattern = meilisearch_types::index_uid_pattern::IndexUidPattern;
|
||||
|
||||
// everything related to the errors
|
||||
pub type ResponseError = meilisearch_types::error::ResponseError;
|
||||
|
@ -1,12 +1,19 @@
|
||||
[package]
|
||||
name = "file-store"
|
||||
version = "1.0.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
description.workspace = true
|
||||
homepage.workspace = true
|
||||
readme.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
||||
[dependencies]
|
||||
tempfile = "3.3.0"
|
||||
thiserror = "1.0.30"
|
||||
uuid = { version = "1.1.2", features = ["serde", "v4"] }
|
||||
tempfile = "3.5.0"
|
||||
thiserror = "1.0.40"
|
||||
uuid = { version = "1.3.1", features = ["serde", "v4"] }
|
||||
|
||||
[dev-dependencies]
|
||||
faux = "0.1.8"
|
||||
faux = "0.1.9"
|
||||
|
@ -116,10 +116,20 @@ impl FileStore {
|
||||
|
||||
/// List the Uuids of the files in the FileStore
|
||||
pub fn all_uuids(&self) -> Result<impl Iterator<Item = Result<Uuid>>> {
|
||||
Ok(self.path.read_dir()?.map(|entry| {
|
||||
Ok(Uuid::from_str(
|
||||
entry?.file_name().to_str().ok_or(Error::CouldNotParseFileNameAsUtf8)?,
|
||||
)?)
|
||||
Ok(self.path.read_dir()?.filter_map(|entry| {
|
||||
let file_name = match entry {
|
||||
Ok(entry) => entry.file_name(),
|
||||
Err(e) => return Some(Err(e.into())),
|
||||
};
|
||||
let file_name = match file_name.to_str() {
|
||||
Some(file_name) => file_name,
|
||||
None => return Some(Err(Error::CouldNotParseFileNameAsUtf8)),
|
||||
};
|
||||
if file_name.starts_with('.') {
|
||||
None
|
||||
} else {
|
||||
Some(Uuid::from_str(file_name).map_err(|e| e.into()))
|
||||
}
|
||||
}))
|
||||
}
|
||||
}
|
||||
@ -135,3 +145,34 @@ impl File {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use std::io::Write;
|
||||
|
||||
use tempfile::TempDir;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn all_uuids() {
|
||||
let dir = TempDir::new().unwrap();
|
||||
let fs = FileStore::new(dir.path()).unwrap();
|
||||
let (uuid, mut file) = fs.new_update().unwrap();
|
||||
file.write_all(b"Hello world").unwrap();
|
||||
file.persist().unwrap();
|
||||
let all_uuids = fs.all_uuids().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
||||
assert_eq!(all_uuids, vec![uuid]);
|
||||
|
||||
let (uuid2, file) = fs.new_update().unwrap();
|
||||
let all_uuids = fs.all_uuids().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
||||
assert_eq!(all_uuids, vec![uuid]);
|
||||
|
||||
file.persist().unwrap();
|
||||
let mut all_uuids = fs.all_uuids().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
||||
all_uuids.sort();
|
||||
let mut expected = vec![uuid, uuid2];
|
||||
expected.sort();
|
||||
assert_eq!(all_uuids, expected);
|
||||
}
|
||||
}
|
||||
|
19
filter-parser/Cargo.toml
Normal file
19
filter-parser/Cargo.toml
Normal file
@ -0,0 +1,19 @@
|
||||
[package]
|
||||
name = "filter-parser"
|
||||
description = "The parser for the Meilisearch filter syntax"
|
||||
publish = false
|
||||
|
||||
version.workspace = true
|
||||
authors.workspace = true
|
||||
# description.workspace = true
|
||||
homepage.workspace = true
|
||||
readme.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
||||
[dependencies]
|
||||
nom = "7.1.3"
|
||||
nom_locate = "4.1.0"
|
||||
|
||||
[dev-dependencies]
|
||||
insta = "1.29.0"
|
36
filter-parser/README.md
Normal file
36
filter-parser/README.md
Normal file
@ -0,0 +1,36 @@
|
||||
# Filter parser
|
||||
|
||||
This workspace is dedicated to the parsing of the Meilisearch filters.
|
||||
|
||||
Most of the code and explanation are in the [`lib.rs`](./src/lib.rs). Especially, the BNF of the filters at the top of this file.
|
||||
|
||||
The parser use [nom](https://docs.rs/nom/) to do most of its work and [nom-locate](https://docs.rs/nom_locate/) to keep track of what we were doing when we encountered an error.
|
||||
|
||||
## Cli
|
||||
A simple main is provided to quick-test if a filter can be parsed or not without bringing milli.
|
||||
It takes one argument and try to parse it.
|
||||
```
|
||||
cargo run -- 'field = value' # success
|
||||
cargo run -- 'field = "doggo' # error => missing closing delimiter "
|
||||
```
|
||||
|
||||
## Fuzz
|
||||
The workspace have been fuzzed with [cargo-fuzz](https://rust-fuzz.github.io/book/cargo-fuzz.html).
|
||||
|
||||
### Setup
|
||||
You'll need rust-nightly to execute the fuzzer.
|
||||
|
||||
```
|
||||
cargo install cargo-fuzz
|
||||
```
|
||||
|
||||
### Run
|
||||
When the filter parser is executed by the fuzzer it's triggering a stackoverflow really fast. We can avoid this problem by limiting the `max_len` of [libfuzzer](https://llvm.org/docs/LibFuzzer.html) at 500 characters.
|
||||
```
|
||||
cargo fuzz run parse -- -max_len=500
|
||||
```
|
||||
|
||||
## What to do if you find a bug in the parser
|
||||
|
||||
- Write a test at the end of the [`lib.rs`](./src/lib.rs) to ensure it never happens again.
|
||||
- Add a file in [the corpus directory](./fuzz/corpus/parse/) with your filter to help the fuzzer find new bugs. Since this directory is going to be heavily polluted by the execution of the fuzzer it's in the gitignore and you'll need to force push your new test.
|
3
filter-parser/fuzz/.gitignore
vendored
Normal file
3
filter-parser/fuzz/.gitignore
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
/corpus/
|
||||
/artifacts/
|
||||
/target/
|
25
filter-parser/fuzz/Cargo.toml
Normal file
25
filter-parser/fuzz/Cargo.toml
Normal file
@ -0,0 +1,25 @@
|
||||
[package]
|
||||
name = "filter-parser-fuzz"
|
||||
version = "0.0.0"
|
||||
authors = ["Automatically generated"]
|
||||
publish = false
|
||||
edition = "2018"
|
||||
|
||||
[package.metadata]
|
||||
cargo-fuzz = true
|
||||
|
||||
[dependencies]
|
||||
libfuzzer-sys = "0.4"
|
||||
|
||||
[dependencies.filter-parser]
|
||||
path = ".."
|
||||
|
||||
# Prevent this from interfering with workspaces
|
||||
[workspace]
|
||||
members = ["."]
|
||||
|
||||
[[bin]]
|
||||
name = "parse"
|
||||
path = "fuzz_targets/parse.rs"
|
||||
test = false
|
||||
doc = false
|
1
filter-parser/fuzz/corpus/parse/test_1
Normal file
1
filter-parser/fuzz/corpus/parse/test_1
Normal file
@ -0,0 +1 @@
|
||||
channel = Ponce
|
1
filter-parser/fuzz/corpus/parse/test_10
Normal file
1
filter-parser/fuzz/corpus/parse/test_10
Normal file
@ -0,0 +1 @@
|
||||
channel != ponce
|
1
filter-parser/fuzz/corpus/parse/test_11
Normal file
1
filter-parser/fuzz/corpus/parse/test_11
Normal file
@ -0,0 +1 @@
|
||||
NOT channel = ponce
|
1
filter-parser/fuzz/corpus/parse/test_12
Normal file
1
filter-parser/fuzz/corpus/parse/test_12
Normal file
@ -0,0 +1 @@
|
||||
subscribers < 1000
|
1
filter-parser/fuzz/corpus/parse/test_13
Normal file
1
filter-parser/fuzz/corpus/parse/test_13
Normal file
@ -0,0 +1 @@
|
||||
subscribers > 1000
|
1
filter-parser/fuzz/corpus/parse/test_14
Normal file
1
filter-parser/fuzz/corpus/parse/test_14
Normal file
@ -0,0 +1 @@
|
||||
subscribers <= 1000
|
1
filter-parser/fuzz/corpus/parse/test_15
Normal file
1
filter-parser/fuzz/corpus/parse/test_15
Normal file
@ -0,0 +1 @@
|
||||
subscribers >= 1000
|
1
filter-parser/fuzz/corpus/parse/test_16
Normal file
1
filter-parser/fuzz/corpus/parse/test_16
Normal file
@ -0,0 +1 @@
|
||||
NOT subscribers < 1000
|
1
filter-parser/fuzz/corpus/parse/test_17
Normal file
1
filter-parser/fuzz/corpus/parse/test_17
Normal file
@ -0,0 +1 @@
|
||||
NOT subscribers > 1000
|
1
filter-parser/fuzz/corpus/parse/test_18
Normal file
1
filter-parser/fuzz/corpus/parse/test_18
Normal file
@ -0,0 +1 @@
|
||||
NOT subscribers <= 1000
|
1
filter-parser/fuzz/corpus/parse/test_19
Normal file
1
filter-parser/fuzz/corpus/parse/test_19
Normal file
@ -0,0 +1 @@
|
||||
NOT subscribers >= 1000
|
1
filter-parser/fuzz/corpus/parse/test_2
Normal file
1
filter-parser/fuzz/corpus/parse/test_2
Normal file
@ -0,0 +1 @@
|
||||
subscribers = 12
|
1
filter-parser/fuzz/corpus/parse/test_20
Normal file
1
filter-parser/fuzz/corpus/parse/test_20
Normal file
@ -0,0 +1 @@
|
||||
subscribers 100 TO 1000
|
1
filter-parser/fuzz/corpus/parse/test_21
Normal file
1
filter-parser/fuzz/corpus/parse/test_21
Normal file
@ -0,0 +1 @@
|
||||
NOT subscribers 100 TO 1000
|
1
filter-parser/fuzz/corpus/parse/test_22
Normal file
1
filter-parser/fuzz/corpus/parse/test_22
Normal file
@ -0,0 +1 @@
|
||||
_geoRadius(12, 13, 14)
|
1
filter-parser/fuzz/corpus/parse/test_23
Normal file
1
filter-parser/fuzz/corpus/parse/test_23
Normal file
@ -0,0 +1 @@
|
||||
NOT _geoRadius(12, 13, 14)
|
1
filter-parser/fuzz/corpus/parse/test_24
Normal file
1
filter-parser/fuzz/corpus/parse/test_24
Normal file
@ -0,0 +1 @@
|
||||
channel = ponce AND 'dog race' != 'bernese mountain'
|
1
filter-parser/fuzz/corpus/parse/test_25
Normal file
1
filter-parser/fuzz/corpus/parse/test_25
Normal file
@ -0,0 +1 @@
|
||||
channel = ponce OR 'dog race' != 'bernese mountain'
|
1
filter-parser/fuzz/corpus/parse/test_26
Normal file
1
filter-parser/fuzz/corpus/parse/test_26
Normal file
@ -0,0 +1 @@
|
||||
channel = ponce AND 'dog race' != 'bernese mountain' OR subscribers > 1000
|
1
filter-parser/fuzz/corpus/parse/test_27
Normal file
1
filter-parser/fuzz/corpus/parse/test_27
Normal file
@ -0,0 +1 @@
|
||||
channel = ponce AND ( 'dog race' != 'bernese mountain' OR subscribers > 1000 )
|
1
filter-parser/fuzz/corpus/parse/test_28
Normal file
1
filter-parser/fuzz/corpus/parse/test_28
Normal file
@ -0,0 +1 @@
|
||||
(channel = ponce AND 'dog race' != 'bernese mountain' OR subscribers > 1000) AND _geoRadius(12, 13, 14)
|
1
filter-parser/fuzz/corpus/parse/test_29
Normal file
1
filter-parser/fuzz/corpus/parse/test_29
Normal file
@ -0,0 +1 @@
|
||||
channel = Ponce = 12
|
1
filter-parser/fuzz/corpus/parse/test_3
Normal file
1
filter-parser/fuzz/corpus/parse/test_3
Normal file
@ -0,0 +1 @@
|
||||
channel = 'Mister Mv'
|
1
filter-parser/fuzz/corpus/parse/test_30
Normal file
1
filter-parser/fuzz/corpus/parse/test_30
Normal file
@ -0,0 +1 @@
|
||||
channel =
|
1
filter-parser/fuzz/corpus/parse/test_31
Normal file
1
filter-parser/fuzz/corpus/parse/test_31
Normal file
@ -0,0 +1 @@
|
||||
channel = 🐻
|
1
filter-parser/fuzz/corpus/parse/test_32
Normal file
1
filter-parser/fuzz/corpus/parse/test_32
Normal file
@ -0,0 +1 @@
|
||||
OR
|
1
filter-parser/fuzz/corpus/parse/test_33
Normal file
1
filter-parser/fuzz/corpus/parse/test_33
Normal file
@ -0,0 +1 @@
|
||||
AND
|
1
filter-parser/fuzz/corpus/parse/test_34
Normal file
1
filter-parser/fuzz/corpus/parse/test_34
Normal file
@ -0,0 +1 @@
|
||||
channel Ponce
|
1
filter-parser/fuzz/corpus/parse/test_35
Normal file
1
filter-parser/fuzz/corpus/parse/test_35
Normal file
@ -0,0 +1 @@
|
||||
channel = Ponce OR
|
1
filter-parser/fuzz/corpus/parse/test_36
Normal file
1
filter-parser/fuzz/corpus/parse/test_36
Normal file
@ -0,0 +1 @@
|
||||
_geoRadius
|
1
filter-parser/fuzz/corpus/parse/test_37
Normal file
1
filter-parser/fuzz/corpus/parse/test_37
Normal file
@ -0,0 +1 @@
|
||||
_geoRadius = 12
|
1
filter-parser/fuzz/corpus/parse/test_38
Normal file
1
filter-parser/fuzz/corpus/parse/test_38
Normal file
@ -0,0 +1 @@
|
||||
_geoPoint(12, 13, 14)
|
1
filter-parser/fuzz/corpus/parse/test_39
Normal file
1
filter-parser/fuzz/corpus/parse/test_39
Normal file
@ -0,0 +1 @@
|
||||
position <= _geoPoint(12, 13, 14)
|
1
filter-parser/fuzz/corpus/parse/test_4
Normal file
1
filter-parser/fuzz/corpus/parse/test_4
Normal file
@ -0,0 +1 @@
|
||||
channel = "Mister Mv"
|
1
filter-parser/fuzz/corpus/parse/test_40
Normal file
1
filter-parser/fuzz/corpus/parse/test_40
Normal file
@ -0,0 +1 @@
|
||||
position <= _geoRadius(12, 13, 14)
|
1
filter-parser/fuzz/corpus/parse/test_41
Normal file
1
filter-parser/fuzz/corpus/parse/test_41
Normal file
@ -0,0 +1 @@
|
||||
channel = 'ponce
|
1
filter-parser/fuzz/corpus/parse/test_42
Normal file
1
filter-parser/fuzz/corpus/parse/test_42
Normal file
@ -0,0 +1 @@
|
||||
channel = "ponce
|
1
filter-parser/fuzz/corpus/parse/test_43
Normal file
1
filter-parser/fuzz/corpus/parse/test_43
Normal file
@ -0,0 +1 @@
|
||||
channel = mv OR (followers >= 1000
|
1
filter-parser/fuzz/corpus/parse/test_5
Normal file
1
filter-parser/fuzz/corpus/parse/test_5
Normal file
@ -0,0 +1 @@
|
||||
'dog race' = Borzoi
|
1
filter-parser/fuzz/corpus/parse/test_6
Normal file
1
filter-parser/fuzz/corpus/parse/test_6
Normal file
@ -0,0 +1 @@
|
||||
"dog race" = Chusky
|
1
filter-parser/fuzz/corpus/parse/test_7
Normal file
1
filter-parser/fuzz/corpus/parse/test_7
Normal file
@ -0,0 +1 @@
|
||||
"dog race" = "Bernese Mountain"
|
1
filter-parser/fuzz/corpus/parse/test_8
Normal file
1
filter-parser/fuzz/corpus/parse/test_8
Normal file
@ -0,0 +1 @@
|
||||
'dog race' = 'Bernese Mountain'
|
1
filter-parser/fuzz/corpus/parse/test_9
Normal file
1
filter-parser/fuzz/corpus/parse/test_9
Normal file
@ -0,0 +1 @@
|
||||
"dog race" = 'Bernese Mountain'
|
18
filter-parser/fuzz/fuzz_targets/parse.rs
Normal file
18
filter-parser/fuzz/fuzz_targets/parse.rs
Normal file
@ -0,0 +1,18 @@
|
||||
#![no_main]
|
||||
use filter_parser::{ErrorKind, FilterCondition};
|
||||
use libfuzzer_sys::fuzz_target;
|
||||
|
||||
fuzz_target!(|data: &[u8]| {
|
||||
if let Ok(s) = std::str::from_utf8(data) {
|
||||
// When we are fuzzing the parser we can get a stack overflow very easily.
|
||||
// But since this doesn't happens with a normal build we are just going to limit the fuzzer to 500 characters.
|
||||
if s.len() < 500 {
|
||||
match FilterCondition::parse(s) {
|
||||
Err(e) if matches!(e.kind(), ErrorKind::InternalError(_)) => {
|
||||
panic!("Found an internal error: `{:?}`", e)
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
101
filter-parser/src/condition.rs
Normal file
101
filter-parser/src/condition.rs
Normal file
@ -0,0 +1,101 @@
|
||||
//! BNF grammar:
|
||||
//!
|
||||
//! ```text
|
||||
//! condition = value ("==" | ">" ...) value
|
||||
//! to = value value TO value
|
||||
//! ```
|
||||
|
||||
use nom::branch::alt;
|
||||
use nom::bytes::complete::tag;
|
||||
use nom::character::complete::multispace1;
|
||||
use nom::combinator::cut;
|
||||
use nom::sequence::{terminated, tuple};
|
||||
use Condition::*;
|
||||
|
||||
use crate::{parse_value, FilterCondition, IResult, Span, Token};
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum Condition<'a> {
|
||||
GreaterThan(Token<'a>),
|
||||
GreaterThanOrEqual(Token<'a>),
|
||||
Equal(Token<'a>),
|
||||
NotEqual(Token<'a>),
|
||||
Null,
|
||||
Empty,
|
||||
Exists,
|
||||
LowerThan(Token<'a>),
|
||||
LowerThanOrEqual(Token<'a>),
|
||||
Between { from: Token<'a>, to: Token<'a> },
|
||||
}
|
||||
|
||||
/// condition = value ("==" | ">" ...) value
|
||||
pub fn parse_condition(input: Span) -> IResult<FilterCondition> {
|
||||
let operator = alt((tag("<="), tag(">="), tag("!="), tag("<"), tag(">"), tag("=")));
|
||||
let (input, (fid, op, value)) = tuple((parse_value, operator, cut(parse_value)))(input)?;
|
||||
|
||||
let condition = match *op.fragment() {
|
||||
"<=" => FilterCondition::Condition { fid, op: LowerThanOrEqual(value) },
|
||||
">=" => FilterCondition::Condition { fid, op: GreaterThanOrEqual(value) },
|
||||
"!=" => FilterCondition::Condition { fid, op: NotEqual(value) },
|
||||
"<" => FilterCondition::Condition { fid, op: LowerThan(value) },
|
||||
">" => FilterCondition::Condition { fid, op: GreaterThan(value) },
|
||||
"=" => FilterCondition::Condition { fid, op: Equal(value) },
|
||||
_ => unreachable!(),
|
||||
};
|
||||
|
||||
Ok((input, condition))
|
||||
}
|
||||
|
||||
/// null = value "IS" WS+ "NULL"
|
||||
pub fn parse_is_null(input: Span) -> IResult<FilterCondition> {
|
||||
let (input, key) = parse_value(input)?;
|
||||
|
||||
let (input, _) = tuple((tag("IS"), multispace1, tag("NULL")))(input)?;
|
||||
Ok((input, FilterCondition::Condition { fid: key, op: Null }))
|
||||
}
|
||||
|
||||
/// null = value "IS" WS+ "NOT" WS+ "NULL"
|
||||
pub fn parse_is_not_null(input: Span) -> IResult<FilterCondition> {
|
||||
let (input, key) = parse_value(input)?;
|
||||
|
||||
let (input, _) = tuple((tag("IS"), multispace1, tag("NOT"), multispace1, tag("NULL")))(input)?;
|
||||
Ok((input, FilterCondition::Not(Box::new(FilterCondition::Condition { fid: key, op: Null }))))
|
||||
}
|
||||
|
||||
/// empty = value "IS" WS+ "EMPTY"
|
||||
pub fn parse_is_empty(input: Span) -> IResult<FilterCondition> {
|
||||
let (input, key) = parse_value(input)?;
|
||||
|
||||
let (input, _) = tuple((tag("IS"), multispace1, tag("EMPTY")))(input)?;
|
||||
Ok((input, FilterCondition::Condition { fid: key, op: Empty }))
|
||||
}
|
||||
|
||||
/// empty = value "IS" WS+ "NOT" WS+ "EMPTY"
|
||||
pub fn parse_is_not_empty(input: Span) -> IResult<FilterCondition> {
|
||||
let (input, key) = parse_value(input)?;
|
||||
|
||||
let (input, _) = tuple((tag("IS"), multispace1, tag("NOT"), multispace1, tag("EMPTY")))(input)?;
|
||||
Ok((input, FilterCondition::Not(Box::new(FilterCondition::Condition { fid: key, op: Empty }))))
|
||||
}
|
||||
|
||||
/// exist = value "EXISTS"
|
||||
pub fn parse_exists(input: Span) -> IResult<FilterCondition> {
|
||||
let (input, key) = terminated(parse_value, tag("EXISTS"))(input)?;
|
||||
|
||||
Ok((input, FilterCondition::Condition { fid: key, op: Exists }))
|
||||
}
|
||||
/// exist = value "NOT" WS+ "EXISTS"
|
||||
pub fn parse_not_exists(input: Span) -> IResult<FilterCondition> {
|
||||
let (input, key) = parse_value(input)?;
|
||||
|
||||
let (input, _) = tuple((tag("NOT"), multispace1, tag("EXISTS")))(input)?;
|
||||
Ok((input, FilterCondition::Not(Box::new(FilterCondition::Condition { fid: key, op: Exists }))))
|
||||
}
|
||||
|
||||
/// to = value value "TO" WS+ value
|
||||
pub fn parse_to(input: Span) -> IResult<FilterCondition> {
|
||||
let (input, (key, from, _, _, to)) =
|
||||
tuple((parse_value, parse_value, tag("TO"), multispace1, cut(parse_value)))(input)?;
|
||||
|
||||
Ok((input, FilterCondition::Condition { fid: key, op: Between { from, to } }))
|
||||
}
|
204
filter-parser/src/error.rs
Normal file
204
filter-parser/src/error.rs
Normal file
@ -0,0 +1,204 @@
|
||||
use std::fmt::Display;
|
||||
|
||||
use nom::error::{self, ParseError};
|
||||
use nom::Parser;
|
||||
|
||||
use crate::{IResult, Span};
|
||||
|
||||
/// Convenience extensions over [`nom::Err`] used throughout the filter parser.
pub trait NomErrorExt<E> {
    /// Returns `true` when the error is an unrecoverable `nom::Err::Failure`.
    fn is_failure(&self) -> bool;
    /// Maps the inner error with `op`, except when it is an unrecoverable
    /// `Failure`, which is returned untouched.
    fn map_err<O: FnOnce(E) -> E>(self, op: O) -> nom::Err<E>;
    /// Maps the inner error with `op`, except when it is a recoverable
    /// `Error`, which is returned untouched.
    fn map_fail<O: FnOnce(E) -> E>(self, op: O) -> nom::Err<E>;
}
|
||||
|
||||
impl<E> NomErrorExt<E> for nom::Err<E> {
    /// `Failure` is nom's unrecoverable error variant.
    fn is_failure(&self) -> bool {
        matches!(self, Self::Failure(_))
    }

    /// Applies `op` unless the error is already an unrecoverable `Failure`.
    fn map_err<O: FnOnce(E) -> E>(self, op: O) -> nom::Err<E> {
        if self.is_failure() {
            self
        } else {
            self.map(op)
        }
    }

    /// Applies `op` unless the error is a recoverable `Error`.
    fn map_fail<O: FnOnce(E) -> E>(self, op: O) -> nom::Err<E> {
        if matches!(self, Self::Error(_)) {
            self
        } else {
            self.map(op)
        }
    }
}
|
||||
|
||||
/// cut a parser and map the error
|
||||
pub fn cut_with_err<'a, O>(
|
||||
mut parser: impl FnMut(Span<'a>) -> IResult<'a, O>,
|
||||
mut with: impl FnMut(Error<'a>) -> Error<'a>,
|
||||
) -> impl FnMut(Span<'a>) -> IResult<O> {
|
||||
move |input| match parser.parse(input) {
|
||||
Err(nom::Err::Error(e)) => Err(nom::Err::Failure(with(e))),
|
||||
rest => rest,
|
||||
}
|
||||
}
|
||||
|
||||
/// A parse error produced by the filter parser.
#[derive(Debug)]
pub struct Error<'a> {
    // The span of the input that triggered the error; its `extra` payload is
    // printed after the column range by the `Display` impl below.
    context: Span<'a>,
    // What went wrong; drives the diagnostic message in `Display`.
    kind: ErrorKind<'a>,
}
|
||||
|
||||
/// Why a value failed to parse — used to tailor the error message.
#[derive(Debug)]
pub enum ExpectedValueKind {
    // A reserved keyword was used in value position (the message suggests quoting it).
    ReservedKeyword,
    // Any other missing or malformed value.
    Other,
}
|
||||
|
||||
/// Every kind of error the filter parser can report.
/// The user-facing wording for each variant lives in the `Display` impl of `Error`.
#[derive(Debug)]
pub enum ErrorKind<'a> {
    // A reserved `_geo*` keyword was used as a filter expression.
    ReservedGeo(&'a str),
    // `_geoRadius` called with the wrong arguments.
    GeoRadius,
    // `_geoBoundingBox` called with the wrong arguments.
    GeoBoundingBox,
    // `_geoRadius` used in value position instead of as an operation.
    MisusedGeoRadius,
    // `_geoBoundingBox` used in value position instead of as an operation.
    MisusedGeoBoundingBox,
    // No valid operation (`=`, `!=`, `IN`, `TO`, `EXISTS`, ...) was found.
    InvalidPrimary,
    // Trailing characters after a complete filter.
    ExpectedEof,
    // A value was expected; the payload refines the message.
    ExpectedValue(ExpectedValueKind),
    MalformedValue,
    // Missing `[` after the `IN` keyword.
    InOpeningBracket,
    // Missing `]` closing an `IN[...]` list.
    InClosingBracket,
    NonFiniteFloat,
    // Invalid element inside an `IN[...]` list.
    InExpectedValue(ExpectedValueKind),
    // A reserved keyword used as an unquoted field name.
    ReservedKeyword(String),
    MissingClosingDelimiter(char),
    // Internal only: carries a `char` for `ParseError::from_char`; never displayed.
    Char(char),
    // A nom error kind we don't translate to a dedicated message.
    InternalError(error::ErrorKind),
    // The filter nests deeper than the parser's depth limit.
    DepthLimitReached,
    // An error raised outside of this crate, stored as its message.
    External(String),
}
|
||||
|
||||
impl<'a> Error<'a> {
|
||||
pub fn kind(&self) -> &ErrorKind<'a> {
|
||||
&self.kind
|
||||
}
|
||||
|
||||
pub fn context(&self) -> &Span<'a> {
|
||||
&self.context
|
||||
}
|
||||
|
||||
pub fn new_from_kind(context: Span<'a>, kind: ErrorKind<'a>) -> Self {
|
||||
Self { context, kind }
|
||||
}
|
||||
|
||||
pub fn new_from_external(context: Span<'a>, error: impl std::error::Error) -> Self {
|
||||
Self::new_from_kind(context, ErrorKind::External(error.to_string()))
|
||||
}
|
||||
|
||||
pub fn char(self) -> char {
|
||||
match self.kind {
|
||||
ErrorKind::Char(c) => c,
|
||||
error => panic!("Internal filter parser error: {:?}", error),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> ParseError<Span<'a>> for Error<'a> {
|
||||
fn from_error_kind(input: Span<'a>, kind: error::ErrorKind) -> Self {
|
||||
let kind = match kind {
|
||||
error::ErrorKind::Eof => ErrorKind::ExpectedEof,
|
||||
kind => ErrorKind::InternalError(kind),
|
||||
};
|
||||
Self { context: input, kind }
|
||||
}
|
||||
|
||||
fn append(_input: Span<'a>, _kind: error::ErrorKind, other: Self) -> Self {
|
||||
other
|
||||
}
|
||||
|
||||
fn from_char(input: Span<'a>, c: char) -> Self {
|
||||
Self { context: input, kind: ErrorKind::Char(c) }
|
||||
}
|
||||
}
|
||||
|
||||
/// Renders the diagnostic: first the human-readable message, then the
/// column range of the offending span followed by `self.context.extra`.
impl<'a> Display for Error<'a> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let input = self.context.fragment();
        // When printing our error message we want to escape all `\n` to be sure we keep our format with the
        // first line being the diagnostic and the second line being the incriminated filter.
        let escaped_input = input.escape_debug();

        match &self.kind {
            // An empty (after trim) span means the value is missing entirely,
            // regardless of the `ExpectedValueKind` payload.
            ErrorKind::ExpectedValue(_) if input.trim().is_empty() => {
                writeln!(f, "Was expecting a value but instead got nothing.")?
            }
            ErrorKind::ExpectedValue(ExpectedValueKind::ReservedKeyword) => {
                writeln!(f, "Was expecting a value but instead got `{escaped_input}`, which is a reserved keyword. To use `{escaped_input}` as a field name or a value, surround it by quotes.")?
            }
            ErrorKind::ExpectedValue(ExpectedValueKind::Other) => {
                writeln!(f, "Was expecting a value but instead got `{}`.", escaped_input)?
            }
            ErrorKind::MalformedValue => {
                writeln!(f, "Malformed value: `{}`.", escaped_input)?
            }
            ErrorKind::MissingClosingDelimiter(c) => {
                writeln!(f, "Expression `{}` is missing the following closing delimiter: `{}`.", escaped_input, c)?
            }
            ErrorKind::InvalidPrimary => {
                // Adapt the tail of the message to whether anything was parsed at all.
                let text = if input.trim().is_empty() { "but instead got nothing.".to_string() } else { format!("at `{}`.", escaped_input) };
                writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` {}", text)?
            }
            ErrorKind::ExpectedEof => {
                writeln!(f, "Found unexpected characters at the end of the filter: `{}`. You probably forgot an `OR` or an `AND` rule.", escaped_input)?
            }
            ErrorKind::GeoRadius => {
                writeln!(f, "The `_geoRadius` filter expects three arguments: `_geoRadius(latitude, longitude, radius)`.")?
            }
            ErrorKind::GeoBoundingBox => {
                writeln!(f, "The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.")?
            }
            ErrorKind::ReservedGeo(name) => {
                writeln!(f, "`{}` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.", name.escape_debug())?
            }
            ErrorKind::MisusedGeoRadius => {
                writeln!(f, "The `_geoRadius` filter is an operation and can't be used as a value.")?
            }
            ErrorKind::MisusedGeoBoundingBox => {
                writeln!(f, "The `_geoBoundingBox` filter is an operation and can't be used as a value.")?
            }
            ErrorKind::ReservedKeyword(word) => {
                writeln!(f, "`{word}` is a reserved keyword and thus cannot be used as a field name unless it is put inside quotes. Use \"{word}\" or \'{word}\' instead.")?
            }
            ErrorKind::InOpeningBracket => {
                writeln!(f, "Expected `[` after `IN` keyword.")?
            }
            ErrorKind::InClosingBracket => {
                writeln!(f, "Expected matching `]` after the list of field names given to `IN[`")?
            }
            ErrorKind::NonFiniteFloat => {
                writeln!(f, "Non finite floats are not supported")?
            }
            ErrorKind::InExpectedValue(ExpectedValueKind::ReservedKeyword) => {
                writeln!(f, "Expected only comma-separated field names inside `IN[..]` but instead found `{escaped_input}`, which is a keyword. To use `{escaped_input}` as a field name or a value, surround it by quotes.")?
            }
            ErrorKind::InExpectedValue(ExpectedValueKind::Other) => {
                writeln!(f, "Expected only comma-separated field names inside `IN[..]` but instead found `{escaped_input}`.")?
            }
            // `Char` errors are internal plumbing (see `Error::char`) and must
            // never reach the user.
            ErrorKind::Char(c) => {
                panic!("Tried to display a char error with `{}`", c)
            }
            ErrorKind::DepthLimitReached => writeln!(
                f,
                "The filter exceeded the maximum depth limit. Try rewriting the filter so that it contains fewer nested conditions."
            )?,
            ErrorKind::InternalError(kind) => writeln!(
                f,
                "Encountered an internal `{:?}` error while parsing your filter. Please fill an issue", kind
            )?,
            ErrorKind::External(ref error) => writeln!(f, "{}", error)?,
        }
        // Second line: the 1-based column where the offending span starts and
        // the column just past its end, then the span's `extra` payload.
        let base_column = self.context.get_utf8_column();
        let size = self.context.fragment().chars().count();

        write!(f, "{}:{} {}", base_column, base_column + size, self.context.extra)
    }
}
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user