Compare commits
646 Commits
v3.0.0-bet
...
v3.25.1
(Commit list: 646 commits between v3.0.0-bet and v3.25.1; per-commit author, message, and date were not captured in this view.)
98 .all-contributorsrc (deleted file)
@@ -1,98 +0,0 @@
{
  "files": [
    "CONTRIBUTORS.md"
  ],
  "imageSize": 100,
  "commit": false,
  "contributors": [
    {
      "login": "Xhofe",
      "name": "Xhofe",
      "avatar_url": "https://avatars.githubusercontent.com/u/36558727?v=4",
      "profile": "http://nn.ci",
      "contributions": [
        "code",
        "ideas",
        "doc"
      ]
    },
    {
      "login": "foxxorcat",
      "name": "foxxorcat",
      "avatar_url": "https://avatars.githubusercontent.com/u/95907542?v=4",
      "profile": "https://github.com/foxxorcat",
      "contributions": [
        "code"
      ]
    },
    {
      "login": "DaoChen6",
      "name": "道辰",
      "avatar_url": "https://avatars.githubusercontent.com/u/63903027?v=4",
      "profile": "https://www.iflu.cf/",
      "contributions": [
        "doc"
      ]
    },
    {
      "login": "vg-land",
      "name": "vg-land",
      "avatar_url": "https://avatars.githubusercontent.com/u/16739728?v=4",
      "profile": "https://vg-land.github.io/",
      "contributions": [
        "code"
      ]
    },
    {
      "login": "Clansty",
      "name": "凌莞~(=^▽^=)",
      "avatar_url": "https://avatars.githubusercontent.com/u/18461360?v=4",
      "profile": "https://c5y.moe",
      "contributions": [
        "doc"
      ]
    },
    {
      "login": "Windman1320",
      "name": "Windman",
      "avatar_url": "https://avatars.githubusercontent.com/u/9999486?v=4",
      "profile": "https://github.com/Windman1320",
      "contributions": [
        "code"
      ]
    },
    {
      "login": "ericarena",
      "name": "ericarena",
      "avatar_url": "https://avatars.githubusercontent.com/u/4518927?v=4",
      "profile": "https://github.com/ericarena",
      "contributions": [
        "code"
      ]
    },
    {
      "login": "WntFlm",
      "name": "WntFlm",
      "avatar_url": "https://avatars.githubusercontent.com/u/34620278?v=4",
      "profile": "https://github.com/WntFlm",
      "contributions": [
        "code"
      ]
    },
    {
      "login": "XZB-1248",
      "name": "XZB-1248",
      "avatar_url": "https://avatars.githubusercontent.com/u/28593573?v=4",
      "profile": "https://github.com/XZB-1248",
      "contributions": [
        "code"
      ]
    }
  ],
  "contributorsPerLine": 7,
  "projectName": "alist",
  "projectOwner": "alist-org",
  "repoType": "github",
  "repoHost": "https://github.com",
  "skipCi": true
}
13 .github/FUNDING.yml (vendored, new file)
@@ -0,0 +1,13 @@
# These are supported funding model platforms

github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
custom: ['https://alist.nn.ci/guide/sponsor.html']
47 .github/ISSUE_TEMPLATE/bug_report.yml (vendored)
@@ -7,27 +7,44 @@ body:
|
||||
value: |
|
||||
Thanks for taking the time to fill out this bug report, please **confirm that your issue is not a duplicate issue and not because of your operation or version issues**
|
||||
感谢您花时间填写此错误报告,请**务必确认您的issue不是重复的且不是因为您的操作或版本问题**
|
||||
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: Please make sure of the following things
|
||||
description: You may select more than one, even select all.
|
||||
description: |
|
||||
You must check all the following, otherwise your issue may be closed directly. Or you can go to the [discussions](https://github.com/alist-org/alist/discussions)
|
||||
您必须勾选以下所有内容,否则您的issue可能会被直接关闭。或者您可以去[讨论区](https://github.com/alist-org/alist/discussions)
|
||||
options:
|
||||
- label: I have read the [documentation](https://alist-doc.nn.ci).
|
||||
- label: I'm sure there are no duplicate issues or discussions.
|
||||
- label: I'm sure it's due to `alist` and not something else(such as `Dependencies` or `Operational`).
|
||||
- label: |
|
||||
I have read the [documentation](https://alist.nn.ci).
|
||||
我已经阅读了[文档](https://alist.nn.ci)。
|
||||
- label: |
|
||||
I'm sure there are no duplicate issues or discussions.
|
||||
我确定没有重复的issue或讨论。
|
||||
- label: |
|
||||
I'm sure it's due to `AList` and not something else(such as `Dependencies` or `Operational`).
|
||||
我确定是`AList`的问题,而不是其他原因(例如`依赖`或`操作`)。
|
||||
- label: |
|
||||
I'm sure this issue is not fixed in the latest version.
|
||||
我确定这个问题在最新版本中没有被修复。
|
||||
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: Alist Version / Alist 版本
|
||||
description: What version of our software are you running?
|
||||
placeholder: v2.0.0
|
||||
label: AList Version / AList 版本
|
||||
description: |
|
||||
What version of our software are you running? Do not use `latest` or `master` as an answer.
|
||||
您使用的是哪个版本的软件?请不要使用`latest`或`master`作为答案。
|
||||
placeholder: v3.xx.xx
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: driver
|
||||
attributes:
|
||||
label: Driver used / 使用的存储驱动
|
||||
description: What storage driver are you using?
|
||||
description: |
|
||||
What storage driver are you using?
|
||||
您使用的是哪个存储驱动?
|
||||
placeholder: "for example: Onedrive"
|
||||
validations:
|
||||
required: true
|
||||
@ -42,8 +59,17 @@ body:
|
||||
attributes:
|
||||
label: Reproduction / 复现链接
|
||||
description: |
|
||||
Please provide a link to a repo that can reproduce the problem you ran into.
|
||||
请提供能复现此问题的链接
|
||||
Please provide a link to a repo that can reproduce the problem you ran into. Please be aware that your issue may be closed directly if you don't provide it.
|
||||
请提供能复现此问题的链接,请知悉如果不提供它你的issue可能会被直接关闭。
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: config
|
||||
attributes:
|
||||
label: Config / 配置
|
||||
description: |
|
||||
Please provide the configuration file of your `AList` application and take a screenshot of the relevant storage configuration. (hide privacy field)
|
||||
请提供您的`AList`应用的配置文件,并截图相关存储配置。(隐藏隐私字段)
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
@ -53,4 +79,3 @@ body:
|
||||
description: |
|
||||
Please copy and paste any relevant log output.
|
||||
请复制粘贴错误日志,或者截图
|
||||
render: shell
|
2 .github/ISSUE_TEMPLATE/feature_request.yml (vendored)
@@ -7,7 +7,7 @@ body:
|
||||
label: Please make sure of the following things
|
||||
description: You may select more than one, even select all.
|
||||
options:
|
||||
- label: I have read the [documentation](https://alist-doc.nn.ci).
|
||||
- label: I have read the [documentation](https://alist.nn.ci).
|
||||
- label: I'm sure there are no duplicate issues or discussions.
|
||||
- label: I'm sure this feature is not implemented.
|
||||
- label: I'm sure it's a reasonable and popular requirement.
|
||||
|
19 .github/stale.yml (vendored, new file)
@@ -0,0 +1,19 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 44
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 20
# Issues with these labels will never be considered stale
exemptLabels:
  - accepted
  - security
# Label to use when marking an issue as stale
staleLabel: stale
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
  This issue has been automatically marked as stale because it has not had
  recent activity. It will be closed if no further activity occurs. Thank you
  for your contributions.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: >
  This issue was closed due to inactive more than 52 days. You can reopen or
  recreate it if you think it should continue. Thank you for your contributions again.
15 .github/workflows/auto_lang.yml (vendored)
@@ -7,6 +7,8 @@ on:
|
||||
paths:
|
||||
- 'drivers/**'
|
||||
- 'internal/bootstrap/data/setting.go'
|
||||
- 'internal/conf/const.go'
|
||||
- 'cmd/lang.go'
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
@ -14,17 +16,17 @@ jobs:
|
||||
strategy:
|
||||
matrix:
|
||||
platform: [ ubuntu-latest ]
|
||||
go-version: [ 1.19 ]
|
||||
go-version: [ '1.20' ]
|
||||
name: auto generate lang.json
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
- name: Setup go
|
||||
uses: actions/setup-go@v2
|
||||
uses: actions/setup-go@v4
|
||||
with:
|
||||
go-version: ${{ matrix.go-version }}
|
||||
|
||||
- name: Checkout alist
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
path: alist
|
||||
|
||||
@ -42,6 +44,7 @@ jobs:
|
||||
cd alist
|
||||
go run ./main.go lang
|
||||
cd ..
|
||||
|
||||
- name: Copy lang file
|
||||
run: |
|
||||
cp -f ./alist/lang/*.json ./alist-web/src/lang/en/ 2>/dev/null || :
|
||||
@ -50,8 +53,8 @@ jobs:
|
||||
run: |
|
||||
cd alist-web
|
||||
git add .
|
||||
git config --local user.email "i@nn.ci"
|
||||
git config --local user.name "Noah Hsu"
|
||||
git config --local user.email "bot@nn.ci"
|
||||
git config --local user.name "IlaBot"
|
||||
git commit -m "chore: auto update i18n file" -a 2>/dev/null || :
|
||||
cd ..
|
||||
|
||||
@ -61,4 +64,4 @@ jobs:
|
||||
github_token: ${{ secrets.MY_TOKEN }}
|
||||
branch: main
|
||||
directory: alist-web
|
||||
repository: alist-org/alist-web
|
||||
repository: alist-org/alist-web
|
||||
|
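The auto_lang workflow above regenerates the translation source files whenever driver or setting code changes, then pushes them to alist-org/alist-web. A rough local equivalent of its generate-and-copy steps, assuming both repositories are checked out side by side, is:

    # Regenerate lang/*.json from the alist source tree (same command the workflow runs)
    cd alist
    go run ./main.go lang
    cd ..

    # Copy the generated files into the web front-end, tolerating a missing source directory
    cp -f ./alist/lang/*.json ./alist-web/src/lang/en/ 2>/dev/null || :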
15 .github/workflows/build.yml (vendored)
@@ -2,21 +2,21 @@ name: build

on:
  push:
    branches: [ '**' ]
    branches: [ 'main' ]
  pull_request:
    branches: [ '**' ]
    branches: [ 'main' ]

jobs:
  build:
    strategy:
      matrix:
        platform: [ubuntu-latest]
        go-version: [1.18]
        go-version: [ '1.20' ]
    name: Build
    runs-on: ${{ matrix.platform }}
    steps:
      - name: Setup Go
        uses: actions/setup-go@v2
        uses: actions/setup-go@v4
        with:
          go-version: ${{ matrix.go-version }}

@@ -25,8 +25,9 @@ jobs:

      - name: Install dependencies
        run: |
          docker pull techknowlogick/xgo:latest
          go install src.techknowlogick.com/xgo@latest
          sudo snap install zig --classic --beta
          docker pull crazymax/xgo:latest
          go install github.com/crazy-max/xgo@latest
          sudo apt install upx

      - name: Build
@@ -34,7 +35,7 @@ jobs:
          bash build.sh dev

      - name: Upload artifact
        uses: actions/upload-artifact@v2
        uses: actions/upload-artifact@v3
        with:
          name: alist
          path: dist
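The build workflow now cross-compiles with the crazy-max fork of xgo and compresses binaries with upx before running build.sh in dev mode. A rough local equivalent of the install-and-build steps, assuming Docker, Go, and an apt-based system are available, is:

    # Same dependency set the workflow installs
    docker pull crazymax/xgo:latest
    go install github.com/crazy-max/xgo@latest
    sudo apt install upx

    # Development build; the dist/ output is what CI uploads as the "alist" artifact
    bash build.sh dev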
42 .github/workflows/build_docker.yml (vendored)
@@ -6,34 +6,60 @@ on:
|
||||
|
||||
jobs:
|
||||
build_docker:
|
||||
name: Docker
|
||||
name: Build docker
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
- name: Docker meta
|
||||
id: meta
|
||||
uses: docker/metadata-action@v3
|
||||
uses: docker/metadata-action@v4
|
||||
with:
|
||||
images: xhofe/alist
|
||||
- name: Replace release with dev
|
||||
run: |
|
||||
sed -i 's/release/dev/g' Dockerfile
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
uses: docker/setup-qemu-action@v2
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v1
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: xhofe
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Build and push
|
||||
id: docker_build
|
||||
uses: docker/build-push-action@v2
|
||||
uses: docker/build-push-action@v4
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
platforms: linux/amd64,linux/arm64
|
||||
|
||||
build_docker_with_aria2:
|
||||
needs: build_docker
|
||||
name: Build docker with aria2
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
repository: alist-org/with_aria2
|
||||
ref: main
|
||||
persist-credentials: false
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Commit
|
||||
run: |
|
||||
git config --local user.email "bot@nn.ci"
|
||||
git config --local user.name "IlaBot"
|
||||
git commit --allow-empty -m "Trigger build for ${{ github.sha }}"
|
||||
|
||||
- name: Push commit
|
||||
uses: ad-m/github-push-action@master
|
||||
with:
|
||||
github_token: ${{ secrets.MY_TOKEN }}
|
||||
branch: main
|
||||
repository: alist-org/with_aria2
|
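The build_docker job hands the multi-arch image build to docker/build-push-action, with tags and labels coming from docker/metadata-action. For orientation, a roughly equivalent manual Buildx invocation is sketched below; the xhofe/alist:dev tag is illustrative only, since the real tags are generated by the metadata step:

    # Dev images switch the Dockerfile's build mode from "release" to "dev" first
    sed -i 's/release/dev/g' Dockerfile

    # One-off builder, then a multi-platform build pushed to Docker Hub
    docker buildx create --use
    docker buildx build \
      --platform linux/amd64,linux/arm64 \
      --tag xhofe/alist:dev \
      --push .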
19 .github/workflows/changelog.yml (vendored, new file)
@@ -0,0 +1,19 @@
name: auto changelog

on:
  push:
    tags:
      - '*'

jobs:
  changelog:
    name: Create Release
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - run: npx changelogithub # or changelogithub@0.12 if ensure the stable result
        env:
          GITHUB_TOKEN: ${{secrets.MY_TOKEN}}
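The new changelog workflow runs changelogithub on every pushed tag to publish release notes generated from the commit history. A sketch of invoking the same tool by hand, assuming a token with permission to edit releases on the repository is exported first:

    # Token needs access to the target repository's releases
    export GITHUB_TOKEN=<personal-access-token>
    # Same invocation as the workflow; pin a version (e.g. changelogithub@0.12) for a stable result
    npx changelogithub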
16 .github/workflows/issue_check_inactive.yml (vendored, deleted file)
@@ -1,16 +0,0 @@
|
||||
name: Check inactive
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 0 1 * *"
|
||||
|
||||
jobs:
|
||||
check-inactive:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: check-inactive
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: 'check-inactive'
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
inactive-day: 30
|
@@ -1,11 +1,12 @@
|
||||
name: Check need info
|
||||
name: Close need info
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 0 */7 * *"
|
||||
- cron: "0 0 */1 * *"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-need-info:
|
||||
close-need-info:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: close-issues
|
||||
@ -14,8 +15,8 @@ jobs:
|
||||
actions: 'close-issues'
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
labels: 'question'
|
||||
inactive-day: 7
|
||||
inactive-day: 3
|
||||
close-reason: 'not_planned'
|
||||
body: |
|
||||
Hello @${{ github.event.issue.user.login }}, this issue was closed due to no activities in 7 days.
|
||||
你好 @${{ github.event.issue.user.login }},此issue因超过7天未回复被关闭。
|
||||
Hello @${{ github.event.issue.user.login }}, this issue was closed due to no activities in 3 days.
|
||||
你好 @${{ github.event.issue.user.login }},此issue因超过3天未回复被关闭。
|
21 .github/workflows/issue_close_stale.yml (vendored, new file)
@@ -0,0 +1,21 @@
name: Close inactive

on:
  schedule:
    - cron: "0 0 */7 * *"
  workflow_dispatch:

jobs:
  close-inactive:
    runs-on: ubuntu-latest
    steps:
      - name: close-issues
        uses: actions-cool/issues-helper@v3
        with:
          actions: 'close-issues'
          token: ${{ secrets.GITHUB_TOKEN }}
          labels: 'stale'
          inactive-day: 8
          close-reason: 'not_planned'
          body: |
            Hello @${{ github.event.issue.user.login }}, this issue was closed due to inactive more than 52 days. You can reopen or recreate it if you think it should continue. Thank you for your contributions again.
2 .github/workflows/issue_duplicate.yml (vendored)
@@ -10,7 +10,7 @@ jobs:
|
||||
if: github.event.label.name == 'duplicate'
|
||||
steps:
|
||||
- name: Create comment
|
||||
uses: actions-cool/issues-helper@v2
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: 'create-comment'
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
2 .github/workflows/issue_invalid.yml (vendored)
@@ -10,7 +10,7 @@ jobs:
|
||||
if: github.event.label.name == 'invalid'
|
||||
steps:
|
||||
- name: Create comment
|
||||
uses: actions-cool/issues-helper@v2
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: 'create-comment'
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
6 .github/workflows/issue_question.yml (vendored)
@@ -10,11 +10,11 @@ jobs:
|
||||
if: github.event.label.name == 'question'
|
||||
steps:
|
||||
- name: Create comment
|
||||
uses: actions-cool/issues-helper@v2.0.0
|
||||
uses: actions-cool/issues-helper@v3.5.1
|
||||
with:
|
||||
actions: 'create-comment'
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
body: |
|
||||
Hello @${{ github.event.issue.user.login }}, please input issue by template and add detail. Issues labeled by `question` will be closed if no activities in 7 days.
|
||||
你好 @${{ github.event.issue.user.login }},请按照issue模板填写, 并详细说明问题/复现步骤/复现链接/实现思路或提供更多信息等, 7天内未回复issue自动关闭。
|
||||
Hello @${{ github.event.issue.user.login }}, please input issue by template and add detail. Issues labeled by `question` will be closed if no activities in 3 days.
|
||||
你好 @${{ github.event.issue.user.login }},请按照issue模板填写, 并详细说明问题/日志记录/复现步骤/复现链接/实现思路或提供更多信息等, 3天内未回复issue自动关闭。
|
17 .github/workflows/issue_rm_working.yml (vendored, new file)
@@ -0,0 +1,17 @@
|
||||
name: Remove working label when issue closed
|
||||
|
||||
on:
|
||||
issues:
|
||||
types: [closed]
|
||||
|
||||
jobs:
|
||||
rm-working:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Remove working label
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: 'remove-labels'
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
labels: 'working'
|
3 .github/workflows/issue_similarity.yml (vendored)
@@ -15,4 +15,5 @@ jobs:
|
||||
comment-title: '### See'
|
||||
comment-body: '${index}. ${similarity} #${number}'
|
||||
show-footer: false
|
||||
show-mentioned: true
|
||||
show-mentioned: true
|
||||
since-days: 730
|
2 .github/workflows/issue_wontfix.yml (vendored)
@@ -10,7 +10,7 @@ jobs:
|
||||
if: github.event.label.name == 'wontfix'
|
||||
steps:
|
||||
- name: Create comment
|
||||
uses: actions-cool/issues-helper@v2
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: 'create-comment'
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
69 .github/workflows/release.yml (vendored)
@@ -1,52 +1,75 @@
|
||||
name: release
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- '*'
|
||||
release:
|
||||
types: [ published ]
|
||||
|
||||
jobs:
|
||||
changelog:
|
||||
name: Create Release
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- run: npx changelogithub # or changelogithub@0.12 if ensure the stable result
|
||||
env:
|
||||
GITHUB_TOKEN: ${{secrets.MY_TOKEN}}
|
||||
release:
|
||||
needs: changelog
|
||||
strategy:
|
||||
matrix:
|
||||
platform: [ubuntu-latest]
|
||||
go-version: [1.18]
|
||||
platform: [ ubuntu-latest ]
|
||||
go-version: [ '1.20' ]
|
||||
name: Release
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
- name: Prerelease
|
||||
uses: irongut/EditRelease@v1.2.0
|
||||
with:
|
||||
token: ${{ secrets.MY_TOKEN }}
|
||||
id: ${{ github.event.release.id }}
|
||||
prerelease: true
|
||||
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@v2
|
||||
uses: actions/setup-go@v4
|
||||
with:
|
||||
go-version: ${{ matrix.go-version }}
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
docker pull techknowlogick/xgo:latest
|
||||
go install src.techknowlogick.com/xgo@latest
|
||||
sudo snap install zig --classic --beta
|
||||
docker pull crazymax/xgo:latest
|
||||
go install github.com/crazy-max/xgo@latest
|
||||
sudo apt install upx
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
bash build.sh release
|
||||
|
||||
- name: Release
|
||||
- name: Upload assets
|
||||
uses: softprops/action-gh-release@v1
|
||||
with:
|
||||
files: build/compress/*
|
||||
files: build/compress/*
|
||||
prerelease: false
|
||||
|
||||
release_desktop:
|
||||
needs: release
|
||||
name: Release desktop
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
repository: alist-org/desktop-release
|
||||
ref: main
|
||||
persist-credentials: false
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Add tag
|
||||
run: |
|
||||
git config --local user.email "bot@nn.ci"
|
||||
git config --local user.name "IlaBot"
|
||||
version=$(wget -qO- -t1 -T2 "https://api.github.com/repos/alist-org/alist/releases/latest" | grep "tag_name" | head -n 1 | awk -F ":" '{print $2}' | sed 's/\"//g;s/,//g;s/ //g')
|
||||
git tag -a $version -m "release $version"
|
||||
|
||||
- name: Push tags
|
||||
uses: ad-m/github-push-action@master
|
||||
with:
|
||||
github_token: ${{ secrets.MY_TOKEN }}
|
||||
branch: main
|
||||
repository: alist-org/desktop-release
|
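The desktop-release step above extracts the latest tag name from the GitHub API response with a wget/grep/awk/sed pipeline. An equivalent sketch using curl and jq (both assumed to be installed on the runner) reads the field directly:

    # Fetch the latest alist release and read its tag name
    version=$(curl -fsSL "https://api.github.com/repos/alist-org/alist/releases/latest" | jq -r .tag_name)
    git tag -a "$version" -m "release $version"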
42 .github/workflows/release_docker.yml (vendored)
@@ -7,36 +7,62 @@ on:
|
||||
|
||||
jobs:
|
||||
release_docker:
|
||||
name: Docker
|
||||
name: Release Docker
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Docker meta
|
||||
id: meta
|
||||
uses: docker/metadata-action@v3
|
||||
uses: docker/metadata-action@v4
|
||||
with:
|
||||
images: xhofe/alist
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
uses: docker/setup-qemu-action@v2
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
uses: docker/setup-buildx-action@v2
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v1
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: xhofe
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Build and push
|
||||
id: docker_build
|
||||
uses: docker/build-push-action@v2
|
||||
uses: docker/build-push-action@v4
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x
|
||||
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x
|
||||
|
||||
release_docker_with_aria2:
|
||||
needs: release_docker
|
||||
name: Release docker with aria2
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
repository: alist-org/with_aria2
|
||||
ref: main
|
||||
persist-credentials: false
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Add tag
|
||||
run: |
|
||||
git config --local user.email "bot@nn.ci"
|
||||
git config --local user.name "IlaBot"
|
||||
git tag -a ${{ github.ref_name }} -m "release ${{ github.ref_name }}"
|
||||
|
||||
- name: Push tags
|
||||
uses: ad-m/github-push-action@master
|
||||
with:
|
||||
github_token: ${{ secrets.MY_TOKEN }}
|
||||
branch: main
|
||||
repository: alist-org/with_aria2
|
||||
|
34 .github/workflows/release_linux_musl_arm.yml (vendored, new file)
@@ -0,0 +1,34 @@
name: release_linux_musl_arm

on:
  release:
    types: [ published ]

jobs:
  release_arm:
    strategy:
      matrix:
        platform: [ ubuntu-latest ]
        go-version: [ '1.20' ]
    name: Release
    runs-on: ${{ matrix.platform }}
    steps:

      - name: Setup Go
        uses: actions/setup-go@v4
        with:
          go-version: ${{ matrix.go-version }}

      - name: Checkout
        uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Build
        run: |
          bash build.sh release linux_musl_arm

      - name: Upload assets
        uses: softprops/action-gh-release@v1
        with:
          files: build/compress/*
16 .gitignore (vendored)
@@ -20,10 +20,14 @@ output/
|
||||
|
||||
# Dependency directories (remove the comment below to include it)
|
||||
# vendor/
|
||||
bin/*
|
||||
/bin/*
|
||||
*.json
|
||||
data/
|
||||
log/
|
||||
lang/
|
||||
public/dist/*
|
||||
!public/dist/README.md
|
||||
/build
|
||||
/data/
|
||||
/log/
|
||||
/lang/
|
||||
/daemon/
|
||||
/public/dist/*
|
||||
/!public/dist/README.md
|
||||
|
||||
.VSCodeCounter
|
128 CODE_OF_CONDUCT.md (new file)
@@ -0,0 +1,128 @@
|
||||
# Contributor Covenant Code of Conduct
|
||||
|
||||
## Our Pledge
|
||||
|
||||
We as members, contributors, and leaders pledge to make participation in our
|
||||
community a harassment-free experience for everyone, regardless of age, body
|
||||
size, visible or invisible disability, ethnicity, sex characteristics, gender
|
||||
identity and expression, level of experience, education, socio-economic status,
|
||||
nationality, personal appearance, race, religion, or sexual identity
|
||||
and orientation.
|
||||
|
||||
We pledge to act and interact in ways that contribute to an open, welcoming,
|
||||
diverse, inclusive, and healthy community.
|
||||
|
||||
## Our Standards
|
||||
|
||||
Examples of behavior that contributes to a positive environment for our
|
||||
community include:
|
||||
|
||||
* Demonstrating empathy and kindness toward other people
|
||||
* Being respectful of differing opinions, viewpoints, and experiences
|
||||
* Giving and gracefully accepting constructive feedback
|
||||
* Accepting responsibility and apologizing to those affected by our mistakes,
|
||||
and learning from the experience
|
||||
* Focusing on what is best not just for us as individuals, but for the
|
||||
overall community
|
||||
|
||||
Examples of unacceptable behavior include:
|
||||
|
||||
* The use of sexualized language or imagery, and sexual attention or
|
||||
advances of any kind
|
||||
* Trolling, insulting or derogatory comments, and personal or political attacks
|
||||
* Public or private harassment
|
||||
* Publishing others' private information, such as a physical or email
|
||||
address, without their explicit permission
|
||||
* Other conduct which could reasonably be considered inappropriate in a
|
||||
professional setting
|
||||
|
||||
## Enforcement Responsibilities
|
||||
|
||||
Community leaders are responsible for clarifying and enforcing our standards of
|
||||
acceptable behavior and will take appropriate and fair corrective action in
|
||||
response to any behavior that they deem inappropriate, threatening, offensive,
|
||||
or harmful.
|
||||
|
||||
Community leaders have the right and responsibility to remove, edit, or reject
|
||||
comments, commits, code, wiki edits, issues, and other contributions that are
|
||||
not aligned to this Code of Conduct, and will communicate reasons for moderation
|
||||
decisions when appropriate.
|
||||
|
||||
## Scope
|
||||
|
||||
This Code of Conduct applies within all community spaces, and also applies when
|
||||
an individual is officially representing the community in public spaces.
|
||||
Examples of representing our community include using an official e-mail address,
|
||||
posting via an official social media account, or acting as an appointed
|
||||
representative at an online or offline event.
|
||||
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
reported to the community leaders responsible for enforcement at
|
||||
i@nn.ci.
|
||||
All complaints will be reviewed and investigated promptly and fairly.
|
||||
|
||||
All community leaders are obligated to respect the privacy and security of the
|
||||
reporter of any incident.
|
||||
|
||||
## Enforcement Guidelines
|
||||
|
||||
Community leaders will follow these Community Impact Guidelines in determining
|
||||
the consequences for any action they deem in violation of this Code of Conduct:
|
||||
|
||||
### 1. Correction
|
||||
|
||||
**Community Impact**: Use of inappropriate language or other behavior deemed
|
||||
unprofessional or unwelcome in the community.
|
||||
|
||||
**Consequence**: A private, written warning from community leaders, providing
|
||||
clarity around the nature of the violation and an explanation of why the
|
||||
behavior was inappropriate. A public apology may be requested.
|
||||
|
||||
### 2. Warning
|
||||
|
||||
**Community Impact**: A violation through a single incident or series
|
||||
of actions.
|
||||
|
||||
**Consequence**: A warning with consequences for continued behavior. No
|
||||
interaction with the people involved, including unsolicited interaction with
|
||||
those enforcing the Code of Conduct, for a specified period of time. This
|
||||
includes avoiding interactions in community spaces as well as external channels
|
||||
like social media. Violating these terms may lead to a temporary or
|
||||
permanent ban.
|
||||
|
||||
### 3. Temporary Ban
|
||||
|
||||
**Community Impact**: A serious violation of community standards, including
|
||||
sustained inappropriate behavior.
|
||||
|
||||
**Consequence**: A temporary ban from any sort of interaction or public
|
||||
communication with the community for a specified period of time. No public or
|
||||
private interaction with the people involved, including unsolicited interaction
|
||||
with those enforcing the Code of Conduct, is allowed during this period.
|
||||
Violating these terms may lead to a permanent ban.
|
||||
|
||||
### 4. Permanent Ban
|
||||
|
||||
**Community Impact**: Demonstrating a pattern of violation of community
|
||||
standards, including sustained inappropriate behavior, harassment of an
|
||||
individual, or aggression toward or disparagement of classes of individuals.
|
||||
|
||||
**Consequence**: A permanent ban from any sort of public interaction within
|
||||
the community.
|
||||
|
||||
## Attribution
|
||||
|
||||
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
|
||||
version 2.0, available at
|
||||
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
|
||||
|
||||
Community Impact Guidelines were inspired by [Mozilla's code of conduct
|
||||
enforcement ladder](https://github.com/mozilla/diversity).
|
||||
|
||||
[homepage]: https://www.contributor-covenant.org
|
||||
|
||||
For answers to common questions about this code of conduct, see the FAQ at
|
||||
https://www.contributor-covenant.org/faq. Translations are available at
|
||||
https://www.contributor-covenant.org/translations.
|
CONTRIBUTING.md
@@ -6,8 +6,8 @@

Prerequisites:

- [git](https://nodejs.org/zh-cn/)
- [Go 1.18+](https://golang.org/doc/install)
- [git](https://git-scm.com)
- [Go 1.20+](https://golang.org/doc/install)
- [gcc](https://gcc.gnu.org/)
- [nodejs](https://nodejs.org/)

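This hunk updates the build prerequisites (git, Go 1.20+, gcc, nodejs) and fixes the git link. For orientation only, a minimal toolchain check and source checkout might look like the sketch below; the full development workflow, including building the web assets, is described in CONTRIBUTING.md itself:

    # Verify the prerequisites are on PATH
    git version && go version && gcc --version && node --version

    # Fetch the backend source and compile a development binary
    git clone https://github.com/alist-org/alist.git
    cd alist
    go build -o bin/alist .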
33 CONTRIBUTORS.md (deleted file)
@@ -1,33 +0,0 @@
|
||||
<!-- ALL-CONTRIBUTORS-BADGE:START - Do not remove or modify this section -->
|
||||
[](#contributors-)
|
||||
<!-- ALL-CONTRIBUTORS-BADGE:END -->
|
||||
|
||||
## Contributors ✨
|
||||
|
||||
Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)):
|
||||
|
||||
<!-- ALL-CONTRIBUTORS-LIST:START - Do not remove or modify this section -->
|
||||
<!-- prettier-ignore-start -->
|
||||
<!-- markdownlint-disable -->
|
||||
<table>
|
||||
<tr>
|
||||
<td align="center"><a href="http://nn.ci"><img src="https://avatars.githubusercontent.com/u/36558727?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Xhofe</b></sub></a><br /><a href="https://github.com/alist-org/alist/commits?author=Xhofe" title="Code">💻</a> <a href="#ideas-Xhofe" title="Ideas, Planning, & Feedback">🤔</a> <a href="https://github.com/alist-org/alist/commits?author=Xhofe" title="Documentation">📖</a></td>
|
||||
<td align="center"><a href="https://github.com/foxxorcat"><img src="https://avatars.githubusercontent.com/u/95907542?v=4?s=100" width="100px;" alt=""/><br /><sub><b>foxxorcat</b></sub></a><br /><a href="https://github.com/alist-org/alist/commits?author=foxxorcat" title="Code">💻</a></td>
|
||||
<td align="center"><a href="https://www.iflu.cf/"><img src="https://avatars.githubusercontent.com/u/63903027?v=4?s=100" width="100px;" alt=""/><br /><sub><b>道辰</b></sub></a><br /><a href="https://github.com/alist-org/alist/commits?author=DaoChen6" title="Documentation">📖</a></td>
|
||||
<td align="center"><a href="https://vg-land.github.io/"><img src="https://avatars.githubusercontent.com/u/16739728?v=4?s=100" width="100px;" alt=""/><br /><sub><b>vg-land</b></sub></a><br /><a href="https://github.com/alist-org/alist/commits?author=vg-land" title="Code">💻</a></td>
|
||||
<td align="center"><a href="https://c5y.moe"><img src="https://avatars.githubusercontent.com/u/18461360?v=4?s=100" width="100px;" alt=""/><br /><sub><b>凌莞~(=^▽^=)</b></sub></a><br /><a href="https://github.com/alist-org/alist/commits?author=Clansty" title="Documentation">📖</a></td>
|
||||
<td align="center"><a href="https://github.com/Windman1320"><img src="https://avatars.githubusercontent.com/u/9999486?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Windman</b></sub></a><br /><a href="https://github.com/alist-org/alist/commits?author=Windman1320" title="Code">💻</a></td>
|
||||
<td align="center"><a href="https://github.com/ericarena"><img src="https://avatars.githubusercontent.com/u/4518927?v=4?s=100" width="100px;" alt=""/><br /><sub><b>ericarena</b></sub></a><br /><a href="https://github.com/alist-org/alist/commits?author=ericarena" title="Code">💻</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center"><a href="https://github.com/WntFlm"><img src="https://avatars.githubusercontent.com/u/34620278?v=4?s=100" width="100px;" alt=""/><br /><sub><b>WntFlm</b></sub></a><br /><a href="https://github.com/alist-org/alist/commits?author=WntFlm" title="Code">💻</a></td>
|
||||
<td align="center"><a href="https://github.com/XZB-1248"><img src="https://avatars.githubusercontent.com/u/28593573?v=4?s=100" width="100px;" alt=""/><br /><sub><b>XZB-1248</b></sub></a><br /><a href="https://github.com/alist-org/alist/commits?author=XZB-1248" title="Code">💻</a></td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
<!-- markdownlint-restore -->
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
<!-- ALL-CONTRIBUTORS-LIST:END -->
|
||||
|
||||
This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind welcome!
|
14 Dockerfile
@@ -1,14 +1,18 @@
FROM alpine:edge as builder
FROM alpine:3.18 as builder
LABEL stage=go-builder
WORKDIR /app/
COPY ./ ./
RUN apk add --no-cache bash git go gcc musl-dev curl; \
RUN apk add --no-cache bash curl gcc git go musl-dev; \
    bash build.sh release docker

FROM alpine:edge
FROM alpine:3.18
LABEL MAINTAINER="i@nn.ci"
VOLUME /opt/alist/data/
WORKDIR /opt/alist/
COPY --from=builder /app/bin/alist ./
EXPOSE 5244
CMD [ "./alist", "server", "--no-prefix" ]
COPY entrypoint.sh /entrypoint.sh
RUN apk add --no-cache bash ca-certificates su-exec tzdata; \
    chmod +x /entrypoint.sh
ENV PUID=0 PGID=0 UMASK=022
EXPOSE 5244 5245
CMD [ "/entrypoint.sh" ]
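The updated image pins Alpine 3.18, installs su-exec and tzdata, exposes the extra 5245 port, and replaces the direct alist command with an entrypoint.sh controlled by PUID, PGID, and UMASK. The script itself is not part of this diff; the sketch below only illustrates the usual su-exec pattern such an entrypoint tends to follow (the exact steps in the project's script may differ):

    #!/bin/bash
    # Hypothetical entrypoint sketch: apply UMASK, give the data directory to PUID:PGID,
    # then drop privileges with su-exec before starting alist
    umask "${UMASK}"
    chown -R "${PUID}:${PGID}" /opt/alist/
    exec su-exec "${PUID}:${PGID}" ./alist server --no-prefix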
68 README.md
@@ -1,35 +1,45 @@
|
||||
<div align="center">
|
||||
<a href="https://alist.nn.ci"><img height="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
|
||||
<p><em>🗂️A file list program that supports multiple storage, powered by Gin and Solidjs.</em></p>
|
||||
<p><em>🗂️A file list program that supports multiple storages, powered by Gin and Solidjs.</em></p>
|
||||
<div>
|
||||
<a href="https://goreportcard.com/report/github.com/alist-org/alist/v3">
|
||||
<img src="https://goreportcard.com/badge/github.com/alist-org/alist/v3" alt="latest version" />
|
||||
</a>
|
||||
<a href="https://github.com/Xhofe/alist/blob/main/LICENSE">
|
||||
<img src="https://img.shields.io/github/license/Xhofe/alist" alt="License" />
|
||||
</a>
|
||||
<a href="https://github.com/Xhofe/alist/discussions">
|
||||
<img src="https://img.shields.io/github/discussions/Xhofe/alist?color=%23ED8936" alt="discussions" />
|
||||
</a>
|
||||
<a href="https://github.com/Xhofe/alist/actions?query=workflow%3ABuild">
|
||||
<img src="https://img.shields.io/github/workflow/status/Xhofe/alist/build" alt="Build status" />
|
||||
<img src="https://img.shields.io/github/actions/workflow/status/Xhofe/alist/build.yml?branch=main" alt="Build status" />
|
||||
</a>
|
||||
<a href="https://github.com/Xhofe/alist/releases">
|
||||
<img src="https://img.shields.io/github/release/Xhofe/alist" alt="latest version" />
|
||||
</a>
|
||||
<a href="https://github.com/Xhofe/alist/releases">
|
||||
<img src="https://img.shields.io/github/downloads/Xhofe/alist/total?color=%239F7AEA" alt="Downloads" />
|
||||
</a>
|
||||
<a title="Crowdin" target="_blank" href="https://crwd.in/alist">
|
||||
<img src="https://badges.crowdin.net/alist/localized.svg">
|
||||
</a>
|
||||
<a href="https://pay.xhofe.top">
|
||||
<img src="https://img.shields.io/badge/%24-sponsor-ff69b4.svg" alt="sponsor" />
|
||||
</div>
|
||||
<div>
|
||||
<a href="https://github.com/Xhofe/alist/discussions">
|
||||
<img src="https://img.shields.io/github/discussions/Xhofe/alist?color=%23ED8936" alt="discussions" />
|
||||
</a>
|
||||
<a href="https://discord.gg/F4ymsH4xv2">
|
||||
<img src="https://img.shields.io/discord/1018870125102895134?logo=discord" alt="discussions" />
|
||||
</a>
|
||||
<a href="https://github.com/Xhofe/alist/releases">
|
||||
<img src="https://img.shields.io/github/downloads/Xhofe/alist/total?color=%239F7AEA&logo=github" alt="Downloads" />
|
||||
</a>
|
||||
<a href="https://hub.docker.com/r/xhofe/alist">
|
||||
<img src="https://img.shields.io/docker/pulls/xhofe/alist?color=%2348BB78&logo=docker&label=pulls" alt="Downloads" />
|
||||
</a>
|
||||
<a href="https://alist.nn.ci/guide/sponsor.html">
|
||||
<img src="https://img.shields.io/badge/%24-sponsor-F87171.svg" alt="sponsor" />
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
---
|
||||
|
||||
English | [中文](./README_cn.md) | [Contributors](./CONTRIBUTORS.md) | [Contributing](./CONTRIBUTING.md)
|
||||
English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing](./CONTRIBUTING.md) | [CODE_OF_CONDUCT](./CODE_OF_CONDUCT.md)
|
||||
|
||||
## Features
|
||||
|
||||
@ -43,6 +53,7 @@ English | [中文](./README_cn.md) | [Contributors](./CONTRIBUTORS.md) | [Contri
|
||||
- [x] FTP / SFTP
|
||||
- [x] [PikPak](https://www.mypikpak.com/)
|
||||
- [x] [S3](https://aws.amazon.com/s3/)
|
||||
- [x] [Seafile](https://seafile.com/)
|
||||
- [x] [UPYUN Storage Service](https://www.upyun.com/products/file-storage)
|
||||
- [x] WebDav(Support OneDrive/SharePoint without API)
|
||||
- [x] Teambition([China](https://www.teambition.com/ ),[International](https://us.teambition.com/ ))
|
||||
@ -50,7 +61,19 @@ English | [中文](./README_cn.md) | [Contributors](./CONTRIBUTORS.md) | [Contri
|
||||
- [x] [139yun](https://yun.139.com/) (Personal, Family)
|
||||
- [x] [YandexDisk](https://disk.yandex.com/)
|
||||
- [x] [BaiduNetdisk](http://pan.baidu.com/)
|
||||
- [x] [Terabox](https://www.terabox.com/main)
|
||||
- [x] [UC](https://drive.uc.cn)
|
||||
- [x] [Quark](https://pan.quark.cn)
|
||||
- [x] [Thunder](https://pan.xunlei.com)
|
||||
- [x] [Lanzou](https://www.lanzou.com/)
|
||||
- [x] [Aliyundrive share](https://www.aliyundrive.com/)
|
||||
- [x] [Google photo](https://photos.google.com/)
|
||||
- [x] [Mega.nz](https://mega.nz)
|
||||
- [x] [Baidu photo](https://photo.baidu.com/)
|
||||
- [x] SMB
|
||||
- [x] [115](https://115.com/)
|
||||
- [X] Cloudreve
|
||||
- [x] [Dropbox](https://www.dropbox.com/)
|
||||
- [x] Easy to deploy and out-of-the-box
|
||||
- [x] File preview (PDF, markdown, code, plain text, ...)
|
||||
- [x] Image preview in gallery mode
|
||||
@ -68,6 +91,7 @@ English | [中文](./README_cn.md) | [Contributors](./CONTRIBUTORS.md) | [Contri
|
||||
- [x] Web upload(Can allow visitors to upload), delete, mkdir, rename, move and copy
|
||||
- [x] Offline download
|
||||
- [x] Copy files between two storage
|
||||
- [x] Multi-thread downloading acceleration for single-thread download/stream
|
||||
|
||||
## Document
|
||||
|
||||
@ -75,17 +99,29 @@ English | [中文](./README_cn.md) | [Contributors](./CONTRIBUTORS.md) | [Contri
|
||||
|
||||
## Demo
|
||||
|
||||
<https://pan.nn.ci>
|
||||
<https://al.nn.ci>
|
||||
|
||||
## Discussion
|
||||
|
||||
Please go to our [discussion forum](https://github.com/Xhofe/alist/discussions) for general questions, **issues are for bug reports and feature request only.**
|
||||
|
||||
## Special sponsors
|
||||
- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.la/)
|
||||
- [KinhDown 百度云盘不限速下载!永久免费!已稳定运行3年!非常可靠!](https://kinhdown.com/?Type=Tutorials)
|
||||
## Sponsor
|
||||
|
||||
AList is an open-source software, if you happen to like this project and want me to keep going, please consider sponsoring me or providing a single donation! Thanks for all the love and support:
|
||||
https://alist.nn.ci/guide/sponsor.html
|
||||
|
||||
### Special sponsors
|
||||
|
||||
- [亚洲云 - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商](https://www.asiayun.com/aff/QQCOOQKZ) (sponsored Chinese API server)
|
||||
- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.pw/)
|
||||
- [JetBrains: Essential tools for software developers and teams](https://www.jetbrains.com/)
|
||||
|
||||
## Contributors
|
||||
|
||||
Thanks goes to these wonderful people:
|
||||
|
||||
[](https://github.com/alist-org/alist/graphs/contributors)
|
||||
|
||||
## License
|
||||
|
||||
The `AList` is open-source software licensed under the AGPL-3.0 license.
|
||||
@ -99,4 +135,4 @@ The `AList` is open-source software licensed under the AGPL-3.0 license.
|
||||
|
||||
---
|
||||
|
||||
> [@Blog](https://nn.ci/) · [@GitHub](https://github.com/Xhofe) · [@TelegramGroup](https://t.me/alist_chat) · [@QQGroup](https://jq.qq.com/?_wv=1027&k=YJJj2Gwb)
|
||||
> [@Blog](https://nn.ci/) · [@GitHub](https://github.com/Xhofe) · [@TelegramGroup](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)
|
||||
|
72 README_cn.md
@@ -1,37 +1,47 @@
|
||||
<div align="center">
|
||||
<a href="https://alist.nn.ci"><img height="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
|
||||
<p><em>🗂一个支持多存储的文件列表程序,使用 Gin 和 Solidjs。</em></p>
|
||||
<div>
|
||||
<a href="https://goreportcard.com/report/github.com/alist-org/alist/v3">
|
||||
<img src="https://goreportcard.com/badge/github.com/alist-org/alist/v3" alt="latest version" />
|
||||
</a>
|
||||
<a href="https://github.com/Xhofe/alist/blob/main/LICENSE">
|
||||
<img src="https://img.shields.io/github/license/Xhofe/alist" alt="License" />
|
||||
</a>
|
||||
<a href="https://github.com/Xhofe/alist/discussions">
|
||||
<img src="https://img.shields.io/github/discussions/Xhofe/alist?color=%23ED8936" alt="discussions" />
|
||||
</a>
|
||||
<a href="https://github.com/Xhofe/alist/actions?query=workflow%3ABuild">
|
||||
<img src="https://img.shields.io/github/workflow/status/Xhofe/alist/build" alt="Build status" />
|
||||
<img src="https://img.shields.io/github/actions/workflow/status/Xhofe/alist/build.yml?branch=main" alt="Build status" />
|
||||
</a>
|
||||
<a href="https://github.com/Xhofe/alist/releases">
|
||||
<img src="https://img.shields.io/github/release/Xhofe/alist" alt="latest version" />
|
||||
</a>
|
||||
<a href="https://github.com/Xhofe/alist/releases">
|
||||
<img src="https://img.shields.io/github/downloads/Xhofe/alist/total?color=%239F7AEA" alt="Downloads" />
|
||||
</a>
|
||||
<a title="Crowdin" target="_blank" href="https://crwd.in/alist">
|
||||
<img src="https://badges.crowdin.net/alist/localized.svg">
|
||||
</a>
|
||||
<a href="https://pay.xhofe.top">
|
||||
<img src="https://img.shields.io/badge/%24-sponsor-ff69b4.svg" alt="sponsor" />
|
||||
</div>
|
||||
<div>
|
||||
<a href="https://github.com/Xhofe/alist/discussions">
|
||||
<img src="https://img.shields.io/github/discussions/Xhofe/alist?color=%23ED8936" alt="discussions" />
|
||||
</a>
|
||||
<a href="https://discord.gg/F4ymsH4xv2">
|
||||
<img src="https://img.shields.io/discord/1018870125102895134?logo=discord" alt="discussions" />
|
||||
</a>
|
||||
<a href="https://github.com/Xhofe/alist/releases">
|
||||
<img src="https://img.shields.io/github/downloads/Xhofe/alist/total?color=%239F7AEA&logo=github" alt="Downloads" />
|
||||
</a>
|
||||
<a href="https://hub.docker.com/r/xhofe/alist">
|
||||
<img src="https://img.shields.io/docker/pulls/xhofe/alist?color=%2348BB78&logo=docker&label=pulls" alt="Downloads" />
|
||||
</a>
|
||||
<a href="https://alist.nn.ci/zh/guide/sponsor.html">
|
||||
<img src="https://img.shields.io/badge/%24-sponsor-F87171.svg" alt="sponsor" />
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
---
|
||||
|
||||
[English](./README.md) | 中文 | [Contributors](./CONTRIBUTORS.md) | [Contributing](./CONTRIBUTING.md)
|
||||
[English](./README.md) | 中文 | [日本語](./README_ja.md) | [Contributing](./CONTRIBUTING.md) | [CODE_OF_CONDUCT](./CODE_OF_CONDUCT.md)
|
||||
|
||||
## Features
|
||||
## 功能
|
||||
|
||||
- [x] 多种存储
|
||||
- [x] 本地存储
|
||||
@ -43,6 +53,7 @@
|
||||
- [x] FTP / SFTP
|
||||
- [x] [PikPak](https://www.mypikpak.com/)
|
||||
- [x] [S3](https://aws.amazon.com/cn/s3/)
|
||||
- [x] [Seafile](https://seafile.com/)
|
||||
- [x] [又拍云对象存储](https://www.upyun.com/products/file-storage)
|
||||
- [x] WebDav(支持无API的OneDrive/SharePoint)
|
||||
- [x] Teambition([中国](https://www.teambition.com/ ),[国际](https://us.teambition.com/ ))
|
||||
@ -50,7 +61,18 @@
|
||||
- [x] [和彩云](https://yun.139.com/) (个人云, 家庭云)
|
||||
- [x] [Yandex.Disk](https://disk.yandex.com/)
|
||||
- [x] [百度网盘](http://pan.baidu.com/)
|
||||
- [x] [UC网盘](https://drive.uc.cn)
|
||||
- [x] [夸克网盘](https://pan.quark.cn)
|
||||
- [x] [迅雷网盘](https://pan.xunlei.com)
|
||||
- [x] [蓝奏云](https://www.lanzou.com/)
|
||||
- [x] [阿里云盘分享](https://www.aliyundrive.com/)
|
||||
- [x] [谷歌相册](https://photos.google.com/)
|
||||
- [x] [Mega.nz](https://mega.nz)
|
||||
- [x] [一刻相册](https://photo.baidu.com/)
|
||||
- [x] SMB
|
||||
- [x] [115](https://115.com/)
|
||||
- [X] Cloudreve
|
||||
- [x] [Dropbox](https://www.dropbox.com/)
|
||||
- [x] 部署方便,开箱即用
|
||||
- [x] 文件预览(PDF、markdown、代码、纯文本……)
|
||||
- [x] 画廊模式下的图像预览
|
||||
@ -68,25 +90,37 @@
|
||||
- [x] 网页上传(可以允许访客上传),删除,新建文件夹,重命名,移动,复制
|
||||
- [x] 离线下载
|
||||
- [x] 跨存储复制文件
|
||||
- [x] 单线程下载/串流的多线程下载加速
|
||||
|
||||
## Document
|
||||
## 文档
|
||||
|
||||
<https://alist.nn.ci/zh/>
|
||||
|
||||
## Demo
|
||||
|
||||
<https://pan.nn.ci>
|
||||
<https://al.nn.ci>
|
||||
|
||||
## Discussion
|
||||
## 讨论
|
||||
|
||||
一般问题请到[讨论论坛](https://github.com/Xhofe/alist/discussions) ,**issue仅针对错误报告和功能请求。**
|
||||
|
||||
## Special sponsors
|
||||
- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.la/)
|
||||
- [KinhDown 百度云盘不限速下载!永久免费!已稳定运行3年!非常可靠!](https://kinhdown.com/?Type=Tutorials)
|
||||
## 赞助
|
||||
|
||||
AList 是一个开源软件,如果你碰巧喜欢这个项目,并希望我继续下去,请考虑赞助我或提供一个单一的捐款!感谢所有的爱和支持:https://alist.nn.ci/zh/guide/sponsor.html
|
||||
|
||||
### 特别赞助
|
||||
|
||||
- [亚洲云 - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商](https://www.asiayun.com/aff/QQCOOQKZ) (国内API服务器赞助)
|
||||
- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.pw/)
|
||||
- [JetBrains: Essential tools for software developers and teams](https://www.jetbrains.com/)
|
||||
|
||||
## 许可
|
||||
## 贡献者
|
||||
|
||||
Thanks goes to these wonderful people:
|
||||
|
||||
[](https://github.com/alist-org/alist/graphs/contributors)
|
||||
|
||||
## 许可
|
||||
|
||||
`AList` 是在 AGPL-3.0 许可下许可的开源软件。
|
||||
|
||||
@ -99,4 +133,4 @@
|
||||
|
||||
---
|
||||
|
||||
> [@博客](https://nn.ci/) · [@GitHub](https://github.com/Xhofe) · [@Telegram群](https://t.me/alist_chat) · [@QQ群](https://jq.qq.com/?_wv=1027&k=YJJj2Gwb)
|
||||
> [@博客](https://nn.ci/) · [@GitHub](https://github.com/Xhofe) · [@Telegram群](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)
|
||||
|
138
README_ja.md
Normal file
@ -0,0 +1,138 @@
|
||||
<div align="center">
|
||||
<a href="https://alist.nn.ci"><img height="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
|
||||
<p><em>🗂️Gin と Solidjs による、複数のストレージをサポートするファイルリストプログラム。</em></p>
|
||||
<div>
|
||||
<a href="https://goreportcard.com/report/github.com/alist-org/alist/v3">
|
||||
<img src="https://goreportcard.com/badge/github.com/alist-org/alist/v3" alt="latest version" />
|
||||
</a>
|
||||
<a href="https://github.com/Xhofe/alist/blob/main/LICENSE">
|
||||
<img src="https://img.shields.io/github/license/Xhofe/alist" alt="License" />
|
||||
</a>
|
||||
<a href="https://github.com/Xhofe/alist/actions?query=workflow%3ABuild">
|
||||
<img src="https://img.shields.io/github/actions/workflow/status/Xhofe/alist/build.yml?branch=main" alt="Build status" />
|
||||
</a>
|
||||
<a href="https://github.com/Xhofe/alist/releases">
|
||||
<img src="https://img.shields.io/github/release/Xhofe/alist" alt="latest version" />
|
||||
</a>
|
||||
<a title="Crowdin" target="_blank" href="https://crwd.in/alist">
|
||||
<img src="https://badges.crowdin.net/alist/localized.svg">
|
||||
</a>
|
||||
</div>
|
||||
<div>
|
||||
<a href="https://github.com/Xhofe/alist/discussions">
|
||||
<img src="https://img.shields.io/github/discussions/Xhofe/alist?color=%23ED8936" alt="discussions" />
|
||||
</a>
|
||||
<a href="https://discord.gg/F4ymsH4xv2">
|
||||
<img src="https://img.shields.io/discord/1018870125102895134?logo=discord" alt="discussions" />
|
||||
</a>
|
||||
<a href="https://github.com/Xhofe/alist/releases">
|
||||
<img src="https://img.shields.io/github/downloads/Xhofe/alist/total?color=%239F7AEA&logo=github" alt="Downloads" />
|
||||
</a>
|
||||
<a href="https://hub.docker.com/r/xhofe/alist">
|
||||
<img src="https://img.shields.io/docker/pulls/xhofe/alist?color=%2348BB78&logo=docker&label=pulls" alt="Downloads" />
|
||||
</a>
|
||||
<a href="https://alist.nn.ci/guide/sponsor.html">
|
||||
<img src="https://img.shields.io/badge/%24-sponsor-F87171.svg" alt="sponsor" />
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
---
|
||||
|
||||
[English](./README.md) | [中文](./README_cn.md) | 日本語 | [Contributing](./CONTRIBUTING.md) | [CODE_OF_CONDUCT](./CODE_OF_CONDUCT.md)
|
||||
|
||||
## 特徴
|
||||
|
||||
- [x] マルチストレージ
|
||||
- [x] ローカルストレージ
|
||||
- [x] [Aliyundrive](https://www.aliyundrive.com/)
|
||||
- [x] OneDrive / Sharepoint ([グローバル](https://www.office.com/), [cn](https://portal.partner.microsoftonline.cn),de,us)
|
||||
- [x] [189cloud](https://cloud.189.cn) (Personal, Family)
|
||||
- [x] [GoogleDrive](https://drive.google.com/)
|
||||
- [x] [123pan](https://www.123pan.com/)
|
||||
- [x] FTP / SFTP
|
||||
- [x] [PikPak](https://www.mypikpak.com/)
|
||||
- [x] [S3](https://aws.amazon.com/s3/)
|
||||
- [x] [Seafile](https://seafile.com/)
|
||||
- [x] [UPYUN Storage Service](https://www.upyun.com/products/file-storage)
|
||||
- [x] WebDav(Support OneDrive/SharePoint without API)
|
||||
- [x] Teambition([China](https://www.teambition.com/ ),[International](https://us.teambition.com/ ))
|
||||
- [x] [Mediatrack](https://www.mediatrack.cn/)
|
||||
- [x] [139yun](https://yun.139.com/) (Personal, Family)
|
||||
- [x] [YandexDisk](https://disk.yandex.com/)
|
||||
- [x] [BaiduNetdisk](http://pan.baidu.com/)
|
||||
- [x] [Terabox](https://www.terabox.com/main)
|
||||
- [x] [UC](https://drive.uc.cn)
|
||||
- [x] [Quark](https://pan.quark.cn)
|
||||
- [x] [Thunder](https://pan.xunlei.com)
|
||||
- [x] [Lanzou](https://www.lanzou.com/)
|
||||
- [x] [Aliyundrive share](https://www.aliyundrive.com/)
|
||||
- [x] [Google photo](https://photos.google.com/)
|
||||
- [x] [Mega.nz](https://mega.nz)
|
||||
- [x] [Baidu photo](https://photo.baidu.com/)
|
||||
- [x] SMB
|
||||
- [x] [115](https://115.com/)
|
||||
- [X] Cloudreve
|
||||
- [x] [Dropbox](https://www.dropbox.com/)
|
||||
- [x] デプロイが簡単で、すぐに使える
|
||||
- [x] ファイルプレビュー (PDF, マークダウン, コード, プレーンテキスト, ...)
|
||||
- [x] ギャラリーモードでの画像プレビュー
|
||||
- [x] ビデオとオーディオのプレビュー、歌詞と字幕のサポート
|
||||
- [x] Office ドキュメントのプレビュー (docx, pptx, xlsx, ...)
|
||||
- [x] `README.md` のプレビューレンダリング
|
||||
- [x] ファイルのパーマリンクコピーと直接ダウンロード
|
||||
- [x] ダークモード
|
||||
- [x] 国際化
|
||||
- [x] 保護されたルート (パスワード保護と認証)
|
||||
- [x] WebDav (詳細は https://alist.nn.ci/guide/webdav.html を参照)
|
||||
- [x] [Docker デプロイ](https://hub.docker.com/r/xhofe/alist)
|
||||
- [x] Cloudflare ワーカープロキシ
|
||||
- [x] ファイル/フォルダパッケージのダウンロード
|
||||
- [x] ウェブアップロード(訪問者にアップロードを許可できる), 削除, mkdir, 名前変更, 移動, コピー
|
||||
- [x] オフラインダウンロード
|
||||
- [x] 二つのストレージ間でファイルをコピー
|
||||
- [x] シングルスレッドのダウンロード/ストリーム向けのマルチスレッド ダウンロード アクセラレーション
|
||||
|
||||
## ドキュメント
|
||||
|
||||
<https://alist.nn.ci/>
|
||||
|
||||
## デモ
|
||||
|
||||
<https://al.nn.ci>
|
||||
|
||||
## ディスカッション
|
||||
|
||||
一般的なご質問は[ディスカッションフォーラム](https://github.com/Xhofe/alist/discussions)をご利用ください。**問題はバグレポートと機能リクエストのみです。**
|
||||
|
||||
## スポンサー
|
||||
|
||||
AList はオープンソースのソフトウェアです。もしあなたがこのプロジェクトを気に入ってくださり、続けて欲しいと思ってくださるなら、ぜひスポンサーになってくださるか、1口でも寄付をしてくださるようご検討ください!すべての愛とサポートに感謝します:
|
||||
https://alist.nn.ci/guide/sponsor.html
|
||||
|
||||
### スペシャルスポンサー
|
||||
|
||||
- [亚洲云 - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商](https://www.asiayun.com/aff/QQCOOQKZ) (sponsored Chinese API server)
|
||||
- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.pw/)
|
||||
- [JetBrains: Essential tools for software developers and teams](https://www.jetbrains.com/)
|
||||
|
||||
## コントリビューター
|
||||
|
||||
これらの素晴らしい人々に感謝します:
|
||||
|
||||
[](https://github.com/alist-org/alist/graphs/contributors)
|
||||
|
||||
## ライセンス
|
||||
|
||||
`AList` は AGPL-3.0 ライセンスの下でライセンスされたオープンソースソフトウェアです。
|
||||
|
||||
## 免責事項
|
||||
- このプログラムはフリーでオープンソースのプロジェクトです。ネットワークディスク上でファイルを共有するように設計されており、golang のダウンロードや学習に便利です。利用にあたっては関連法規を遵守し、悪用しないようお願いします;
|
||||
- このプログラムは、公式インターフェースの動作を破壊することなく、公式 sdk/インターフェースを呼び出すことで実装されています;
|
||||
- このプログラムは、302リダイレクト/トラフィック転送のみを行い、いかなるユーザーデータも傍受、保存、改ざんしません;
|
||||
- このプログラムを使用する前に、アカウントの禁止、ダウンロード速度の制限など、対応するリスクを理解し、負担する必要があります;
|
||||
- もし侵害があれば、[メール](mailto:i@nn.ci)で私に連絡してください。
|
||||
|
||||
---
|
||||
|
||||
> [@Blog](https://nn.ci/) · [@GitHub](https://github.com/Xhofe) · [@TelegramGroup](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)
|
103
build.sh
@ -1,7 +1,7 @@
|
||||
appName="alist"
|
||||
builtAt="$(date +'%F %T %z')"
|
||||
goVersion=$(go version | sed 's/go version //')
|
||||
gitAuthor=$(git show -s --format='format:%aN <%ae>' HEAD)
|
||||
gitAuthor="Xhofe <i@nn.ci>"
|
||||
gitCommit=$(git log --pretty=format:"%h" -1)
|
||||
|
||||
if [ "$1" = "dev" ]; then
|
||||
@ -12,7 +12,8 @@ else
|
||||
webVersion=$(wget -qO- -t1 -T2 "https://api.github.com/repos/alist-org/alist-web/releases/latest" | grep "tag_name" | head -n 1 | awk -F ":" '{print $2}' | sed 's/\"//g;s/,//g;s/ //g')
|
||||
fi
|
||||
|
||||
echo "build version: $gitTag"
|
||||
echo "backend version: $version"
|
||||
echo "frontend version: $webVersion"
|
||||
|
||||
ldflags="\
|
||||
-w -s \
|
||||
@ -25,11 +26,11 @@ ldflags="\
|
||||
"
|
||||
|
||||
FetchWebDev() {
|
||||
curl -L https://codeload.github.com/alist-org/web-dist/tar.gz/refs/heads/main -o web-dist-main.tar.gz
|
||||
tar -zxvf web-dist-main.tar.gz
|
||||
curl -L https://codeload.github.com/alist-org/web-dist/tar.gz/refs/heads/dev -o web-dist-dev.tar.gz
|
||||
tar -zxvf web-dist-dev.tar.gz
|
||||
rm -rf public/dist
|
||||
mv -f web-dist-main/dist public
|
||||
rm -rf web-dist-main web-dist-main.tar.gz
|
||||
mv -f web-dist-dev/dist public
|
||||
rm -rf web-dist-dev web-dist-dev.tar.gz
|
||||
}
|
||||
|
||||
FetchWebRelease() {
|
||||
@ -40,14 +41,45 @@ FetchWebRelease() {
|
||||
rm -rf dist.tar.gz
|
||||
}
|
||||
|
||||
BuildWinArm64() {
|
||||
echo building for windows-arm64
|
||||
chmod +x ./wrapper/zcc-arm64
|
||||
chmod +x ./wrapper/zcxx-arm64
|
||||
export GOOS=windows
|
||||
export GOARCH=arm64
|
||||
export CC=$(pwd)/wrapper/zcc-arm64
|
||||
export CXX=$(pwd)/wrapper/zcxx-arm64
|
||||
go build -o "$1" -ldflags="$ldflags" -tags=jsoniter .
|
||||
}
|
||||
|
||||
BuildDev() {
|
||||
rm -rf .git/
|
||||
xgo -targets=linux/amd64,windows/amd64,darwin/amd64 -out "$appName" -ldflags="$ldflags" -tags=jsoniter .
|
||||
mkdir -p "dist"
|
||||
muslflags="--extldflags '-static -fpic' $ldflags"
|
||||
BASE="https://musl.nn.ci/"
|
||||
FILES=(x86_64-linux-musl-cross aarch64-linux-musl-cross)
|
||||
for i in "${FILES[@]}"; do
|
||||
url="${BASE}${i}.tgz"
|
||||
curl -L -o "${i}.tgz" "${url}"
|
||||
sudo tar xf "${i}.tgz" --strip-components 1 -C /usr/local
|
||||
done
|
||||
OS_ARCHES=(linux-musl-amd64 linux-musl-arm64)
|
||||
CGO_ARGS=(x86_64-linux-musl-gcc aarch64-linux-musl-gcc)
|
||||
for i in "${!OS_ARCHES[@]}"; do
|
||||
os_arch=${OS_ARCHES[$i]}
|
||||
cgo_cc=${CGO_ARGS[$i]}
|
||||
echo building for ${os_arch}
|
||||
export GOOS=${os_arch%%-*}
|
||||
export GOARCH=${os_arch##*-}
|
||||
export CC=${cgo_cc}
|
||||
export CGO_ENABLED=1
|
||||
go build -o ./dist/$appName-$os_arch -ldflags="$muslflags" -tags=jsoniter .
|
||||
done
|
||||
xgo -targets=windows/amd64,darwin/amd64 -out "$appName" -ldflags="$ldflags" -tags=jsoniter .
|
||||
mv alist-* dist
|
||||
cd dist
|
||||
upx -9 ./alist-linux*
|
||||
upx -9 ./alist-windows*
|
||||
cp ./alist-windows-amd64.exe ./alist-windows-amd64-upx.exe
|
||||
upx -9 ./alist-windows-amd64-upx.exe
|
||||
find . -type f -print0 | xargs -0 md5sum >md5.txt
|
||||
cat md5.txt
|
||||
}
|
||||
@ -61,14 +93,15 @@ BuildRelease() {
|
||||
mkdir -p "build"
|
||||
muslflags="--extldflags '-static -fpic' $ldflags"
|
||||
BASE="https://musl.nn.ci/"
|
||||
FILES=(x86_64-linux-musl-cross aarch64-linux-musl-cross arm-linux-musleabihf-cross mips-linux-musl-cross mips64-linux-musl-cross mips64el-linux-musl-cross mipsel-linux-musl-cross powerpc64le-linux-musl-cross s390x-linux-musl-cross)
|
||||
FILES=(x86_64-linux-musl-cross aarch64-linux-musl-cross mips-linux-musl-cross mips64-linux-musl-cross mips64el-linux-musl-cross mipsel-linux-musl-cross powerpc64le-linux-musl-cross s390x-linux-musl-cross)
|
||||
for i in "${FILES[@]}"; do
|
||||
url="${BASE}${i}.tgz"
|
||||
curl -L -o "${i}.tgz" "${url}"
|
||||
sudo tar xf "${i}.tgz" --strip-components 1 -C /usr/local
|
||||
rm -f "${i}.tgz"
|
||||
done
|
||||
OS_ARCHES=(linux-musl-amd64 linux-musl-arm64 linux-musl-arm linux-musl-mips linux-musl-mips64 linux-musl-mips64le linux-musl-mipsle linux-musl-ppc64le linux-musl-s390x)
|
||||
CGO_ARGS=(x86_64-linux-musl-gcc aarch64-linux-musl-gcc arm-linux-musleabihf-gcc mips-linux-musl-gcc mips64-linux-musl-gcc mips64el-linux-musl-gcc mipsel-linux-musl-gcc powerpc64le-linux-musl-gcc s390x-linux-musl-gcc)
|
||||
OS_ARCHES=(linux-musl-amd64 linux-musl-arm64 linux-musl-mips linux-musl-mips64 linux-musl-mips64le linux-musl-mipsle linux-musl-ppc64le linux-musl-s390x)
|
||||
CGO_ARGS=(x86_64-linux-musl-gcc aarch64-linux-musl-gcc mips-linux-musl-gcc mips64-linux-musl-gcc mips64el-linux-musl-gcc mipsel-linux-musl-gcc powerpc64le-linux-musl-gcc s390x-linux-musl-gcc)
|
||||
for i in "${!OS_ARCHES[@]}"; do
|
||||
os_arch=${OS_ARCHES[$i]}
|
||||
cgo_cc=${CGO_ARGS[$i]}
|
||||
@ -79,13 +112,48 @@ BuildRelease() {
|
||||
export CGO_ENABLED=1
|
||||
go build -o ./build/$appName-$os_arch -ldflags="$muslflags" -tags=jsoniter .
|
||||
done
|
||||
BuildWinArm64 ./build/alist-windows-arm64.exe
|
||||
xgo -out "$appName" -ldflags="$ldflags" -tags=jsoniter .
|
||||
# why? Because some target platforms seem to have issues with upx compression
|
||||
upx -9 ./alist-linux-amd64
|
||||
upx -9 ./alist-windows*
|
||||
cp ./alist-windows-amd64.exe ./alist-windows-amd64-upx.exe
|
||||
upx -9 ./alist-windows-amd64-upx.exe
|
||||
mv alist-* build
|
||||
}
|
||||
|
||||
BuildReleaseLinuxMuslArm() {
|
||||
rm -rf .git/
|
||||
mkdir -p "build"
|
||||
muslflags="--extldflags '-static -fpic' $ldflags"
|
||||
BASE="https://musl.nn.ci/"
|
||||
# FILES=(arm-linux-musleabi-cross arm-linux-musleabihf-cross armeb-linux-musleabi-cross armeb-linux-musleabihf-cross armel-linux-musleabi-cross armel-linux-musleabihf-cross armv5l-linux-musleabi-cross armv5l-linux-musleabihf-cross armv6-linux-musleabi-cross armv6-linux-musleabihf-cross armv7l-linux-musleabihf-cross armv7m-linux-musleabi-cross armv7r-linux-musleabihf-cross)
|
||||
FILES=(arm-linux-musleabi-cross arm-linux-musleabihf-cross armel-linux-musleabi-cross armel-linux-musleabihf-cross armv5l-linux-musleabi-cross armv5l-linux-musleabihf-cross armv6-linux-musleabi-cross armv6-linux-musleabihf-cross armv7l-linux-musleabihf-cross armv7m-linux-musleabi-cross armv7r-linux-musleabihf-cross)
|
||||
for i in "${FILES[@]}"; do
|
||||
url="${BASE}${i}.tgz"
|
||||
curl -L -o "${i}.tgz" "${url}"
|
||||
sudo tar xf "${i}.tgz" --strip-components 1 -C /usr/local
|
||||
rm -f "${i}.tgz"
|
||||
done
|
||||
# OS_ARCHES=(linux-musleabi-arm linux-musleabihf-arm linux-musleabi-armeb linux-musleabihf-armeb linux-musleabi-armel linux-musleabihf-armel linux-musleabi-armv5l linux-musleabihf-armv5l linux-musleabi-armv6 linux-musleabihf-armv6 linux-musleabihf-armv7l linux-musleabi-armv7m linux-musleabihf-armv7r)
|
||||
# CGO_ARGS=(arm-linux-musleabi-gcc arm-linux-musleabihf-gcc armeb-linux-musleabi-gcc armeb-linux-musleabihf-gcc armel-linux-musleabi-gcc armel-linux-musleabihf-gcc armv5l-linux-musleabi-gcc armv5l-linux-musleabihf-gcc armv6-linux-musleabi-gcc armv6-linux-musleabihf-gcc armv7l-linux-musleabihf-gcc armv7m-linux-musleabi-gcc armv7r-linux-musleabihf-gcc)
|
||||
# GOARMS=('' '' '' '' '' '' '5' '5' '6' '6' '7' '7' '7')
|
||||
OS_ARCHES=(linux-musleabi-arm linux-musleabihf-arm linux-musleabi-armel linux-musleabihf-armel linux-musleabi-armv5l linux-musleabihf-armv5l linux-musleabi-armv6 linux-musleabihf-armv6 linux-musleabihf-armv7l linux-musleabi-armv7m linux-musleabihf-armv7r)
|
||||
CGO_ARGS=(arm-linux-musleabi-gcc arm-linux-musleabihf-gcc armel-linux-musleabi-gcc armel-linux-musleabihf-gcc armv5l-linux-musleabi-gcc armv5l-linux-musleabihf-gcc armv6-linux-musleabi-gcc armv6-linux-musleabihf-gcc armv7l-linux-musleabihf-gcc armv7m-linux-musleabi-gcc armv7r-linux-musleabihf-gcc)
|
||||
GOARMS=('' '' '' '' '5' '5' '6' '6' '7' '7' '7')
|
||||
for i in "${!OS_ARCHES[@]}"; do
|
||||
os_arch=${OS_ARCHES[$i]}
|
||||
cgo_cc=${CGO_ARGS[$i]}
|
||||
arm=${GOARMS[$i]}
|
||||
echo building for ${os_arch}
|
||||
export GOOS=linux
|
||||
export GOARCH=arm
|
||||
export CC=${cgo_cc}
|
||||
export CGO_ENABLED=1
|
||||
export GOARM=${arm}
|
||||
go build -o ./build/$appName-$os_arch -ldflags="$muslflags" -tags=jsoniter .
|
||||
done
|
||||
}
|
||||
|
||||
MakeRelease() {
|
||||
cd build
|
||||
mkdir compress
|
||||
@ -105,8 +173,8 @@ MakeRelease() {
|
||||
rm -f alist.exe
|
||||
done
|
||||
cd compress
|
||||
find . -type f -print0 | xargs -0 md5sum >md5.txt
|
||||
cat md5.txt
|
||||
find . -type f -print0 | xargs -0 md5sum >"$1"
|
||||
cat "$1"
|
||||
cd ../..
|
||||
}
|
||||
|
||||
@ -121,9 +189,12 @@ elif [ "$1" = "release" ]; then
|
||||
FetchWebRelease
|
||||
if [ "$2" = "docker" ]; then
|
||||
BuildDocker
|
||||
elif [ "$2" = "linux_musl_arm" ]; then
|
||||
BuildReleaseLinuxMuslArm
|
||||
MakeRelease "md5-linux-musl-arm.txt"
|
||||
else
|
||||
BuildRelease
|
||||
MakeRelease
|
||||
MakeRelease "md5.txt"
|
||||
fi
|
||||
else
|
||||
echo -e "Parameter error"
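From the argument handling above, the script dispatches on its first and second parameters. A hypothetical set of invocations (not an official build guide; assumes Go, xgo, curl and upx are available and you run it from the repository root):

```bash
bash build.sh dev                     # fetch the dev web dist and build dev binaries
bash build.sh release                 # multi-arch musl release build, checksums in md5.txt
bash build.sh release linux_musl_arm  # ARM musl variants only, checksums in md5-linux-musl-arm.txt
bash build.sh release docker          # docker-oriented release path
```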
|
||||
|
97
cmd/admin.go
Normal file
@ -0,0 +1,97 @@
|
||||
/*
|
||||
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
|
||||
*/
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"github.com/alist-org/alist/v3/internal/conf"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
"github.com/alist-org/alist/v3/internal/setting"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/alist-org/alist/v3/pkg/utils/random"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// AdminCmd represents the password command
|
||||
var AdminCmd = &cobra.Command{
|
||||
Use: "admin",
|
||||
Aliases: []string{"password"},
|
||||
Short: "Show admin user's info and some operations about admin user's password",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
Init()
|
||||
admin, err := op.GetAdmin()
|
||||
if err != nil {
|
||||
utils.Log.Errorf("failed get admin user: %+v", err)
|
||||
} else {
|
||||
utils.Log.Infof("Admin user's username: %s", admin.Username)
|
||||
utils.Log.Infof("The password can only be output at the first startup, and then stored as a hash value, which cannot be reversed")
|
||||
utils.Log.Infof("You can reset the password with a random string by running [alist admin random]")
|
||||
utils.Log.Infof("You can also set a new password by running [alist admin set NEW_PASSWORD]")
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
var RandomPasswordCmd = &cobra.Command{
|
||||
Use: "random",
|
||||
Short: "Reset admin user's password to a random string",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
newPwd := random.String(8)
|
||||
setAdminPassword(newPwd)
|
||||
},
|
||||
}
|
||||
|
||||
var SetPasswordCmd = &cobra.Command{
|
||||
Use: "set",
|
||||
Short: "Set admin user's password",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
if len(args) == 0 {
|
||||
utils.Log.Errorf("Please enter the new password")
|
||||
return
|
||||
}
|
||||
setAdminPassword(args[0])
|
||||
},
|
||||
}
|
||||
|
||||
var ShowTokenCmd = &cobra.Command{
|
||||
Use: "token",
|
||||
Short: "Show admin token",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
Init()
|
||||
token := setting.GetStr(conf.Token)
|
||||
utils.Log.Infof("Admin token: %s", token)
|
||||
},
|
||||
}
|
||||
|
||||
func setAdminPassword(pwd string) {
|
||||
Init()
|
||||
admin, err := op.GetAdmin()
|
||||
if err != nil {
|
||||
utils.Log.Errorf("failed get admin user: %+v", err)
|
||||
return
|
||||
}
|
||||
admin.SetPassword(pwd)
|
||||
if err := op.UpdateUser(admin); err != nil {
|
||||
utils.Log.Errorf("failed update admin user: %+v", err)
|
||||
return
|
||||
}
|
||||
utils.Log.Infof("admin user has been updated:")
|
||||
utils.Log.Infof("username: %s", admin.Username)
|
||||
utils.Log.Infof("password: %s", pwd)
|
||||
DelAdminCacheOnline()
|
||||
}
|
||||
|
||||
func init() {
|
||||
RootCmd.AddCommand(AdminCmd)
|
||||
AdminCmd.AddCommand(RandomPasswordCmd)
|
||||
AdminCmd.AddCommand(SetPasswordCmd)
|
||||
AdminCmd.AddCommand(ShowTokenCmd)
|
||||
// Here you will define your flags and configuration settings.
|
||||
|
||||
// Cobra supports Persistent Flags which will work for this command
|
||||
// and all subcommands, e.g.:
|
||||
// passwordCmd.PersistentFlags().String("foo", "", "A help for foo")
|
||||
|
||||
// Cobra supports local flags which will only run when this command
|
||||
// is called directly, e.g.:
|
||||
// passwordCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
|
||||
}
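Since the password is now stored only as a hash, the new `admin` command (aliased to the old `password` name) no longer prints it; instead it offers reset helpers. A sketch of the subcommands registered above, with an illustrative password value:

```bash
./alist admin                    # show the admin username and password hints
./alist admin random             # reset the admin password to a random 8-character string
./alist admin set MyNewPassword  # set an explicit admin password
./alist admin token              # print the admin token
```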
|
@ -4,31 +4,34 @@ Copyright © 2022 NAME HERE <EMAIL ADDRESS>
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"github.com/alist-org/alist/v3/internal/db"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// cancel2FACmd represents the delete2fa command
|
||||
var cancel2FACmd = &cobra.Command{
|
||||
// Cancel2FACmd represents the delete2fa command
|
||||
var Cancel2FACmd = &cobra.Command{
|
||||
Use: "cancel2fa",
|
||||
Short: "Delete 2FA of admin user",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
Init()
|
||||
admin, err := db.GetAdmin()
|
||||
admin, err := op.GetAdmin()
|
||||
if err != nil {
|
||||
utils.Log.Errorf("failed to get admin user: %+v", err)
|
||||
} else {
|
||||
err := db.Cancel2FAByUser(admin)
|
||||
err := op.Cancel2FAByUser(admin)
|
||||
if err != nil {
|
||||
utils.Log.Errorf("failed to cancel 2FA: %+v", err)
|
||||
} else {
|
||||
utils.Log.Info("2FA canceled")
|
||||
DelAdminCacheOnline()
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(cancel2FACmd)
|
||||
RootCmd.AddCommand(Cancel2FACmd)
|
||||
|
||||
// Here you will define your flags and configuration settings.
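The exported `Cancel2FACmd` keeps the same CLI surface, so recovering from a lost OTP device still looks like this (sketch based on the `Use: "cancel2fa"` definition above):

```bash
# Delete the admin user's two-factor authentication, then log in with password only.
./alist cancel2fa
```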
|
||||
|
||||
|
@ -1,8 +1,14 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
|
||||
"github.com/alist-org/alist/v3/internal/bootstrap"
|
||||
"github.com/alist-org/alist/v3/internal/bootstrap/data"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
func Init() {
|
||||
@ -10,4 +16,29 @@ func Init() {
|
||||
bootstrap.Log()
|
||||
bootstrap.InitDB()
|
||||
data.InitData()
|
||||
bootstrap.InitIndex()
|
||||
}
|
||||
|
||||
var pid = -1
|
||||
var pidFile string
|
||||
|
||||
func initDaemon() {
|
||||
ex, err := os.Executable()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
exPath := filepath.Dir(ex)
|
||||
_ = os.MkdirAll(filepath.Join(exPath, "daemon"), 0700)
|
||||
pidFile = filepath.Join(exPath, "daemon/pid")
|
||||
if utils.Exists(pidFile) {
|
||||
bytes, err := os.ReadFile(pidFile)
|
||||
if err != nil {
|
||||
log.Fatal("failed to read pid file", err)
|
||||
}
|
||||
id, err := strconv.Atoi(string(bytes))
|
||||
if err != nil {
|
||||
log.Fatal("failed to parse pid data", err)
|
||||
}
|
||||
pid = id
|
||||
}
|
||||
}
|
||||
|
@ -1,8 +1,10 @@
|
||||
package flags
|
||||
|
||||
var (
|
||||
Config string // config file
|
||||
Debug bool
|
||||
NoPrefix bool
|
||||
Dev bool
|
||||
DataDir string
|
||||
Debug bool
|
||||
NoPrefix bool
|
||||
Dev bool
|
||||
ForceBinDir bool
|
||||
LogStd bool
|
||||
)
|
||||
|
17
cmd/lang.go
@ -71,17 +71,26 @@ func writeFile(name string, data interface{}) {
|
||||
} else {
|
||||
log.Infof("%s.json changed, update file", name)
|
||||
//log.Infof("old: %+v\nnew:%+v", oldData, data)
|
||||
utils.WriteJsonToFile(fmt.Sprintf("lang/%s.json", name), data)
|
||||
utils.WriteJsonToFile(fmt.Sprintf("lang/%s.json", name), newData, true)
|
||||
}
|
||||
}
|
||||
|
||||
func generateDriversJson() {
|
||||
drivers := make(Drivers)
|
||||
drivers["drivers"] = make(KV[interface{}])
|
||||
drivers["config"] = make(KV[interface{}])
|
||||
driverInfoMap := op.GetDriverInfoMap()
|
||||
for k, v := range driverInfoMap {
|
||||
drivers["drivers"][k] = convert(k)
|
||||
items := make(KV[interface{}])
|
||||
config := map[string]string{}
|
||||
if v.Config.Alert != "" {
|
||||
alert := strings.SplitN(v.Config.Alert, "|", 2)
|
||||
if len(alert) > 1 {
|
||||
config["alert"] = alert[1]
|
||||
}
|
||||
}
|
||||
drivers["config"][k] = config
|
||||
for i := range v.Additional {
|
||||
item := v.Additional[i]
|
||||
items[item.Name] = convert(item.Name)
|
||||
@ -123,8 +132,8 @@ func generateSettingsJson() {
|
||||
//utils.WriteJsonToFile("lang/settings.json", settingsLang)
|
||||
}
|
||||
|
||||
// langCmd represents the lang command
|
||||
var langCmd = &cobra.Command{
|
||||
// LangCmd represents the lang command
|
||||
var LangCmd = &cobra.Command{
|
||||
Use: "lang",
|
||||
Short: "Generate language json file",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
@ -138,7 +147,7 @@ var langCmd = &cobra.Command{
|
||||
}
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(langCmd)
|
||||
RootCmd.AddCommand(LangCmd)
|
||||
|
||||
// Here you will define your flags and configuration settings.
|
||||
|
||||
|
@ -1,39 +0,0 @@
|
||||
/*
|
||||
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
|
||||
*/
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"github.com/alist-org/alist/v3/internal/db"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// passwordCmd represents the password command
|
||||
var passwordCmd = &cobra.Command{
|
||||
Use: "password",
|
||||
Short: "Show admin user's password",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
Init()
|
||||
admin, err := db.GetAdmin()
|
||||
if err != nil {
|
||||
utils.Log.Errorf("failed get admin user: %+v", err)
|
||||
} else {
|
||||
utils.Log.Infof("admin user's password is: %s", admin.Password)
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(passwordCmd)
|
||||
|
||||
// Here you will define your flags and configuration settings.
|
||||
|
||||
// Cobra supports Persistent Flags which will work for this command
|
||||
// and all subcommands, e.g.:
|
||||
// passwordCmd.PersistentFlags().String("foo", "", "A help for foo")
|
||||
|
||||
// Cobra supports local flags which will only run when this command
|
||||
// is called directly, e.g.:
|
||||
// passwordCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
|
||||
}
|
32
cmd/restart.go
Normal file
@ -0,0 +1,32 @@
|
||||
/*
|
||||
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
|
||||
*/
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// RestartCmd represents the restart command
|
||||
var RestartCmd = &cobra.Command{
|
||||
Use: "restart",
|
||||
Short: "Restart alist server by daemon/pid file",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
stop()
|
||||
start()
|
||||
},
|
||||
}
|
||||
|
||||
func init() {
|
||||
RootCmd.AddCommand(RestartCmd)
|
||||
|
||||
// Here you will define your flags and configuration settings.
|
||||
|
||||
// Cobra supports Persistent Flags which will work for this command
|
||||
// and all subcommands, e.g.:
|
||||
// restartCmd.PersistentFlags().String("foo", "", "A help for foo")
|
||||
|
||||
// Cobra supports local flags which will only run when this command
|
||||
// is called directly, e.g.:
|
||||
// restartCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
|
||||
}
|
14
cmd/root.go
@ -8,7 +8,7 @@ import (
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var rootCmd = &cobra.Command{
|
||||
var RootCmd = &cobra.Command{
|
||||
Use: "alist",
|
||||
Short: "A file list program that supports multiple storage.",
|
||||
Long: `A file list program that supports multiple storage,
|
||||
@ -17,15 +17,17 @@ Complete documentation is available at https://alist.nn.ci/`,
|
||||
}
|
||||
|
||||
func Execute() {
|
||||
if err := rootCmd.Execute(); err != nil {
|
||||
if err := RootCmd.Execute(); err != nil {
|
||||
fmt.Fprintln(os.Stderr, err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
func init() {
|
||||
rootCmd.PersistentFlags().StringVar(&flags.Config, "conf", "data/config.json", "config file")
|
||||
rootCmd.PersistentFlags().BoolVar(&flags.Debug, "debug", false, "start with debug mode")
|
||||
rootCmd.PersistentFlags().BoolVar(&flags.NoPrefix, "no-prefix", false, "disable env prefix")
|
||||
rootCmd.PersistentFlags().BoolVar(&flags.Dev, "dev", false, "start with dev mode")
|
||||
RootCmd.PersistentFlags().StringVar(&flags.DataDir, "data", "data", "data folder")
|
||||
RootCmd.PersistentFlags().BoolVar(&flags.Debug, "debug", false, "start with debug mode")
|
||||
RootCmd.PersistentFlags().BoolVar(&flags.NoPrefix, "no-prefix", false, "disable env prefix")
|
||||
RootCmd.PersistentFlags().BoolVar(&flags.Dev, "dev", false, "start with dev mode")
|
||||
RootCmd.PersistentFlags().BoolVar(&flags.ForceBinDir, "force-bin-dir", false, "Force to use the directory where the binary file is located as data directory")
|
||||
RootCmd.PersistentFlags().BoolVar(&flags.LogStd, "log-std", false, "Force to log to std")
|
||||
}
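The persistent flags change from a `--conf` file path to a `--data` directory, and gain the `--force-bin-dir` and `--log-std` switches. Illustrative combinations (paths are placeholders):

```bash
./alist server --data /opt/alist/data     # keep config, db and logs under a custom data dir
./alist server --force-bin-dir --log-std  # use the binary's directory as data dir, log to stdout
./alist server --debug --no-prefix        # debug mode, ignore the environment-variable prefix
```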
|
||||
|
128
cmd/server.go
@ -3,9 +3,12 @@ package cmd
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/signal"
|
||||
"strconv"
|
||||
"sync"
|
||||
"syscall"
|
||||
"time"
|
||||
|
||||
@ -20,15 +23,20 @@ import (
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// serverCmd represents the server command
|
||||
var serverCmd = &cobra.Command{
|
||||
// ServerCmd represents the server command
|
||||
var ServerCmd = &cobra.Command{
|
||||
Use: "server",
|
||||
Short: "Start the server at the specified address",
|
||||
Long: `Start the server at the specified address
|
||||
the address is defined in config file`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
Init()
|
||||
if conf.Conf.DelayedStart != 0 {
|
||||
utils.Log.Infof("delayed start for %d seconds", conf.Conf.DelayedStart)
|
||||
time.Sleep(time.Duration(conf.Conf.DelayedStart) * time.Second)
|
||||
}
|
||||
bootstrap.InitAria2()
|
||||
bootstrap.InitQbittorrent()
|
||||
bootstrap.LoadStorages()
|
||||
if !flags.Debug && !flags.Dev {
|
||||
gin.SetMode(gin.ReleaseMode)
|
||||
@ -36,47 +44,100 @@ the address is defined in config file`,
|
||||
r := gin.New()
|
||||
r.Use(gin.LoggerWithWriter(log.StandardLogger().Out), gin.RecoveryWithWriter(log.StandardLogger().Out))
|
||||
server.Init(r)
|
||||
base := fmt.Sprintf("%s:%d", conf.Conf.Address, conf.Conf.Port)
|
||||
utils.Log.Infof("start server @ %s", base)
|
||||
srv := &http.Server{Addr: base, Handler: r}
|
||||
go func() {
|
||||
var err error
|
||||
if conf.Conf.Scheme.Https {
|
||||
//err = r.RunTLS(base, conf.Conf.Scheme.CertFile, conf.Conf.Scheme.KeyFile)
|
||||
err = srv.ListenAndServeTLS(conf.Conf.Scheme.CertFile, conf.Conf.Scheme.KeyFile)
|
||||
} else {
|
||||
err = srv.ListenAndServe()
|
||||
}
|
||||
if err != nil && err != http.ErrServerClosed {
|
||||
utils.Log.Fatalf("failed to start: %s", err.Error())
|
||||
}
|
||||
}()
|
||||
var httpSrv, httpsSrv, unixSrv *http.Server
|
||||
if conf.Conf.Scheme.HttpPort != -1 {
|
||||
httpBase := fmt.Sprintf("%s:%d", conf.Conf.Scheme.Address, conf.Conf.Scheme.HttpPort)
|
||||
utils.Log.Infof("start HTTP server @ %s", httpBase)
|
||||
httpSrv = &http.Server{Addr: httpBase, Handler: r}
|
||||
go func() {
|
||||
err := httpSrv.ListenAndServe()
|
||||
if err != nil && err != http.ErrServerClosed {
|
||||
utils.Log.Fatalf("failed to start http: %s", err.Error())
|
||||
}
|
||||
}()
|
||||
}
|
||||
if conf.Conf.Scheme.HttpsPort != -1 {
|
||||
httpsBase := fmt.Sprintf("%s:%d", conf.Conf.Scheme.Address, conf.Conf.Scheme.HttpsPort)
|
||||
utils.Log.Infof("start HTTPS server @ %s", httpsBase)
|
||||
httpsSrv = &http.Server{Addr: httpsBase, Handler: r}
|
||||
go func() {
|
||||
err := httpsSrv.ListenAndServeTLS(conf.Conf.Scheme.CertFile, conf.Conf.Scheme.KeyFile)
|
||||
if err != nil && err != http.ErrServerClosed {
|
||||
utils.Log.Fatalf("failed to start https: %s", err.Error())
|
||||
}
|
||||
}()
|
||||
}
|
||||
if conf.Conf.Scheme.UnixFile != "" {
|
||||
utils.Log.Infof("start unix server @ %s", conf.Conf.Scheme.UnixFile)
|
||||
unixSrv = &http.Server{Handler: r}
|
||||
go func() {
|
||||
listener, err := net.Listen("unix", conf.Conf.Scheme.UnixFile)
|
||||
if err != nil {
|
||||
utils.Log.Fatalf("failed to listen unix: %+v", err)
|
||||
}
|
||||
// set socket file permission
|
||||
mode, err := strconv.ParseUint(conf.Conf.Scheme.UnixFilePerm, 8, 32)
|
||||
if err != nil {
|
||||
utils.Log.Errorf("failed to parse socket file permission: %+v", err)
|
||||
} else {
|
||||
err = os.Chmod(conf.Conf.Scheme.UnixFile, os.FileMode(mode))
|
||||
if err != nil {
|
||||
utils.Log.Errorf("failed to chmod socket file: %+v", err)
|
||||
}
|
||||
}
|
||||
err = unixSrv.Serve(listener)
|
||||
if err != nil && err != http.ErrServerClosed {
|
||||
utils.Log.Fatalf("failed to start unix: %s", err.Error())
|
||||
}
|
||||
}()
|
||||
}
|
||||
// Wait for interrupt signal to gracefully shutdown the server with
|
||||
// a timeout of 5 seconds.
|
||||
quit := make(chan os.Signal)
|
||||
// a timeout of 1 second.
|
||||
quit := make(chan os.Signal, 1)
|
||||
// kill (no param) sends syscall.SIGTERM by default
|
||||
// kill -2 is syscall.SIGINT
|
||||
// kill -9 is syscall. SIGKILL but can"t be catch, so don't need add it
|
||||
signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM)
|
||||
<-quit
|
||||
utils.Log.Println("Shutdown Server ...")
|
||||
utils.Log.Println("Shutdown server...")
|
||||
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second)
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second)
|
||||
defer cancel()
|
||||
if err := srv.Shutdown(ctx); err != nil {
|
||||
utils.Log.Fatal("Server Shutdown:", err)
|
||||
var wg sync.WaitGroup
|
||||
if conf.Conf.Scheme.HttpPort != -1 {
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
if err := httpSrv.Shutdown(ctx); err != nil {
|
||||
utils.Log.Fatal("HTTP server shutdown err: ", err)
|
||||
}
|
||||
}()
|
||||
}
|
||||
// catching ctx.Done(). timeout of 3 seconds.
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
utils.Log.Println("timeout of 3 seconds.")
|
||||
if conf.Conf.Scheme.HttpsPort != -1 {
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
if err := httpsSrv.Shutdown(ctx); err != nil {
|
||||
utils.Log.Fatal("HTTPS server shutdown err: ", err)
|
||||
}
|
||||
}()
|
||||
}
|
||||
utils.Log.Println("Server exiting")
|
||||
if conf.Conf.Scheme.UnixFile != "" {
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
if err := unixSrv.Shutdown(ctx); err != nil {
|
||||
utils.Log.Fatal("Unix server shutdown err: ", err)
|
||||
}
|
||||
}()
|
||||
}
|
||||
wg.Wait()
|
||||
utils.Log.Println("Server exit")
|
||||
},
|
||||
}
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(serverCmd)
|
||||
RootCmd.AddCommand(ServerCmd)
|
||||
|
||||
// Here you will define your flags and configuration settings.
|
||||
|
||||
@ -88,3 +149,12 @@ func init() {
|
||||
// is called directly, e.g.:
|
||||
// serverCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
|
||||
}
|
||||
|
||||
// OutAlistInit exposes a function so the server can be started from external code
|
||||
func OutAlistInit() {
|
||||
var (
|
||||
cmd *cobra.Command
|
||||
args []string
|
||||
)
|
||||
ServerCmd.Run(cmd, args)
|
||||
}
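With the rewrite, the server can run up to three listeners at once (plain HTTP, HTTPS, and a unix socket), each disabled by setting its port to -1 or leaving the socket path empty. A rough smoke test, assuming the docker-compose defaults of 5244/5245 and an illustrative socket path; adjust to your own config values:

```bash
curl -I http://127.0.0.1:5244/                       # HTTP listener
curl -kI https://127.0.0.1:5245/                     # HTTPS listener (self-signed cert tolerated)
curl --unix-socket /tmp/alist.sock -I http://localhost/   # unix-socket listener
```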
|
||||
|
71
cmd/start.go
Normal file
@ -0,0 +1,71 @@
|
||||
/*
|
||||
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
|
||||
*/
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// StartCmd represents the start command
|
||||
var StartCmd = &cobra.Command{
|
||||
Use: "start",
|
||||
Short: "Silent start alist server with `--force-bin-dir`",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
start()
|
||||
},
|
||||
}
|
||||
|
||||
func start() {
|
||||
initDaemon()
|
||||
if pid != -1 {
|
||||
_, err := os.FindProcess(pid)
|
||||
if err == nil {
|
||||
log.Info("alist already started, pid ", pid)
|
||||
return
|
||||
}
|
||||
}
|
||||
args := os.Args
|
||||
args[1] = "server"
|
||||
args = append(args, "--force-bin-dir")
|
||||
cmd := &exec.Cmd{
|
||||
Path: args[0],
|
||||
Args: args,
|
||||
Env: os.Environ(),
|
||||
}
|
||||
stdout, err := os.OpenFile(filepath.Join(filepath.Dir(pidFile), "start.log"), os.O_WRONLY|os.O_APPEND|os.O_CREATE, 0666)
|
||||
if err != nil {
|
||||
log.Fatal(os.Getpid(), ": failed to open start log file:", err)
|
||||
}
|
||||
cmd.Stderr = stdout
|
||||
cmd.Stdout = stdout
|
||||
err = cmd.Start()
|
||||
if err != nil {
|
||||
log.Fatal("failed to start children process: ", err)
|
||||
}
|
||||
log.Infof("success start pid: %d", cmd.Process.Pid)
|
||||
err = os.WriteFile(pidFile, []byte(strconv.Itoa(cmd.Process.Pid)), 0666)
|
||||
if err != nil {
|
||||
log.Warn("failed to record pid, you may not be able to stop the program with `./alist stop`")
|
||||
}
|
||||
}
|
||||
|
||||
func init() {
|
||||
RootCmd.AddCommand(StartCmd)
|
||||
|
||||
// Here you will define your flags and configuration settings.
|
||||
|
||||
// Cobra supports Persistent Flags which will work for this command
|
||||
// and all subcommands, e.g.:
|
||||
// startCmd.PersistentFlags().String("foo", "", "A help for foo")
|
||||
|
||||
// Cobra supports local flags which will only run when this command
|
||||
// is called directly, e.g.:
|
||||
// startCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
|
||||
}
|
58
cmd/stop.go
Normal file
@ -0,0 +1,58 @@
|
||||
/*
|
||||
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
|
||||
*/
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"os"
|
||||
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// StopCmd represents the stop command
|
||||
var StopCmd = &cobra.Command{
|
||||
Use: "stop",
|
||||
Short: "Stop alist server by daemon/pid file",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
stop()
|
||||
},
|
||||
}
|
||||
|
||||
func stop() {
|
||||
initDaemon()
|
||||
if pid == -1 {
|
||||
log.Info("Seems not have been started. Try use `alist start` to start server.")
|
||||
return
|
||||
}
|
||||
process, err := os.FindProcess(pid)
|
||||
if err != nil {
|
||||
log.Errorf("failed to find process by pid: %d, reason: %v", pid, process)
|
||||
return
|
||||
}
|
||||
err = process.Kill()
|
||||
if err != nil {
|
||||
log.Errorf("failed to kill process %d: %v", pid, err)
|
||||
} else {
|
||||
log.Info("killed process: ", pid)
|
||||
}
|
||||
err = os.Remove(pidFile)
|
||||
if err != nil {
|
||||
log.Errorf("failed to remove pid file")
|
||||
}
|
||||
pid = -1
|
||||
}
|
||||
|
||||
func init() {
|
||||
RootCmd.AddCommand(StopCmd)
|
||||
|
||||
// Here you will define your flags and configuration settings.
|
||||
|
||||
// Cobra supports Persistent Flags which will work for this command
|
||||
// and all subcommands, e.g.:
|
||||
// stopCmd.PersistentFlags().String("foo", "", "A help for foo")
|
||||
|
||||
// Cobra supports local flags which will only run when this command
|
||||
// is called directly, e.g.:
|
||||
// stopCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
|
||||
}
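Together, `start`, `stop` and `restart` form a minimal daemon layer built on the pid file written next to the binary (`<binary dir>/daemon/pid`, with output in `daemon/start.log`). A sketch of the lifecycle:

```bash
./alist start     # spawn "alist server --force-bin-dir" in the background, log to daemon/start.log
./alist stop      # kill the recorded pid and remove the pid file
./alist restart   # stop() followed by start()
```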
|
52
cmd/storage.go
Normal file
@ -0,0 +1,52 @@
|
||||
/*
|
||||
Copyright © 2023 NAME HERE <EMAIL ADDRESS>
|
||||
*/
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"github.com/alist-org/alist/v3/internal/db"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// storageCmd represents the storage command
|
||||
var storageCmd = &cobra.Command{
|
||||
Use: "storage",
|
||||
Short: "Manage storage",
|
||||
}
|
||||
|
||||
func init() {
|
||||
var mountPath string
|
||||
var disable = &cobra.Command{
|
||||
Use: "disable",
|
||||
Short: "Disable a storage",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
Init()
|
||||
storage, err := db.GetStorageByMountPath(mountPath)
|
||||
if err != nil {
|
||||
utils.Log.Errorf("failed to query storage: %+v", err)
|
||||
} else {
|
||||
storage.Disabled = true
|
||||
err = db.UpdateStorage(storage)
|
||||
if err != nil {
|
||||
utils.Log.Errorf("failed to update storage: %+v", err)
|
||||
} else {
|
||||
utils.Log.Infof("Storage with mount path [%s] have been disabled", mountPath)
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
disable.Flags().StringVarP(&mountPath, "mount-path", "m", "", "The mountPath of storage")
|
||||
RootCmd.AddCommand(storageCmd)
|
||||
storageCmd.AddCommand(disable)
|
||||
|
||||
// Here you will define your flags and configuration settings.
|
||||
|
||||
// Cobra supports Persistent Flags which will work for this command
|
||||
// and all subcommands, e.g.:
|
||||
// storageCmd.PersistentFlags().String("foo", "", "A help for foo")
|
||||
|
||||
// Cobra supports local flags which will only run when this command
|
||||
// is called directly, e.g.:
|
||||
// storageCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
|
||||
}
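The new `storage disable` subcommand flips the `Disabled` flag on whichever storage matches the given mount path, which is handy when a misbehaving driver prevents the server from starting cleanly. Example (the mount path is illustrative):

```bash
./alist storage disable --mount-path /115
# or with the short flag
./alist storage disable -m /115
```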
|
52
cmd/user.go
Normal file
@ -0,0 +1,52 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/internal/conf"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
"github.com/alist-org/alist/v3/internal/setting"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/go-resty/resty/v2"
|
||||
)
|
||||
|
||||
func DelAdminCacheOnline() {
|
||||
admin, err := op.GetAdmin()
|
||||
if err != nil {
|
||||
utils.Log.Errorf("[del_admin_cache] get admin error: %+v", err)
|
||||
return
|
||||
}
|
||||
DelUserCacheOnline(admin.Username)
|
||||
}
|
||||
|
||||
func DelUserCacheOnline(username string) {
|
||||
client := resty.New().SetTimeout(1 * time.Second).SetTLSClientConfig(&tls.Config{InsecureSkipVerify: conf.Conf.TlsInsecureSkipVerify})
|
||||
token := setting.GetStr(conf.Token)
|
||||
port := conf.Conf.Scheme.HttpPort
|
||||
u := fmt.Sprintf("http://localhost:%d/api/admin/user/del_cache", port)
|
||||
if port == -1 {
|
||||
if conf.Conf.Scheme.HttpsPort == -1 {
|
||||
utils.Log.Warnf("[del_user_cache] no open port")
|
||||
return
|
||||
}
|
||||
u = fmt.Sprintf("https://localhost:%d/api/admin/user/del_cache", conf.Conf.Scheme.HttpsPort)
|
||||
}
|
||||
res, err := client.R().SetHeader("Authorization", token).SetQueryParam("username", username).Post(u)
|
||||
if err != nil {
|
||||
utils.Log.Warnf("[del_user_cache_online] failed: %+v", err)
|
||||
return
|
||||
}
|
||||
if res.StatusCode() != 200 {
|
||||
utils.Log.Warnf("[del_user_cache_online] failed: %+v", res.String())
|
||||
return
|
||||
}
|
||||
code := utils.Json.Get(res.Body(), "code").ToInt()
|
||||
msg := utils.Json.Get(res.Body(), "message").ToString()
|
||||
if code != 200 {
|
||||
utils.Log.Errorf("[del_user_cache_online] error: %s", msg)
|
||||
return
|
||||
}
|
||||
utils.Log.Debugf("[del_user_cache_online] del user [%s] cache success", username)
|
||||
}
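`DelUserCacheOnline` simply calls the running server's cache-invalidation endpoint with the admin token. Roughly the equivalent HTTP request, with placeholder token, port and username:

```bash
TOKEN="alist-xxxx"   # the value printed by `./alist admin token`
curl -X POST "http://localhost:5244/api/admin/user/del_cache?username=admin" \
     -H "Authorization: $TOKEN"
```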
|
@ -1,6 +1,5 @@
|
||||
/*
|
||||
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
|
||||
|
||||
*/
|
||||
package cmd
|
||||
|
||||
@ -12,8 +11,8 @@ import (
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// versionCmd represents the version command
|
||||
var versionCmd = &cobra.Command{
|
||||
// VersionCmd represents the version command
|
||||
var VersionCmd = &cobra.Command{
|
||||
Use: "version",
|
||||
Short: "Show current version of AList",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
@ -30,7 +29,7 @@ WebVersion: %s
|
||||
}
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(versionCmd)
|
||||
RootCmd.AddCommand(VersionCmd)
|
||||
|
||||
// Here you will define your flags and configuration settings.
|
||||
|
||||
|
16
docker-compose.yml
Normal file
@ -0,0 +1,16 @@
|
||||
version: '3.3'
|
||||
services:
|
||||
alist:
|
||||
restart: always
|
||||
volumes:
|
||||
- '/etc/alist:/opt/alist/data'
|
||||
ports:
|
||||
- '5244:5244'
|
||||
- '5245:5245'
|
||||
environment:
|
||||
- PUID=0
|
||||
- PGID=0
|
||||
- UMASK=022
|
||||
- TZ=UTC
|
||||
container_name: alist
|
||||
image: 'xhofe/alist:latest'
|
97
drivers/115/driver.go
Normal file
@ -0,0 +1,97 @@
|
||||
package _115
|
||||
|
||||
import (
|
||||
"context"
|
||||
"os"
|
||||
|
||||
driver115 "github.com/SheltonZhu/115driver/pkg/driver"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
type Pan115 struct {
|
||||
model.Storage
|
||||
Addition
|
||||
client *driver115.Pan115Client
|
||||
}
|
||||
|
||||
func (d *Pan115) Config() driver.Config {
|
||||
return config
|
||||
}
|
||||
|
||||
func (d *Pan115) GetAddition() driver.Additional {
|
||||
return &d.Addition
|
||||
}
|
||||
|
||||
func (d *Pan115) Init(ctx context.Context) error {
|
||||
return d.login()
|
||||
}
|
||||
|
||||
func (d *Pan115) Drop(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Pan115) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||
files, err := d.getFiles(dir.GetID())
|
||||
if err != nil && !errors.Is(err, driver115.ErrNotExist) {
|
||||
return nil, err
|
||||
}
|
||||
return utils.SliceConvert(files, func(src driver115.File) (model.Obj, error) {
|
||||
return src, nil
|
||||
})
|
||||
}
|
||||
|
||||
func (d *Pan115) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
downloadInfo, err := d.client.
|
||||
SetUserAgent(driver115.UA115Browser).
|
||||
Download(file.(driver115.File).PickCode)
|
||||
// recover for upload
|
||||
d.client.SetUserAgent(driver115.UA115Desktop)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
link := &model.Link{
|
||||
URL: downloadInfo.Url.Url,
|
||||
Header: downloadInfo.Header,
|
||||
}
|
||||
return link, nil
|
||||
}
|
||||
|
||||
func (d *Pan115) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||
if _, err := d.client.Mkdir(parentDir.GetID(), dirName); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Pan115) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
return d.client.Move(dstDir.GetID(), srcObj.GetID())
|
||||
}
|
||||
|
||||
func (d *Pan115) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||
return d.client.Rename(srcObj.GetID(), newName)
|
||||
}
|
||||
|
||||
func (d *Pan115) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
return d.client.Copy(dstDir.GetID(), srcObj.GetID())
|
||||
}
|
||||
|
||||
func (d *Pan115) Remove(ctx context.Context, obj model.Obj) error {
|
||||
return d.client.Delete(obj.GetID())
|
||||
}
|
||||
|
||||
func (d *Pan115) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||
tempFile, err := utils.CreateTempFile(stream.GetReadCloser(), stream.GetSize())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() {
|
||||
_ = tempFile.Close()
|
||||
_ = os.Remove(tempFile.Name())
|
||||
}()
|
||||
return d.client.UploadFastOrByMultipart(dstDir.GetID(), stream.GetName(), stream.GetSize(), tempFile)
|
||||
}
|
||||
|
||||
var _ driver.Driver = (*Pan115)(nil)
|
27
drivers/115/meta.go
Normal file
@ -0,0 +1,27 @@
|
||||
package _115
|
||||
|
||||
import (
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
)
|
||||
|
||||
type Addition struct {
|
||||
Cookie string `json:"cookie" type:"text" help:"one of QR code token and cookie required"`
|
||||
QRCodeToken string `json:"qrcode_token" type:"text" help:"one of QR code token and cookie required"`
|
||||
PageSize int64 `json:"page_size" type:"number" default:"56" help:"list api per page size of 115 driver"`
|
||||
driver.RootID
|
||||
}
|
||||
|
||||
var config = driver.Config{
|
||||
Name: "115 Cloud",
|
||||
DefaultRoot: "0",
|
||||
OnlyProxy: true,
|
||||
OnlyLocal: true,
|
||||
NoOverwriteUpload: true,
|
||||
}
|
||||
|
||||
func init() {
|
||||
op.RegisterDriver(func() driver.Driver {
|
||||
return &Pan115{}
|
||||
})
|
||||
}
|
8
drivers/115/types.go
Normal file
@ -0,0 +1,8 @@
|
||||
package _115
|
||||
|
||||
import (
|
||||
"github.com/SheltonZhu/115driver/pkg/driver"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
)
|
||||
|
||||
var _ model.Obj = (*driver.File)(nil)
|
57
drivers/115/util.go
Normal file
@ -0,0 +1,57 @@
|
||||
package _115
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"fmt"
|
||||
|
||||
"github.com/SheltonZhu/115driver/pkg/driver"
|
||||
"github.com/alist-org/alist/v3/internal/conf"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
var UserAgent = driver.UA115Desktop
|
||||
|
||||
func (d *Pan115) login() error {
|
||||
var err error
|
||||
opts := []driver.Option{
|
||||
driver.UA(UserAgent),
|
||||
func(c *driver.Pan115Client) {
|
||||
c.Client.SetTLSClientConfig(&tls.Config{InsecureSkipVerify: conf.Conf.TlsInsecureSkipVerify})
|
||||
},
|
||||
}
|
||||
d.client = driver.New(opts...)
|
||||
cr := &driver.Credential{}
|
||||
if d.Addition.QRCodeToken != "" {
|
||||
s := &driver.QRCodeSession{
|
||||
UID: d.Addition.QRCodeToken,
|
||||
}
|
||||
if cr, err = d.client.QRCodeLogin(s); err != nil {
|
||||
return errors.Wrap(err, "failed to login by qrcode")
|
||||
}
|
||||
d.Addition.Cookie = fmt.Sprintf("UID=%s;CID=%s;SEID=%s", cr.UID, cr.CID, cr.SEID)
|
||||
d.Addition.QRCodeToken = ""
|
||||
} else if d.Addition.Cookie != "" {
|
||||
if err = cr.FromCookie(d.Addition.Cookie); err != nil {
|
||||
return errors.Wrap(err, "failed to login by cookies")
|
||||
}
|
||||
d.client.ImportCredential(cr)
|
||||
} else {
|
||||
return errors.New("missing cookie or qrcode account")
|
||||
}
|
||||
return d.client.LoginCheck()
|
||||
}
|
||||
|
||||
func (d *Pan115) getFiles(fileId string) ([]driver.File, error) {
|
||||
res := make([]driver.File, 0)
|
||||
if d.PageSize <= 0 {
|
||||
d.PageSize = driver.FileListLimit
|
||||
}
|
||||
files, err := d.client.ListWithLimit(fileId, d.PageSize)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for _, file := range *files {
|
||||
res = append(res, file)
|
||||
}
|
||||
return res, nil
|
||||
}
|
@ -1,18 +1,17 @@
|
||||
package _123
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"crypto/md5"
|
||||
"encoding/binary"
|
||||
"encoding/base64"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/conf"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/errs"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
@ -22,12 +21,12 @@ import (
|
||||
"github.com/aws/aws-sdk-go/aws/session"
|
||||
"github.com/aws/aws-sdk-go/service/s3/s3manager"
|
||||
"github.com/go-resty/resty/v2"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
type Pan123 struct {
|
||||
model.Storage
|
||||
Addition
|
||||
AccessToken string
|
||||
}
|
||||
|
||||
func (d *Pan123) Config() driver.Config {
|
||||
@ -35,19 +34,18 @@ func (d *Pan123) Config() driver.Config {
|
||||
}
|
||||
|
||||
func (d *Pan123) GetAddition() driver.Additional {
|
||||
return d.Addition
|
||||
return &d.Addition
|
||||
}
|
||||
|
||||
func (d *Pan123) Init(ctx context.Context, storage model.Storage) error {
|
||||
d.Storage = storage
|
||||
err := utils.Json.UnmarshalFromString(d.Storage.Addition, &d.Addition)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return d.login()
|
||||
func (d *Pan123) Init(ctx context.Context) error {
|
||||
_, err := d.request(UserInfo, http.MethodGet, nil, nil)
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *Pan123) Drop(ctx context.Context) error {
|
||||
_, _ = d.request(Logout, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{})
|
||||
}, nil)
|
||||
return nil
|
||||
}
|
||||
|
||||
@ -61,16 +59,64 @@ func (d *Pan123) List(ctx context.Context, dir model.Obj, args model.ListArgs) (
|
||||
})
|
||||
}
|
||||
|
||||
//func (d *Pan123) Get(ctx context.Context, path string) (model.Obj, error) {
|
||||
// // this is optional
|
||||
// return nil, errs.NotImplement
|
||||
//}
|
||||
|
||||
func (d *Pan123) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
if f, ok := file.(File); ok {
|
||||
return &model.Link{
|
||||
URL: f.DownloadUrl,
|
||||
}, nil
|
||||
//var resp DownResp
|
||||
var headers map[string]string
|
||||
if !utils.IsLocalIPAddr(args.IP) {
|
||||
headers = map[string]string{
|
||||
//"X-Real-IP": "1.1.1.1",
|
||||
"X-Forwarded-For": args.IP,
|
||||
}
|
||||
}
|
||||
data := base.Json{
|
||||
"driveId": 0,
|
||||
"etag": f.Etag,
|
||||
"fileId": f.FileId,
|
||||
"fileName": f.FileName,
|
||||
"s3keyFlag": f.S3KeyFlag,
|
||||
"size": f.Size,
|
||||
"type": f.Type,
|
||||
}
|
||||
resp, err := d.request(DownloadInfo, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data).SetHeaders(headers)
|
||||
}, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
downloadUrl := utils.Json.Get(resp, "data", "DownloadUrl").ToString()
|
||||
u, err := url.Parse(downloadUrl)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
nu := u.Query().Get("params")
|
||||
if nu != "" {
|
||||
du, _ := base64.StdEncoding.DecodeString(nu)
|
||||
u, err = url.Parse(string(du))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
u_ := u.String()
|
||||
log.Debug("download url: ", u_)
|
||||
res, err := base.NoRedirectClient.R().SetHeader("Referer", "https://www.123pan.com/").Get(u_)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
log.Debug(res.String())
|
||||
link := model.Link{
|
||||
URL: u_,
|
||||
}
|
||||
log.Debugln("res code: ", res.StatusCode())
|
||||
if res.StatusCode() == 302 {
|
||||
link.URL = res.Header().Get("location")
|
||||
} else if res.StatusCode() < 300 {
|
||||
link.URL = utils.Json.Get(res.Body(), "data", "redirect_url").ToString()
|
||||
}
|
||||
link.Header = http.Header{
|
||||
"Referer": []string{"https://www.123pan.com/"},
|
||||
}
|
||||
return &link, nil
|
||||
} else {
|
||||
return nil, fmt.Errorf("can't convert obj")
|
||||
}
|
||||
@ -85,7 +131,7 @@ func (d *Pan123) MakeDir(ctx context.Context, parentDir model.Obj, dirName strin
|
||||
"size": 0,
|
||||
"type": 1,
|
||||
}
|
||||
_, err := d.request("https://www.123pan.com/api/file/upload_request", http.MethodPost, func(req *resty.Request) {
|
||||
_, err := d.request(Mkdir, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data)
|
||||
}, nil)
|
||||
return err
|
||||
@ -96,7 +142,7 @@ func (d *Pan123) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
"fileIdList": []base.Json{{"FileId": srcObj.GetID()}},
|
||||
"parentFileId": dstDir.GetID(),
|
||||
}
|
||||
_, err := d.request("https://www.123pan.com/api/file/mod_pid", http.MethodPost, func(req *resty.Request) {
|
||||
_, err := d.request(Move, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data)
|
||||
}, nil)
|
||||
return err
|
||||
@ -108,7 +154,7 @@ func (d *Pan123) Rename(ctx context.Context, srcObj model.Obj, newName string) e
|
||||
"fileId": srcObj.GetID(),
|
||||
"fileName": newName,
|
||||
}
|
||||
_, err := d.request("https://www.123pan.com/api/file/rename", http.MethodPost, func(req *resty.Request) {
|
||||
_, err := d.request(Rename, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data)
|
||||
}, nil)
|
||||
return err
|
||||
@ -125,7 +171,7 @@ func (d *Pan123) Remove(ctx context.Context, obj model.Obj) error {
|
||||
"operation": true,
|
||||
"fileTrashInfoList": []File{f},
|
||||
}
|
||||
_, err := d.request("https://www.123pan.com/a/api/file/trash", http.MethodPost, func(req *resty.Request) {
|
||||
_, err := d.request(Trash, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data)
|
||||
}, nil)
|
||||
return err
|
||||
@ -135,42 +181,23 @@ func (d *Pan123) Remove(ctx context.Context, obj model.Obj) error {
|
||||
}
|
||||
|
||||
func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||
const DEFAULT int64 = 10485760
|
||||
var uploadFile io.Reader
|
||||
// const DEFAULT int64 = 10485760
|
||||
h := md5.New()
|
||||
if d.StreamUpload && stream.GetSize() > DEFAULT {
|
||||
// only hash the first 10 MiB
|
||||
buf := bytes.NewBuffer(make([]byte, 0, DEFAULT))
|
||||
if n, err := io.CopyN(io.MultiWriter(buf, h), stream, DEFAULT); err != io.EOF && n == 0 {
|
||||
return err
|
||||
}
|
||||
// add extra parameters to guard against MD5 collisions
|
||||
h.Write([]byte(stream.GetName()))
|
||||
num := make([]byte, 8)
|
||||
binary.BigEndian.PutUint64(num, uint64(stream.GetSize()))
|
||||
h.Write(num)
|
||||
// stitch the hashed prefix and the rest of the stream back together
|
||||
uploadFile = io.MultiReader(buf, stream)
|
||||
} else {
|
||||
// compute the MD5 of the whole file
|
||||
tempFile, err := os.CreateTemp(conf.Conf.TempDir, "file-*")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() {
|
||||
_ = tempFile.Close()
|
||||
_ = os.Remove(tempFile.Name())
|
||||
}()
|
||||
|
||||
if _, err = io.Copy(io.MultiWriter(tempFile, h), stream); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = tempFile.Seek(0, io.SeekStart)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
uploadFile = tempFile
|
||||
// need to calculate md5 of the full content
|
||||
tempFile, err := utils.CreateTempFile(stream.GetReadCloser(), stream.GetSize())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() {
|
||||
_ = tempFile.Close()
|
||||
_ = os.Remove(tempFile.Name())
|
||||
}()
|
||||
if _, err = io.Copy(h, tempFile); err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = tempFile.Seek(0, io.SeekStart)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
etag := hex.EncodeToString(h.Sum(nil))
|
||||
data := base.Json{
|
||||
@ -183,45 +210,47 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
|
||||
"type": 0,
|
||||
}
|
||||
var resp UploadResp
|
||||
_, err := d.request("https://www.123pan.com/api/file/upload_request", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data)
|
||||
res, err := d.request(UploadRequest, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data).SetContext(ctx)
|
||||
}, &resp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if resp.Data.Key == "" {
|
||||
log.Debugln("upload request res: ", string(res))
|
||||
if resp.Data.Reuse || resp.Data.Key == "" {
|
||||
return nil
|
||||
}
|
||||
cfg := &aws.Config{
|
||||
Credentials: credentials.NewStaticCredentials(resp.Data.AccessKeyId, resp.Data.SecretAccessKey, resp.Data.SessionToken),
|
||||
Region: aws.String("123pan"),
|
||||
Endpoint: aws.String("file.123pan.com"),
|
||||
S3ForcePathStyle: aws.Bool(true),
|
||||
if resp.Data.AccessKeyId == "" || resp.Data.SecretAccessKey == "" || resp.Data.SessionToken == "" {
|
||||
err = d.newUpload(ctx, &resp, stream, tempFile, up)
|
||||
return err
|
||||
} else {
|
||||
cfg := &aws.Config{
|
||||
Credentials: credentials.NewStaticCredentials(resp.Data.AccessKeyId, resp.Data.SecretAccessKey, resp.Data.SessionToken),
|
||||
Region: aws.String("123pan"),
|
||||
Endpoint: aws.String(resp.Data.EndPoint),
|
||||
S3ForcePathStyle: aws.Bool(true),
|
||||
}
|
||||
s, err := session.NewSession(cfg)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
uploader := s3manager.NewUploader(s)
|
||||
input := &s3manager.UploadInput{
|
||||
Bucket: &resp.Data.Bucket,
|
||||
Key: &resp.Data.Key,
|
||||
Body: tempFile,
|
||||
}
|
||||
_, err = uploader.UploadWithContext(ctx, input)
|
||||
}
|
||||
s, err := session.NewSession(cfg)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
uploader := s3manager.NewUploader(s)
|
||||
input := &s3manager.UploadInput{
|
||||
Bucket: &resp.Data.Bucket,
|
||||
Key: &resp.Data.Key,
|
||||
Body: uploadFile,
|
||||
}
|
||||
_, err = uploader.Upload(input)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = d.request("https://www.123pan.com/api/file/upload_complete", http.MethodPost, func(req *resty.Request) {
|
||||
_, err = d.request(UploadComplete, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"fileId": resp.Data.FileId,
|
||||
})
|
||||
}).SetContext(ctx)
|
||||
}, nil)
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *Pan123) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
|
||||
return nil, errs.NotSupport
|
||||
}
|
||||
|
||||
var _ driver.Driver = (*Pan123)(nil)
|
||||
|
@ -6,14 +6,12 @@ import (
)

type Addition struct {
	Username       string `json:"username" required:"true"`
	Password       string `json:"password" required:"true"`
	OrderBy        string `json:"order_by" type:"select" options:"name,fileId,updateAt,createAt" default:"name"`
	OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
	Username string `json:"username" required:"true"`
	Password string `json:"password" required:"true"`
	driver.RootID
	// define other
	StreamUpload bool `json:"stream_upload"`
	//Field string `json:"field" type:"select" required:"true" options:"a,b,c" default:"a"`
	OrderBy        string `json:"order_by" type:"select" options:"file_name,size,update_at" default:"file_name"`
	OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
	AccessToken string
}

var config = driver.Config{
@ -21,10 +19,8 @@ var config = driver.Config{
	DefaultRoot: "0",
}

func New() driver.Driver {
	return &Pan123{}
}

func init() {
	op.RegisterDriver(config, New)
	op.RegisterDriver(func() driver.Driver {
		return &Pan123{}
	})
}
@ -1,24 +1,15 @@
|
||||
package _123
|
||||
|
||||
import (
|
||||
"net/url"
|
||||
"path"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
)
|
||||
|
||||
type BaseResp struct {
|
||||
Code int `json:"code"`
|
||||
Message string `json:"message"`
|
||||
}
|
||||
|
||||
type TokenResp struct {
|
||||
BaseResp
|
||||
Data struct {
|
||||
Token string `json:"token"`
|
||||
} `json:"data"`
|
||||
}
|
||||
|
||||
type File struct {
|
||||
FileName string `json:"FileName"`
|
||||
Size int64 `json:"Size"`
|
||||
@ -54,7 +45,30 @@ func (f File) GetID() string {
|
||||
return strconv.FormatInt(f.FileId, 10)
|
||||
}
|
||||
|
||||
func (f File) Thumb() string {
|
||||
if f.DownloadUrl == "" {
|
||||
return ""
|
||||
}
|
||||
du, err := url.Parse(f.DownloadUrl)
|
||||
if err != nil {
|
||||
return ""
|
||||
}
|
||||
du.Path = strings.TrimSuffix(du.Path, "_24_24") + "_70_70"
|
||||
query := du.Query()
|
||||
query.Set("w", "70")
|
||||
query.Set("h", "70")
|
||||
if !query.Has("type") {
|
||||
query.Set("type", strings.TrimPrefix(path.Base(f.FileName), "."))
|
||||
}
|
||||
if !query.Has("trade_key") {
|
||||
query.Set("trade_key", "123pan-thumbnail")
|
||||
}
|
||||
du.RawQuery = query.Encode()
|
||||
return du.String()
|
||||
}
|
||||
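For reference, a quick within-package usage sketch of the new Thumb helper above. The DownloadUrl is a made-up example (assumes "fmt" is imported); note that the type query value ends up as the full base name, since TrimPrefix only strips a leading dot.

// illustrative only, not part of the diff
f := File{
	FileName:    "photo.jpg",
	DownloadUrl: "https://example.com/img/photo_24_24?e=1670000000",
}
fmt.Println(f.Thumb())
// https://example.com/img/photo_70_70?e=1670000000&h=70&trade_key=123pan-thumbnail&type=photo.jpg&w=70
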
|
||||
var _ model.Obj = (*File)(nil)
|
||||
var _ model.Thumb = (*File)(nil)
|
||||
|
||||
//func (f File) Thumb() string {
|
||||
//
|
||||
@ -62,22 +76,22 @@ var _ model.Obj = (*File)(nil)
|
||||
//var _ model.Thumb = (*File)(nil)
|
||||
|
||||
type Files struct {
|
||||
BaseResp
|
||||
//BaseResp
|
||||
Data struct {
|
||||
InfoList []File `json:"InfoList"`
|
||||
Next string `json:"Next"`
|
||||
} `json:"data"`
|
||||
}
|
||||
|
||||
type DownResp struct {
|
||||
BaseResp
|
||||
Data struct {
|
||||
DownloadUrl string `json:"DownloadUrl"`
|
||||
} `json:"data"`
|
||||
}
|
||||
//type DownResp struct {
|
||||
// //BaseResp
|
||||
// Data struct {
|
||||
// DownloadUrl string `json:"DownloadUrl"`
|
||||
// } `json:"data"`
|
||||
//}
|
||||
|
||||
type UploadResp struct {
|
||||
BaseResp
|
||||
//BaseResp
|
||||
Data struct {
|
||||
AccessKeyId string `json:"AccessKeyId"`
|
||||
Bucket string `json:"Bucket"`
|
||||
@ -85,5 +99,15 @@ type UploadResp struct {
|
||||
SecretAccessKey string `json:"SecretAccessKey"`
|
||||
SessionToken string `json:"SessionToken"`
|
||||
FileId int64 `json:"FileId"`
|
||||
Reuse bool `json:"Reuse"`
|
||||
EndPoint string `json:"EndPoint"`
|
||||
StorageNode string `json:"StorageNode"`
|
||||
UploadId string `json:"UploadId"`
|
||||
} `json:"data"`
|
||||
}
|
||||
|
||||
type S3PreSignedURLs struct {
|
||||
Data struct {
|
||||
PreSignedUrls map[string]string `json:"presignedUrls"`
|
||||
} `json:"data"`
|
||||
}
|
||||
|
155
drivers/123/upload.go
Normal file
@ -0,0 +1,155 @@
|
||||
package _123
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"math"
|
||||
"net/http"
|
||||
"strconv"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/go-resty/resty/v2"
|
||||
)
|
||||
|
||||
func (d *Pan123) getS3PreSignedUrls(ctx context.Context, upReq *UploadResp, start, end int) (*S3PreSignedURLs, error) {
|
||||
data := base.Json{
|
||||
"bucket": upReq.Data.Bucket,
|
||||
"key": upReq.Data.Key,
|
||||
"partNumberEnd": end,
|
||||
"partNumberStart": start,
|
||||
"uploadId": upReq.Data.UploadId,
|
||||
"StorageNode": upReq.Data.StorageNode,
|
||||
}
|
||||
var s3PreSignedUrls S3PreSignedURLs
|
||||
_, err := d.request(S3PreSignedUrls, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data).SetContext(ctx)
|
||||
}, &s3PreSignedUrls)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &s3PreSignedUrls, nil
|
||||
}
|
||||
|
||||
func (d *Pan123) getS3Auth(ctx context.Context, upReq *UploadResp, start, end int) (*S3PreSignedURLs, error) {
|
||||
data := base.Json{
|
||||
"StorageNode": upReq.Data.StorageNode,
|
||||
"bucket": upReq.Data.Bucket,
|
||||
"key": upReq.Data.Key,
|
||||
"partNumberEnd": end,
|
||||
"partNumberStart": start,
|
||||
"uploadId": upReq.Data.UploadId,
|
||||
}
|
||||
var s3PreSignedUrls S3PreSignedURLs
|
||||
_, err := d.request(S3Auth, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data).SetContext(ctx)
|
||||
}, &s3PreSignedUrls)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &s3PreSignedUrls, nil
|
||||
}
|
||||
|
||||
func (d *Pan123) completeS3(ctx context.Context, upReq *UploadResp, file model.FileStreamer, isMultipart bool) error {
|
||||
data := base.Json{
|
||||
"StorageNode": upReq.Data.StorageNode,
|
||||
"bucket": upReq.Data.Bucket,
|
||||
"fileId": upReq.Data.FileId,
|
||||
"fileSize": file.GetSize(),
|
||||
"isMultipart": isMultipart,
|
||||
"key": upReq.Data.Key,
|
||||
"uploadId": upReq.Data.UploadId,
|
||||
}
|
||||
_, err := d.request(UploadCompleteV2, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data).SetContext(ctx)
|
||||
}, nil)
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *Pan123) newUpload(ctx context.Context, upReq *UploadResp, file model.FileStreamer, reader io.Reader, up driver.UpdateProgress) error {
|
||||
chunkSize := int64(1024 * 1024 * 16)
|
||||
// fetch s3 pre signed urls
|
||||
chunkCount := int(math.Ceil(float64(file.GetSize()) / float64(chunkSize)))
|
||||
// only 1 batch is allowed
|
||||
isMultipart := chunkCount > 1
|
||||
batchSize := 1
|
||||
getS3UploadUrl := d.getS3Auth
|
||||
if isMultipart {
|
||||
batchSize = 10
|
||||
getS3UploadUrl = d.getS3PreSignedUrls
|
||||
}
|
||||
for i := 1; i <= chunkCount; i += batchSize {
|
||||
if utils.IsCanceled(ctx) {
|
||||
return ctx.Err()
|
||||
}
|
||||
start := i
|
||||
end := i + batchSize
|
||||
if end > chunkCount+1 {
|
||||
end = chunkCount + 1
|
||||
}
|
||||
s3PreSignedUrls, err := getS3UploadUrl(ctx, upReq, start, end)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// upload each chunk
|
||||
for j := start; j < end; j++ {
|
||||
if utils.IsCanceled(ctx) {
|
||||
return ctx.Err()
|
||||
}
|
||||
curSize := chunkSize
|
||||
if j == chunkCount {
|
||||
curSize = file.GetSize() - (int64(chunkCount)-1)*chunkSize
|
||||
}
|
||||
err = d.uploadS3Chunk(ctx, upReq, s3PreSignedUrls, j, end, io.LimitReader(reader, chunkSize), curSize, false, getS3UploadUrl)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
up(j * 100 / chunkCount)
|
||||
}
|
||||
}
|
||||
// complete s3 upload
|
||||
return d.completeS3(ctx, upReq, file, chunkCount > 1)
|
||||
}
|
||||
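To make the batching in newUpload above concrete, here is a small self-contained sketch of the part/batch arithmetic with made-up sizes (the real code also switches to the single-object auth endpoint when there is only one chunk).

package main

import (
	"fmt"
	"math"
)

func main() {
	// hypothetical 100 MiB file, 16 MiB chunks as in newUpload
	size := int64(100 * 1024 * 1024)
	chunkSize := int64(16 * 1024 * 1024)
	chunkCount := int(math.Ceil(float64(size) / float64(chunkSize))) // 7
	batchSize := 10                                                  // pre-signed URLs are requested 10 parts at a time
	for i := 1; i <= chunkCount; i += batchSize {
		end := i + batchSize
		if end > chunkCount+1 {
			end = chunkCount + 1
		}
		fmt.Printf("fetch pre-signed URLs for parts [%d, %d)\n", i, end)
	}
	// output: fetch pre-signed URLs for parts [1, 8)
}
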
|
||||
func (d *Pan123) uploadS3Chunk(ctx context.Context, upReq *UploadResp, s3PreSignedUrls *S3PreSignedURLs, cur, end int, reader io.Reader, curSize int64, retry bool, getS3UploadUrl func(ctx context.Context, upReq *UploadResp, start int, end int) (*S3PreSignedURLs, error)) error {
|
||||
uploadUrl := s3PreSignedUrls.Data.PreSignedUrls[strconv.Itoa(cur)]
|
||||
if uploadUrl == "" {
|
||||
return fmt.Errorf("upload url is empty, s3PreSignedUrls: %+v", s3PreSignedUrls)
|
||||
}
|
||||
req, err := http.NewRequest("PUT", uploadUrl, reader)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req = req.WithContext(ctx)
|
||||
req.ContentLength = curSize
|
||||
//req.Header.Set("Content-Length", strconv.FormatInt(curSize, 10))
|
||||
res, err := base.HttpClient.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer res.Body.Close()
|
||||
if res.StatusCode == http.StatusForbidden {
|
||||
if retry {
|
||||
return fmt.Errorf("upload s3 chunk %d failed, status code: %d", cur, res.StatusCode)
|
||||
}
|
||||
// refresh s3 pre signed urls
|
||||
newS3PreSignedUrls, err := getS3UploadUrl(ctx, upReq, cur, end)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
s3PreSignedUrls.Data.PreSignedUrls = newS3PreSignedUrls.Data.PreSignedUrls
|
||||
// retry
|
||||
return d.uploadS3Chunk(ctx, upReq, s3PreSignedUrls, cur, end, reader, curSize, true, getS3UploadUrl)
|
||||
}
|
||||
if res.StatusCode != http.StatusOK {
|
||||
body, err := io.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return fmt.Errorf("upload s3 chunk %d failed, status code: %d, body: %s", cur, res.StatusCode, body)
|
||||
}
|
||||
return nil
|
||||
}
|
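The uploadS3Chunk function above follows a refresh-and-retry-once pattern when a pre-signed URL has expired (S3 answers 403). A generic sketch of that control flow; doPut and refresh are placeholders, not driver functions, and "fmt" and "net/http" are assumed to be imported.

func putWithRefresh(doPut func() (int, error), refresh func() error) error {
	status, err := doPut()
	if err != nil {
		return err
	}
	if status == http.StatusForbidden {
		// credentials/URL expired: refresh once, then retry exactly once
		if err := refresh(); err != nil {
			return err
		}
		if status, err = doPut(); err != nil {
			return err
		}
	}
	if status != http.StatusOK {
		return fmt.Errorf("chunk upload failed, status code: %d", status)
	}
	return nil
}
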
@ -4,49 +4,117 @@ import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strconv"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/go-resty/resty/v2"
|
||||
jsoniter "github.com/json-iterator/go"
|
||||
)
|
||||
|
||||
// do others that not defined in Driver interface
|
||||
|
||||
const (
|
||||
Api = "https://www.123pan.com/api"
|
||||
AApi = "https://www.123pan.com/a/api"
|
||||
BApi = "https://www.123pan.com/b/api"
|
||||
MainApi = Api
|
||||
SignIn = MainApi + "/user/sign_in"
|
||||
Logout = MainApi + "/user/logout"
|
||||
UserInfo = MainApi + "/user/info"
|
||||
FileList = MainApi + "/file/list/new"
|
||||
DownloadInfo = MainApi + "/file/download_info"
|
||||
Mkdir = MainApi + "/file/upload_request"
|
||||
Move = MainApi + "/file/mod_pid"
|
||||
Rename = MainApi + "/file/rename"
|
||||
Trash = MainApi + "/file/trash"
|
||||
UploadRequest = MainApi + "/file/upload_request"
|
||||
UploadComplete = MainApi + "/file/upload_complete"
|
||||
S3PreSignedUrls = MainApi + "/file/s3_repare_upload_parts_batch"
|
||||
S3Auth = MainApi + "/file/s3_upload_object/auth"
|
||||
UploadCompleteV2 = MainApi + "/file/upload_complete/v2"
|
||||
S3Complete = MainApi + "/file/s3_complete_multipart_upload"
|
||||
//AuthKeySalt = "8-8D$sL8gPjom7bk#cY"
|
||||
)
|
||||
|
||||
func (d *Pan123) login() error {
|
||||
url := "https://www.123pan.com/api/user/sign_in"
|
||||
var resp TokenResp
|
||||
_, err := base.RestyClient.R().
|
||||
SetResult(&resp).
|
||||
SetBody(base.Json{
|
||||
var body base.Json
|
||||
if utils.IsEmailFormat(d.Username) {
|
||||
body = base.Json{
|
||||
"mail": d.Username,
|
||||
"password": d.Password,
|
||||
"type": 2,
|
||||
}
|
||||
} else {
|
||||
body = base.Json{
|
||||
"passport": d.Username,
|
||||
"password": d.Password,
|
||||
}).Post(url)
|
||||
"remember": true,
|
||||
}
|
||||
}
|
||||
res, err := base.RestyClient.R().
|
||||
SetHeaders(map[string]string{
|
||||
"origin": "https://www.123pan.com",
|
||||
"referer": "https://www.123pan.com/",
|
||||
"user-agent": "Dart/2.19(dart:io)",
|
||||
"platform": "android",
|
||||
"app-version": "36",
|
||||
//"user-agent": base.UserAgent,
|
||||
}).
|
||||
SetBody(body).Post(SignIn)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if resp.Code != 200 {
|
||||
err = fmt.Errorf(resp.Message)
|
||||
if utils.Json.Get(res.Body(), "code").ToInt() != 200 {
|
||||
err = fmt.Errorf(utils.Json.Get(res.Body(), "message").ToString())
|
||||
} else {
|
||||
d.AccessToken = resp.Data.Token
|
||||
d.AccessToken = utils.Json.Get(res.Body(), "data", "token").ToString()
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
//func authKey(reqUrl string) (*string, error) {
|
||||
// reqURL, err := url.Parse(reqUrl)
|
||||
// if err != nil {
|
||||
// return nil, err
|
||||
// }
|
||||
//
|
||||
// nowUnix := time.Now().Unix()
|
||||
// random := rand.Intn(0x989680)
|
||||
//
|
||||
// p4 := fmt.Sprintf("%d|%d|%s|%s|%s|%s", nowUnix, random, reqURL.Path, "web", "3", AuthKeySalt)
|
||||
// authKey := fmt.Sprintf("%d-%d-%x", nowUnix, random, md5.Sum([]byte(p4)))
|
||||
// return &authKey, nil
|
||||
//}
|
||||
|
||||
func (d *Pan123) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
||||
req := base.RestyClient.R()
|
||||
req.SetHeader("Authorization", "Bearer "+d.AccessToken)
|
||||
req.SetHeaders(map[string]string{
|
||||
"origin": "https://www.123pan.com",
|
||||
"referer": "https://www.123pan.com/",
|
||||
"authorization": "Bearer " + d.AccessToken,
|
||||
"user-agent": "Dart/2.19(dart:io)",
|
||||
"platform": "android",
|
||||
"app-version": "36",
|
||||
//"user-agent": base.UserAgent,
|
||||
})
|
||||
if callback != nil {
|
||||
callback(req)
|
||||
}
|
||||
if resp != nil {
|
||||
req.SetResult(resp)
|
||||
}
|
||||
//authKey, err := authKey(url)
|
||||
//if err != nil {
|
||||
// return nil, err
|
||||
//}
|
||||
//req.SetQueryParam("auth-key", *authKey)
|
||||
res, err := req.Execute(method, url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
body := res.Body()
|
||||
code := jsoniter.Get(body, "code").ToInt()
|
||||
code := utils.Json.Get(body, "code").ToInt()
|
||||
if code != 0 {
|
||||
if code == 401 {
|
||||
err := d.login()
|
||||
@ -61,27 +129,31 @@ func (d *Pan123) request(url string, method string, callback base.ReqCallback, r
|
||||
}
|
||||
|
||||
func (d *Pan123) getFiles(parentId string) ([]File, error) {
|
||||
next := "0"
|
||||
page := 1
|
||||
res := make([]File, 0)
|
||||
for next != "-1" {
|
||||
for {
|
||||
var resp Files
|
||||
query := map[string]string{
|
||||
"driveId": "0",
|
||||
"limit": "100",
|
||||
"next": next,
|
||||
"next": "0",
|
||||
"orderBy": d.OrderBy,
|
||||
"orderDirection": d.OrderDirection,
|
||||
"parentFileId": parentId,
|
||||
"trashed": "false",
|
||||
"Page": strconv.Itoa(page),
|
||||
}
|
||||
_, err := d.request("https://www.123pan.com/api/file/list/new", http.MethodGet, func(req *resty.Request) {
|
||||
_, err := d.request(FileList, http.MethodGet, func(req *resty.Request) {
|
||||
req.SetQueryParams(query)
|
||||
}, &resp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
next = resp.Data.Next
|
||||
page++
|
||||
res = append(res, resp.Data.InfoList...)
|
||||
if len(resp.Data.InfoList) == 0 || resp.Data.Next == "-1" {
|
||||
break
|
||||
}
|
||||
}
|
||||
return res, nil
|
||||
}
|
||||
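The reworked getFiles above switches from cursor paging to page-number paging and stops once a page comes back empty or Next is "-1". The same loop in a generic, driver-independent form; fetch is a placeholder callback, not a real driver function.

// assumes this sits next to the File type from this package
func collectAll(fetch func(page int) (items []File, next string, err error)) ([]File, error) {
	res := make([]File, 0)
	for page := 1; ; page++ {
		items, next, err := fetch(page)
		if err != nil {
			return nil, err
		}
		res = append(res, items...)
		if len(items) == 0 || next == "-1" {
			break
		}
	}
	return res, nil
}
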
|
149
drivers/123_share/driver.go
Normal file
@ -0,0 +1,149 @@
|
||||
package _123Share
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/url"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/errs"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/go-resty/resty/v2"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
type Pan123Share struct {
|
||||
model.Storage
|
||||
Addition
|
||||
}
|
||||
|
||||
func (d *Pan123Share) Config() driver.Config {
|
||||
return config
|
||||
}
|
||||
|
||||
func (d *Pan123Share) GetAddition() driver.Additional {
|
||||
return &d.Addition
|
||||
}
|
||||
|
||||
func (d *Pan123Share) Init(ctx context.Context) error {
|
||||
// TODO login / refresh token
|
||||
//op.MustSaveDriverStorage(d)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Pan123Share) Drop(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Pan123Share) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||
// TODO return the files list, required
|
||||
files, err := d.getFiles(dir.GetID())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
|
||||
return src, nil
|
||||
})
|
||||
}
|
||||
|
||||
func (d *Pan123Share) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
// TODO return link of file, required
|
||||
if f, ok := file.(File); ok {
|
||||
//var resp DownResp
|
||||
var headers map[string]string
|
||||
if !utils.IsLocalIPAddr(args.IP) {
|
||||
headers = map[string]string{
|
||||
//"X-Real-IP": "1.1.1.1",
|
||||
"X-Forwarded-For": args.IP,
|
||||
}
|
||||
}
|
||||
data := base.Json{
|
||||
"shareKey": d.ShareKey,
|
||||
"SharePwd": d.SharePwd,
|
||||
"etag": f.Etag,
|
||||
"fileId": f.FileId,
|
||||
"s3keyFlag": f.S3KeyFlag,
|
||||
"size": f.Size,
|
||||
}
|
||||
resp, err := d.request(DownloadInfo, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data).SetHeaders(headers)
|
||||
}, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
downloadUrl := utils.Json.Get(resp, "data", "DownloadURL").ToString()
|
||||
u, err := url.Parse(downloadUrl)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
nu := u.Query().Get("params")
|
||||
if nu != "" {
|
||||
du, _ := base64.StdEncoding.DecodeString(nu)
|
||||
u, err = url.Parse(string(du))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
u_ := u.String()
|
||||
log.Debug("download url: ", u_)
|
||||
res, err := base.NoRedirectClient.R().SetHeader("Referer", "https://www.123pan.com/").Get(u_)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
log.Debug(res.String())
|
||||
link := model.Link{
|
||||
URL: u_,
|
||||
}
|
||||
log.Debugln("res code: ", res.StatusCode())
|
||||
if res.StatusCode() == 302 {
|
||||
link.URL = res.Header().Get("location")
|
||||
} else if res.StatusCode() < 300 {
|
||||
link.URL = utils.Json.Get(res.Body(), "data", "redirect_url").ToString()
|
||||
}
|
||||
link.Header = http.Header{
|
||||
"Referer": []string{"https://www.123pan.com/"},
|
||||
}
|
||||
return &link, nil
|
||||
}
|
||||
return nil, fmt.Errorf("can't convert obj")
|
||||
}
|
||||
|
||||
func (d *Pan123Share) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||
// TODO create folder, optional
|
||||
return errs.NotSupport
|
||||
}
|
||||
|
||||
func (d *Pan123Share) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
// TODO move obj, optional
|
||||
return errs.NotSupport
|
||||
}
|
||||
|
||||
func (d *Pan123Share) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||
// TODO rename obj, optional
|
||||
return errs.NotSupport
|
||||
}
|
||||
|
||||
func (d *Pan123Share) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
// TODO copy obj, optional
|
||||
return errs.NotSupport
|
||||
}
|
||||
|
||||
func (d *Pan123Share) Remove(ctx context.Context, obj model.Obj) error {
|
||||
// TODO remove obj, optional
|
||||
return errs.NotSupport
|
||||
}
|
||||
|
||||
func (d *Pan123Share) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||
// TODO upload file, optional
|
||||
return errs.NotSupport
|
||||
}
|
||||
|
||||
//func (d *Pan123Share) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
|
||||
// return nil, errs.NotSupport
|
||||
//}
|
||||
|
||||
var _ driver.Driver = (*Pan123Share)(nil)
|
34
drivers/123_share/meta.go
Normal file
@ -0,0 +1,34 @@
package _123Share

import (
	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/op"
)

type Addition struct {
	ShareKey       string `json:"sharekey" required:"true"`
	SharePwd       string `json:"sharepassword" required:"true"`
	driver.RootID
	OrderBy        string `json:"order_by" type:"select" options:"file_name,size,update_at" default:"file_name"`
	OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
}

var config = driver.Config{
	Name:              "123PanShare",
	LocalSort:         true,
	OnlyLocal:         false,
	OnlyProxy:         false,
	NoCache:           false,
	NoUpload:          true,
	NeedMs:            false,
	DefaultRoot:       "0",
	CheckStatus:       false,
	Alert:             "",
	NoOverwriteUpload: false,
}

func init() {
	op.RegisterDriver(func() driver.Driver {
		return &Pan123Share{}
	})
}
91
drivers/123_share/types.go
Normal file
@ -0,0 +1,91 @@
|
||||
package _123Share
|
||||
|
||||
import (
|
||||
"net/url"
|
||||
"path"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
)
|
||||
|
||||
type File struct {
|
||||
FileName string `json:"FileName"`
|
||||
Size int64 `json:"Size"`
|
||||
UpdateAt time.Time `json:"UpdateAt"`
|
||||
FileId int64 `json:"FileId"`
|
||||
Type int `json:"Type"`
|
||||
Etag string `json:"Etag"`
|
||||
S3KeyFlag string `json:"S3KeyFlag"`
|
||||
DownloadUrl string `json:"DownloadUrl"`
|
||||
}
|
||||
|
||||
func (f File) GetPath() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
func (f File) GetSize() int64 {
|
||||
return f.Size
|
||||
}
|
||||
|
||||
func (f File) GetName() string {
|
||||
return f.FileName
|
||||
}
|
||||
|
||||
func (f File) ModTime() time.Time {
|
||||
return f.UpdateAt
|
||||
}
|
||||
|
||||
func (f File) IsDir() bool {
|
||||
return f.Type == 1
|
||||
}
|
||||
|
||||
func (f File) GetID() string {
|
||||
return strconv.FormatInt(f.FileId, 10)
|
||||
}
|
||||
|
||||
func (f File) Thumb() string {
|
||||
if f.DownloadUrl == "" {
|
||||
return ""
|
||||
}
|
||||
du, err := url.Parse(f.DownloadUrl)
|
||||
if err != nil {
|
||||
return ""
|
||||
}
|
||||
du.Path = strings.TrimSuffix(du.Path, "_24_24") + "_70_70"
|
||||
query := du.Query()
|
||||
query.Set("w", "70")
|
||||
query.Set("h", "70")
|
||||
if !query.Has("type") {
|
||||
query.Set("type", strings.TrimPrefix(path.Base(f.FileName), "."))
|
||||
}
|
||||
if !query.Has("trade_key") {
|
||||
query.Set("trade_key", "123pan-thumbnail")
|
||||
}
|
||||
du.RawQuery = query.Encode()
|
||||
return du.String()
|
||||
}
|
||||
|
||||
var _ model.Obj = (*File)(nil)
|
||||
var _ model.Thumb = (*File)(nil)
|
||||
|
||||
//func (f File) Thumb() string {
|
||||
//
|
||||
//}
|
||||
//var _ model.Thumb = (*File)(nil)
|
||||
|
||||
type Files struct {
|
||||
//BaseResp
|
||||
Data struct {
|
||||
InfoList []File `json:"InfoList"`
|
||||
Next string `json:"Next"`
|
||||
} `json:"data"`
|
||||
}
|
||||
|
||||
//type DownResp struct {
|
||||
// //BaseResp
|
||||
// Data struct {
|
||||
// DownloadUrl string `json:"DownloadUrl"`
|
||||
// } `json:"data"`
|
||||
//}
|
81
drivers/123_share/util.go
Normal file
@ -0,0 +1,81 @@
package _123Share

import (
	"errors"
	"net/http"
	"strconv"

	"github.com/alist-org/alist/v3/drivers/base"
	"github.com/alist-org/alist/v3/pkg/utils"
	"github.com/go-resty/resty/v2"
	jsoniter "github.com/json-iterator/go"
)

const (
	Api          = "https://www.123pan.com/api"
	AApi         = "https://www.123pan.com/a/api"
	BApi         = "https://www.123pan.com/b/api"
	MainApi      = Api
	FileList     = MainApi + "/share/get"
	DownloadInfo = MainApi + "/share/download/info"
	//AuthKeySalt = "8-8D$sL8gPjom7bk#cY"
)

func (d *Pan123Share) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
	req := base.RestyClient.R()
	req.SetHeaders(map[string]string{
		"origin":      "https://www.123pan.com",
		"referer":     "https://www.123pan.com/",
		"user-agent":  "Dart/2.19(dart:io)",
		"platform":    "android",
		"app-version": "36",
	})
	if callback != nil {
		callback(req)
	}
	if resp != nil {
		req.SetResult(resp)
	}
	res, err := req.Execute(method, url)
	if err != nil {
		return nil, err
	}
	body := res.Body()
	code := utils.Json.Get(body, "code").ToInt()
	if code != 0 {
		return nil, errors.New(jsoniter.Get(body, "message").ToString())
	}
	return body, nil
}

func (d *Pan123Share) getFiles(parentId string) ([]File, error) {
	page := 1
	res := make([]File, 0)
	for {
		var resp Files
		query := map[string]string{
			"limit":          "100",
			"next":           "0",
			"orderBy":        d.OrderBy,
			"orderDirection": d.OrderDirection,
			"parentFileId":   parentId,
			"Page":           strconv.Itoa(page),
			"shareKey":       d.ShareKey,
			"SharePwd":       d.SharePwd,
		}
		_, err := d.request(FileList, http.MethodGet, func(req *resty.Request) {
			req.SetQueryParams(query)
		}, &resp)
		if err != nil {
			return nil, err
		}
		page++
		res = append(res, resp.Data.InfoList...)
		if len(resp.Data.InfoList) == 0 || resp.Data.Next == "-1" {
			break
		}
	}
	return res, nil
}

// do others that not defined in Driver interface
@ -1,13 +1,13 @@
|
||||
package _139
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"io"
|
||||
"math"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
@ -20,6 +20,7 @@ import (
|
||||
type Yun139 struct {
|
||||
model.Storage
|
||||
Addition
|
||||
Account string
|
||||
}
|
||||
|
||||
func (d *Yun139) Config() driver.Config {
|
||||
@ -27,15 +28,23 @@ func (d *Yun139) Config() driver.Config {
|
||||
}
|
||||
|
||||
func (d *Yun139) GetAddition() driver.Additional {
|
||||
return d.Addition
|
||||
return &d.Addition
|
||||
}
|
||||
|
||||
func (d *Yun139) Init(ctx context.Context, storage model.Storage) error {
|
||||
d.Storage = storage
|
||||
err := utils.Json.UnmarshalFromString(d.Storage.Addition, &d.Addition)
|
||||
func (d *Yun139) Init(ctx context.Context) error {
|
||||
if d.Authorization == "" {
|
||||
return fmt.Errorf("authorization is empty")
|
||||
}
|
||||
decode, err := base64.StdEncoding.DecodeString(d.Authorization)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
decodeStr := string(decode)
|
||||
splits := strings.Split(decodeStr, ":")
|
||||
if len(splits) < 2 {
|
||||
return fmt.Errorf("authorization is invalid, splits < 2")
|
||||
}
|
||||
d.Account = splits[1]
|
||||
_, err = d.post("/orchestration/personalCloud/user/v1.0/qryUserExternInfo", base.Json{
|
||||
"qryUserExternInfoReq": base.Json{
|
||||
"commonAccountInfo": base.Json{
|
||||
@ -59,11 +68,6 @@ func (d *Yun139) List(ctx context.Context, dir model.Obj, args model.ListArgs) (
|
||||
}
|
||||
}
|
||||
|
||||
//func (d *Yun139) Get(ctx context.Context, path string) (model.Obj, error) {
|
||||
// // this is optional
|
||||
// return nil, errs.NotImplement
|
||||
//}
|
||||
|
||||
func (d *Yun139) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
u, err := d.getLink(file.GetID())
|
||||
if err != nil {
|
||||
@ -95,8 +99,7 @@ func (d *Yun139) MakeDir(ctx context.Context, parentDir model.Obj, dirName strin
|
||||
}
|
||||
pathname = "/orchestration/familyCloud/cloudCatalog/v1.0/createCloudDoc"
|
||||
}
|
||||
_, err := d.post(pathname,
|
||||
data, nil)
|
||||
_, err := d.post(pathname, data, nil)
|
||||
return err
|
||||
}
|
||||
|
||||
@ -234,15 +237,31 @@ func (d *Yun139) Remove(ctx context.Context, obj model.Obj) error {
|
||||
return err
|
||||
}
|
||||
|
||||
const (
|
||||
_ = iota //ignore first value by assigning to blank identifier
|
||||
KB = 1 << (10 * iota)
|
||||
MB
|
||||
GB
|
||||
TB
|
||||
)
|
||||
|
||||
func getPartSize(size int64) int64 {
|
||||
// the cloud drive caps the number of upload parts, so use larger parts for big files
|
||||
if size/GB > 30 {
|
||||
return 512 * MB
|
||||
}
|
||||
return 100 * MB
|
||||
}
|
||||
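A quick worked example of the new getPartSize helper above, using hypothetical file sizes (illustrative only; assumes it runs inside this package with "fmt" imported).

for _, size := range []int64{4 * GB, 60 * GB} {
	partSize := getPartSize(size)
	parts := (size + partSize - 1) / partSize
	fmt.Printf("size=%dGB partSize=%dMB parts=%d\n", size/GB, partSize/MB, parts)
}
// size=4GB partSize=100MB parts=41
// size=60GB partSize=512MB parts=120
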
|
||||
func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||
data := base.Json{
|
||||
"manualRename": 2,
|
||||
"operation": 0,
|
||||
"fileCount": 1,
|
||||
"totalSize": stream.GetSize(),
|
||||
"totalSize": 0, // 去除上传大小限制
|
||||
"uploadContentList": []base.Json{{
|
||||
"contentName": stream.GetName(),
|
||||
"contentSize": stream.GetSize(),
|
||||
"contentSize": 0, // 去除上传大小限制
|
||||
// "digest": "5a3231986ce7a6b46e408612d385bafa"
|
||||
}},
|
||||
"parentCatalogID": dstDir.GetID(),
|
||||
@ -260,10 +279,10 @@ func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
|
||||
"operation": 0,
|
||||
"path": "",
|
||||
"seqNo": "",
|
||||
"totalSize": stream.GetSize(),
|
||||
"totalSize": 0,
|
||||
"uploadContentList": []base.Json{{
|
||||
"contentName": stream.GetName(),
|
||||
"contentSize": stream.GetSize(),
|
||||
"contentSize": 0,
|
||||
// "digest": "5a3231986ce7a6b46e408612d385bafa"
|
||||
}},
|
||||
})
|
||||
@ -275,52 +294,54 @@ func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
var Default int64 = 10485760
|
||||
part := int(math.Ceil(float64(stream.GetSize()) / float64(Default)))
|
||||
var start int64 = 0
|
||||
for i := 0; i < part; i++ {
|
||||
|
||||
// Progress
|
||||
p := driver.NewProgress(stream.GetSize(), up)
|
||||
|
||||
var partSize = getPartSize(stream.GetSize())
|
||||
part := (stream.GetSize() + partSize - 1) / partSize
|
||||
if part == 0 {
|
||||
part = 1
|
||||
}
|
||||
for i := int64(0); i < part; i++ {
|
||||
if utils.IsCanceled(ctx) {
|
||||
return ctx.Err()
|
||||
}
|
||||
|
||||
start := i * partSize
|
||||
byteSize := stream.GetSize() - start
|
||||
if byteSize > Default {
|
||||
byteSize = Default
|
||||
if byteSize > partSize {
|
||||
byteSize = partSize
|
||||
}
|
||||
byteData := make([]byte, byteSize)
|
||||
_, err = io.ReadFull(stream, byteData)
|
||||
|
||||
limitReader := io.LimitReader(stream, byteSize)
|
||||
// Update Progress
|
||||
r := io.TeeReader(limitReader, p)
|
||||
req, err := http.NewRequest("POST", resp.Data.UploadResult.RedirectionURL, r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req, err := http.NewRequest("POST", resp.Data.UploadResult.RedirectionURL, bytes.NewBuffer(byteData))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
headers := map[string]string{
|
||||
"Accept": "*/*",
|
||||
"Content-Type": "text/plain;name=" + unicode(stream.GetName()),
|
||||
"contentSize": strconv.FormatInt(stream.GetSize(), 10),
|
||||
"range": fmt.Sprintf("bytes=%d-%d", start, start+byteSize-1),
|
||||
"content-length": strconv.FormatInt(byteSize, 10),
|
||||
"uploadtaskID": resp.Data.UploadResult.UploadTaskID,
|
||||
"rangeType": "0",
|
||||
"Referer": "https://yun.139.com/",
|
||||
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.69 Safari/537.36 Edg/95.0.1020.44",
|
||||
"x-SvcType": "1",
|
||||
}
|
||||
for k, v := range headers {
|
||||
req.Header.Set(k, v)
|
||||
}
|
||||
|
||||
req = req.WithContext(ctx)
|
||||
req.Header.Set("Content-Type", "text/plain;name="+unicode(stream.GetName()))
|
||||
req.Header.Set("contentSize", strconv.FormatInt(stream.GetSize(), 10))
|
||||
req.Header.Set("range", fmt.Sprintf("bytes=%d-%d", start, start+byteSize-1))
|
||||
req.Header.Set("uploadtaskID", resp.Data.UploadResult.UploadTaskID)
|
||||
req.Header.Set("rangeType", "0")
|
||||
req.ContentLength = byteSize
|
||||
|
||||
res, err := base.HttpClient.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_ = res.Body.Close()
|
||||
log.Debugf("%+v", res)
|
||||
res.Body.Close()
|
||||
start += byteSize
|
||||
up(i * 100 / part)
|
||||
if res.StatusCode != http.StatusOK {
|
||||
return fmt.Errorf("unexpected status code: %d", res.StatusCode)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Yun139) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
|
||||
return nil, errs.NotSupport
|
||||
}
|
||||
|
||||
var _ driver.Driver = (*Yun139)(nil)
|
||||
|
@ -6,8 +6,8 @@ import (
)

type Addition struct {
	Account string `json:"account" required:"true"`
	Cookie string `json:"cookie" type:"text" required:"true"`
	//Account string `json:"account" required:"true"`
	Authorization string `json:"authorization" type:"text" required:"true"`
	driver.RootID
	Type    string `json:"type" type:"select" options:"personal,family" default:"personal"`
	CloudID string `json:"cloud_id"`
@ -19,7 +19,7 @@ var config = driver.Config{
}

func init() {
	op.RegisterDriver(config, func() driver.Driver {
	op.RegisterDriver(func() driver.Driver {
		return &Yun139{}
	})
}
@ -28,19 +28,22 @@ func (d *Yun139) isFamily() bool {
|
||||
func encodeURIComponent(str string) string {
|
||||
r := url.QueryEscape(str)
|
||||
r = strings.Replace(r, "+", "%20", -1)
|
||||
r = strings.Replace(r, "%21", "!", -1)
|
||||
r = strings.Replace(r, "%27", "'", -1)
|
||||
r = strings.Replace(r, "%28", "(", -1)
|
||||
r = strings.Replace(r, "%29", ")", -1)
|
||||
r = strings.Replace(r, "%2A", "*", -1)
|
||||
return r
|
||||
}
|
||||
|
||||
func calSign(body, ts, randStr string) string {
|
||||
body = strings.ReplaceAll(body, "\n", "")
|
||||
body = strings.ReplaceAll(body, " ", "")
|
||||
body = encodeURIComponent(body)
|
||||
strs := strings.Split(body, "")
|
||||
sort.Strings(strs)
|
||||
body = strings.Join(strs, "")
|
||||
body = base64.StdEncoding.EncodeToString([]byte(body))
|
||||
res := utils.GetMD5Encode(body) + utils.GetMD5Encode(ts+":"+randStr)
|
||||
res = strings.ToUpper(utils.GetMD5Encode(res))
|
||||
res := utils.GetMD5EncodeStr(body) + utils.GetMD5EncodeStr(ts+":"+randStr)
|
||||
res = strings.ToUpper(utils.GetMD5EncodeStr(res))
|
||||
return res
|
||||
}
|
||||
|
||||
@ -69,7 +72,7 @@ func (d *Yun139) request(pathname string, method string, callback base.ReqCallba
|
||||
req.SetHeaders(map[string]string{
|
||||
"Accept": "application/json, text/plain, */*",
|
||||
"CMS-DEVICE": "default",
|
||||
"Cookie": d.Cookie,
|
||||
"Authorization": "Basic " + d.Authorization,
|
||||
"mcloud-channel": "1000101",
|
||||
"mcloud-client": "10701",
|
||||
//"mcloud-route": "001",
|
||||
|
@ -7,7 +7,6 @@ import (
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/errs"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/go-resty/resty/v2"
|
||||
@ -27,21 +26,13 @@ func (d *Cloud189) Config() driver.Config {
|
||||
}
|
||||
|
||||
func (d *Cloud189) GetAddition() driver.Additional {
|
||||
return d.Addition
|
||||
return &d.Addition
|
||||
}
|
||||
|
||||
func (d *Cloud189) Init(ctx context.Context, storage model.Storage) error {
|
||||
d.Storage = storage
|
||||
err := utils.Json.UnmarshalFromString(d.Storage.Addition, &d.Addition)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
d.client = resty.New().
|
||||
SetTimeout(base.DefaultTimeout).
|
||||
SetRetryCount(3).
|
||||
SetHeader("Referer", "https://cloud.189.cn/").
|
||||
SetHeader("User-Agent", base.UserAgent)
|
||||
return d.login()
|
||||
func (d *Cloud189) Init(ctx context.Context) error {
|
||||
d.client = base.NewRestyClient().
|
||||
SetHeader("Referer", "https://cloud.189.cn/")
|
||||
return d.newLogin()
|
||||
}
|
||||
|
||||
func (d *Cloud189) Drop(ctx context.Context) error {
|
||||
@ -52,11 +43,6 @@ func (d *Cloud189) List(ctx context.Context, dir model.Obj, args model.ListArgs)
|
||||
return d.getFiles(dir.GetID())
|
||||
}
|
||||
|
||||
//func (d *Cloud189) Get(ctx context.Context, path string) (model.Obj, error) {
|
||||
// // this is optional
|
||||
// return nil, errs.NotImplement
|
||||
//}
|
||||
|
||||
func (d *Cloud189) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
var resp DownResp
|
||||
u := "https://cloud.189.cn/api/portal/getFileInfo.action"
|
||||
@ -205,11 +191,7 @@ func (d *Cloud189) Remove(ctx context.Context, obj model.Obj) error {
|
||||
}
|
||||
|
||||
func (d *Cloud189) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||
return d.newUpload(dstDir, stream, up)
|
||||
}
|
||||
|
||||
func (d *Cloud189) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
|
||||
return nil, errs.NotSupport
|
||||
return d.newUpload(ctx, dstDir, stream, up)
|
||||
}
|
||||
|
||||
var _ driver.Driver = (*Cloud189)(nil)
|
||||
|
126
drivers/189/login.go
Normal file
@ -0,0 +1,126 @@
|
||||
package _189
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"strconv"
|
||||
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
type AppConf struct {
|
||||
Data struct {
|
||||
AccountType string `json:"accountType"`
|
||||
AgreementCheck string `json:"agreementCheck"`
|
||||
AppKey string `json:"appKey"`
|
||||
ClientType int `json:"clientType"`
|
||||
IsOauth2 bool `json:"isOauth2"`
|
||||
LoginSort string `json:"loginSort"`
|
||||
MailSuffix string `json:"mailSuffix"`
|
||||
PageKey string `json:"pageKey"`
|
||||
ParamId string `json:"paramId"`
|
||||
RegReturnUrl string `json:"regReturnUrl"`
|
||||
ReqId string `json:"reqId"`
|
||||
ReturnUrl string `json:"returnUrl"`
|
||||
ShowFeedback string `json:"showFeedback"`
|
||||
ShowPwSaveName string `json:"showPwSaveName"`
|
||||
ShowQrSaveName string `json:"showQrSaveName"`
|
||||
ShowSmsSaveName string `json:"showSmsSaveName"`
|
||||
Sso string `json:"sso"`
|
||||
} `json:"data"`
|
||||
Msg string `json:"msg"`
|
||||
Result string `json:"result"`
|
||||
}
|
||||
|
||||
type EncryptConf struct {
|
||||
Result int `json:"result"`
|
||||
Data struct {
|
||||
UpSmsOn string `json:"upSmsOn"`
|
||||
Pre string `json:"pre"`
|
||||
PreDomain string `json:"preDomain"`
|
||||
PubKey string `json:"pubKey"`
|
||||
} `json:"data"`
|
||||
}
|
||||
|
||||
func (d *Cloud189) newLogin() error {
|
||||
url := "https://cloud.189.cn/api/portal/loginUrl.action?redirectURL=https%3A%2F%2Fcloud.189.cn%2Fmain.action"
|
||||
res, err := d.client.R().Get(url)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// Is logged in
|
||||
redirectURL := res.RawResponse.Request.URL
|
||||
if redirectURL.String() == "https://cloud.189.cn/web/main" {
|
||||
return nil
|
||||
}
|
||||
lt := redirectURL.Query().Get("lt")
|
||||
reqId := redirectURL.Query().Get("reqId")
|
||||
appId := redirectURL.Query().Get("appId")
|
||||
headers := map[string]string{
|
||||
"lt": lt,
|
||||
"reqid": reqId,
|
||||
"referer": redirectURL.String(),
|
||||
"origin": "https://open.e.189.cn",
|
||||
}
|
||||
// get app Conf
|
||||
var appConf AppConf
|
||||
res, err = d.client.R().SetHeaders(headers).SetFormData(map[string]string{
|
||||
"version": "2.0",
|
||||
"appKey": appId,
|
||||
}).SetResult(&appConf).Post("https://open.e.189.cn/api/logbox/oauth2/appConf.do")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
log.Debugf("189 AppConf resp body: %s", res.String())
|
||||
if appConf.Result != "0" {
|
||||
return errors.New(appConf.Msg)
|
||||
}
|
||||
// get encrypt conf
|
||||
var encryptConf EncryptConf
|
||||
res, err = d.client.R().SetHeaders(headers).SetFormData(map[string]string{
|
||||
"appId": appId,
|
||||
}).Post("https://open.e.189.cn/api/logbox/config/encryptConf.do")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = utils.Json.Unmarshal(res.Body(), &encryptConf)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
log.Debugf("189 EncryptConf resp body: %s\n%+v", res.String(), encryptConf)
|
||||
if encryptConf.Result != 0 {
|
||||
return errors.New("get EncryptConf error:" + res.String())
|
||||
}
|
||||
// TODO: getUUID? needcaptcha
|
||||
// login
|
||||
loginData := map[string]string{
|
||||
"version": "v2.0",
|
||||
"apToken": "",
|
||||
"appKey": appId,
|
||||
"accountType": appConf.Data.AccountType,
|
||||
"userName": encryptConf.Data.Pre + RsaEncode([]byte(d.Username), encryptConf.Data.PubKey, true),
|
||||
"epd": encryptConf.Data.Pre + RsaEncode([]byte(d.Password), encryptConf.Data.PubKey, true),
|
||||
"captchaType": "",
|
||||
"validateCode": "",
|
||||
"smsValidateCode": "",
|
||||
"captchaToken": "",
|
||||
"returnUrl": appConf.Data.ReturnUrl,
|
||||
"mailSuffix": appConf.Data.MailSuffix,
|
||||
"dynamicCheck": "FALSE",
|
||||
"clientType": strconv.Itoa(appConf.Data.ClientType),
|
||||
"cb_SaveName": "3",
|
||||
"isOauth2": strconv.FormatBool(appConf.Data.IsOauth2),
|
||||
"state": "",
|
||||
"paramId": appConf.Data.ParamId,
|
||||
}
|
||||
res, err = d.client.R().SetHeaders(headers).SetFormData(loginData).Post("https://open.e.189.cn/api/logbox/oauth2/loginSubmit.do")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
log.Debugf("189 login resp body: %s", res.String())
|
||||
loginResult := utils.Json.Get(res.Body(), "result").ToInt()
|
||||
if loginResult != 0 {
|
||||
return errors.New(utils.Json.Get(res.Body(), "msg").ToString())
|
||||
}
|
||||
return nil
|
||||
}
|
@ -8,6 +8,7 @@ import (
type Addition struct {
	Username string `json:"username" required:"true"`
	Password string `json:"password" required:"true"`
	Cookie   string `json:"cookie" help:"Fill in the cookie if need captcha"`
	driver.RootID
}

@ -15,10 +16,11 @@ var config = driver.Config{
	Name:        "189Cloud",
	LocalSort:   true,
	DefaultRoot: "-11",
	Alert:       `info|You can try to use 189PC driver if this driver does not work.`,
}

func init() {
	op.RegisterDriver(config, func() driver.Driver {
	op.RegisterDriver(func() driver.Driver {
		return &Cloud189{}
	})
}
@ -2,6 +2,7 @@ package _189
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"crypto/md5"
|
||||
"encoding/base64"
|
||||
"encoding/hex"
|
||||
@ -10,16 +11,13 @@ import (
|
||||
"io"
|
||||
"math"
|
||||
"net/http"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/conf"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/internal/setting"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
myrand "github.com/alist-org/alist/v3/pkg/utils/random"
|
||||
"github.com/go-resty/resty/v2"
|
||||
@ -29,118 +27,118 @@ import (
|
||||
|
||||
// do others that not defined in Driver interface
|
||||
|
||||
func (d *Cloud189) login() error {
|
||||
url := "https://cloud.189.cn/api/portal/loginUrl.action?redirectURL=https%3A%2F%2Fcloud.189.cn%2Fmain.action"
|
||||
b := ""
|
||||
lt := ""
|
||||
ltText := regexp.MustCompile(`lt = "(.+?)"`)
|
||||
var res *resty.Response
|
||||
var err error
|
||||
for i := 0; i < 3; i++ {
|
||||
res, err = d.client.R().Get(url)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// already logged in
|
||||
if res.RawResponse.Request.URL.String() == "https://cloud.189.cn/web/main" {
|
||||
return nil
|
||||
}
|
||||
b = res.String()
|
||||
ltTextArr := ltText.FindStringSubmatch(b)
|
||||
if len(ltTextArr) > 0 {
|
||||
lt = ltTextArr[1]
|
||||
break
|
||||
} else {
|
||||
<-time.After(time.Second)
|
||||
}
|
||||
}
|
||||
if lt == "" {
|
||||
return fmt.Errorf("get page: %s \nstatus: %d \nrequest url: %s\nredirect url: %s",
|
||||
b, res.StatusCode(), res.RawResponse.Request.URL.String(), res.Header().Get("location"))
|
||||
}
|
||||
captchaToken := regexp.MustCompile(`captchaToken' value='(.+?)'`).FindStringSubmatch(b)[1]
|
||||
returnUrl := regexp.MustCompile(`returnUrl = '(.+?)'`).FindStringSubmatch(b)[1]
|
||||
paramId := regexp.MustCompile(`paramId = "(.+?)"`).FindStringSubmatch(b)[1]
|
||||
//reqId := regexp.MustCompile(`reqId = "(.+?)"`).FindStringSubmatch(b)[1]
|
||||
jRsakey := regexp.MustCompile(`j_rsaKey" value="(\S+)"`).FindStringSubmatch(b)[1]
|
||||
vCodeID := regexp.MustCompile(`picCaptcha\.do\?token\=([A-Za-z0-9\&\=]+)`).FindStringSubmatch(b)[1]
|
||||
vCodeRS := ""
|
||||
if vCodeID != "" {
|
||||
// need ValidateCode
|
||||
log.Debugf("try to identify verification codes")
|
||||
timeStamp := strconv.FormatInt(time.Now().UnixNano()/1e6, 10)
|
||||
u := "https://open.e.189.cn/api/logbox/oauth2/picCaptcha.do?token=" + vCodeID + timeStamp
|
||||
imgRes, err := d.client.R().SetHeaders(map[string]string{
|
||||
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:74.0) Gecko/20100101 Firefox/76.0",
|
||||
"Referer": "https://open.e.189.cn/api/logbox/oauth2/unifyAccountLogin.do",
|
||||
"Sec-Fetch-Dest": "image",
|
||||
"Sec-Fetch-Mode": "no-cors",
|
||||
"Sec-Fetch-Site": "same-origin",
|
||||
}).Get(u)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// Enter the verification code manually
|
||||
//err = message.GetMessenger().WaitSend(message.Message{
|
||||
// Type: "image",
|
||||
// Content: "data:image/png;base64," + base64.StdEncoding.EncodeToString(imgRes.Body()),
|
||||
//}, 10)
|
||||
//if err != nil {
|
||||
// return err
|
||||
//}
|
||||
//vCodeRS, err = message.GetMessenger().WaitReceive(30)
|
||||
// use ocr api
|
||||
vRes, err := base.RestyClient.R().SetMultipartField(
|
||||
"image", "validateCode.png", "image/png", bytes.NewReader(imgRes.Body())).
|
||||
Post(setting.GetStr(conf.OcrApi))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if jsoniter.Get(vRes.Body(), "status").ToInt() != 200 {
|
||||
return errors.New("ocr error:" + jsoniter.Get(vRes.Body(), "msg").ToString())
|
||||
}
|
||||
vCodeRS = jsoniter.Get(vRes.Body(), "result").ToString()
|
||||
log.Debugln("code: ", vCodeRS)
|
||||
}
|
||||
userRsa := RsaEncode([]byte(d.Username), jRsakey, true)
|
||||
passwordRsa := RsaEncode([]byte(d.Password), jRsakey, true)
|
||||
url = "https://open.e.189.cn/api/logbox/oauth2/loginSubmit.do"
|
||||
var loginResp LoginResp
|
||||
res, err = d.client.R().
|
||||
SetHeaders(map[string]string{
|
||||
"lt": lt,
|
||||
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
|
||||
"Referer": "https://open.e.189.cn/",
|
||||
"accept": "application/json;charset=UTF-8",
|
||||
}).SetFormData(map[string]string{
|
||||
"appKey": "cloud",
|
||||
"accountType": "01",
|
||||
"userName": "{RSA}" + userRsa,
|
||||
"password": "{RSA}" + passwordRsa,
|
||||
"validateCode": vCodeRS,
|
||||
"captchaToken": captchaToken,
|
||||
"returnUrl": returnUrl,
|
||||
"mailSuffix": "@pan.cn",
|
||||
"paramId": paramId,
|
||||
"clientType": "10010",
|
||||
"dynamicCheck": "FALSE",
|
||||
"cb_SaveName": "1",
|
||||
"isOauth2": "false",
|
||||
}).Post(url)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = utils.Json.Unmarshal(res.Body(), &loginResp)
|
||||
if err != nil {
|
||||
log.Error(err.Error())
|
||||
return err
|
||||
}
|
||||
if loginResp.Result != 0 {
|
||||
return fmt.Errorf(loginResp.Msg)
|
||||
}
|
||||
_, err = d.client.R().Get(loginResp.ToUrl)
|
||||
return err
|
||||
}
|
||||
//func (d *Cloud189) login() error {
|
||||
// url := "https://cloud.189.cn/api/portal/loginUrl.action?redirectURL=https%3A%2F%2Fcloud.189.cn%2Fmain.action"
|
||||
// b := ""
|
||||
// lt := ""
|
||||
// ltText := regexp.MustCompile(`lt = "(.+?)"`)
|
||||
// var res *resty.Response
|
||||
// var err error
|
||||
// for i := 0; i < 3; i++ {
|
||||
// res, err = d.client.R().Get(url)
|
||||
// if err != nil {
|
||||
// return err
|
||||
// }
|
||||
// // already logged in
|
||||
// if res.RawResponse.Request.URL.String() == "https://cloud.189.cn/web/main" {
|
||||
// return nil
|
||||
// }
|
||||
// b = res.String()
|
||||
// ltTextArr := ltText.FindStringSubmatch(b)
|
||||
// if len(ltTextArr) > 0 {
|
||||
// lt = ltTextArr[1]
|
||||
// break
|
||||
// } else {
|
||||
// <-time.After(time.Second)
|
||||
// }
|
||||
// }
|
||||
// if lt == "" {
|
||||
// return fmt.Errorf("get page: %s \nstatus: %d \nrequest url: %s\nredirect url: %s",
|
||||
// b, res.StatusCode(), res.RawResponse.Request.URL.String(), res.Header().Get("location"))
|
||||
// }
|
||||
// captchaToken := regexp.MustCompile(`captchaToken' value='(.+?)'`).FindStringSubmatch(b)[1]
|
||||
// returnUrl := regexp.MustCompile(`returnUrl = '(.+?)'`).FindStringSubmatch(b)[1]
|
||||
// paramId := regexp.MustCompile(`paramId = "(.+?)"`).FindStringSubmatch(b)[1]
|
||||
// //reqId := regexp.MustCompile(`reqId = "(.+?)"`).FindStringSubmatch(b)[1]
|
||||
// jRsakey := regexp.MustCompile(`j_rsaKey" value="(\S+)"`).FindStringSubmatch(b)[1]
|
||||
// vCodeID := regexp.MustCompile(`picCaptcha\.do\?token\=([A-Za-z0-9\&\=]+)`).FindStringSubmatch(b)[1]
|
||||
// vCodeRS := ""
|
||||
// if vCodeID != "" {
|
||||
// // need ValidateCode
|
||||
// log.Debugf("try to identify verification codes")
|
||||
// timeStamp := strconv.FormatInt(time.Now().UnixNano()/1e6, 10)
|
||||
// u := "https://open.e.189.cn/api/logbox/oauth2/picCaptcha.do?token=" + vCodeID + timeStamp
|
||||
// imgRes, err := d.client.R().SetHeaders(map[string]string{
|
||||
// "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:74.0) Gecko/20100101 Firefox/76.0",
|
||||
// "Referer": "https://open.e.189.cn/api/logbox/oauth2/unifyAccountLogin.do",
|
||||
// "Sec-Fetch-Dest": "image",
|
||||
// "Sec-Fetch-Mode": "no-cors",
|
||||
// "Sec-Fetch-Site": "same-origin",
|
||||
// }).Get(u)
|
||||
// if err != nil {
|
||||
// return err
|
||||
// }
|
||||
// // Enter the verification code manually
|
||||
// //err = message.GetMessenger().WaitSend(message.Message{
|
||||
// // Type: "image",
|
||||
// // Content: "data:image/png;base64," + base64.StdEncoding.EncodeToString(imgRes.Body()),
|
||||
// //}, 10)
|
||||
// //if err != nil {
|
||||
// // return err
|
||||
// //}
|
||||
// //vCodeRS, err = message.GetMessenger().WaitReceive(30)
|
||||
// // use ocr api
|
||||
// vRes, err := base.RestyClient.R().SetMultipartField(
|
||||
// "image", "validateCode.png", "image/png", bytes.NewReader(imgRes.Body())).
|
||||
// Post(setting.GetStr(conf.OcrApi))
|
||||
// if err != nil {
|
||||
// return err
|
||||
// }
|
||||
// if jsoniter.Get(vRes.Body(), "status").ToInt() != 200 {
|
||||
// return errors.New("ocr error:" + jsoniter.Get(vRes.Body(), "msg").ToString())
|
||||
// }
|
||||
// vCodeRS = jsoniter.Get(vRes.Body(), "result").ToString()
|
||||
// log.Debugln("code: ", vCodeRS)
|
||||
// }
|
||||
// userRsa := RsaEncode([]byte(d.Username), jRsakey, true)
|
||||
// passwordRsa := RsaEncode([]byte(d.Password), jRsakey, true)
|
||||
// url = "https://open.e.189.cn/api/logbox/oauth2/loginSubmit.do"
|
||||
// var loginResp LoginResp
|
||||
// res, err = d.client.R().
|
||||
// SetHeaders(map[string]string{
|
||||
// "lt": lt,
|
||||
// "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
|
||||
// "Referer": "https://open.e.189.cn/",
|
||||
// "accept": "application/json;charset=UTF-8",
|
||||
// }).SetFormData(map[string]string{
|
||||
// "appKey": "cloud",
|
||||
// "accountType": "01",
|
||||
// "userName": "{RSA}" + userRsa,
|
||||
// "password": "{RSA}" + passwordRsa,
|
||||
// "validateCode": vCodeRS,
|
||||
// "captchaToken": captchaToken,
|
||||
// "returnUrl": returnUrl,
|
||||
// "mailSuffix": "@pan.cn",
|
||||
// "paramId": paramId,
|
||||
// "clientType": "10010",
|
||||
// "dynamicCheck": "FALSE",
|
||||
// "cb_SaveName": "1",
|
||||
// "isOauth2": "false",
|
||||
// }).Post(url)
|
||||
// if err != nil {
|
||||
// return err
|
||||
// }
|
||||
// err = utils.Json.Unmarshal(res.Body(), &loginResp)
|
||||
// if err != nil {
|
||||
// log.Error(err.Error())
|
||||
// return err
|
||||
// }
|
||||
// if loginResp.Result != 0 {
|
||||
// return fmt.Errorf(loginResp.Msg)
|
||||
// }
|
||||
// _, err = d.client.R().Get(loginResp.ToUrl)
|
||||
// return err
|
||||
//}
|
||||
|
||||
func (d *Cloud189) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
||||
var e Error
|
||||
@ -162,7 +160,7 @@ func (d *Cloud189) request(url string, method string, callback base.ReqCallback,
|
||||
//log.Debug(res.String())
|
||||
if e.ErrorCode != "" {
|
||||
if e.ErrorCode == "InvalidSessionKey" {
|
||||
err = d.login()
|
||||
err = d.newLogin()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -178,7 +176,6 @@ func (d *Cloud189) request(url string, method string, callback base.ReqCallback,
|
||||
func (d *Cloud189) getFiles(fileId string) ([]model.Obj, error) {
|
||||
res := make([]model.Obj, 0)
|
||||
pageNum := 1
|
||||
loc, _ := time.LoadLocation("Local")
|
||||
for {
|
||||
var resp Files
|
||||
_, err := d.request("https://cloud.189.cn/api/open/file/listFiles.action", http.MethodGet, func(req *resty.Request) {
|
||||
@ -200,7 +197,7 @@ func (d *Cloud189) getFiles(fileId string) ([]model.Obj, error) {
|
||||
break
|
||||
}
|
||||
for _, folder := range resp.FileListAO.FolderList {
|
||||
lastOpTime, _ := time.ParseInLocation("2006-01-02 15:04:05", folder.LastOpTime, loc)
|
||||
lastOpTime := utils.MustParseCNTime(folder.LastOpTime)
|
||||
res = append(res, &model.Object{
|
||||
ID: strconv.FormatInt(folder.Id, 10),
|
||||
Name: folder.Name,
|
||||
@ -209,12 +206,13 @@ func (d *Cloud189) getFiles(fileId string) ([]model.Obj, error) {
|
||||
})
|
||||
}
|
||||
for _, file := range resp.FileListAO.FileList {
|
||||
lastOpTime, _ := time.ParseInLocation("2006-01-02 15:04:05", file.LastOpTime, loc)
|
||||
lastOpTime := utils.MustParseCNTime(file.LastOpTime)
|
||||
res = append(res, &model.ObjThumb{
|
||||
Object: model.Object{
|
||||
ID: strconv.FormatInt(file.Id, 10),
|
||||
Name: file.Name,
|
||||
Modified: lastOpTime,
|
||||
Size: file.Size,
|
||||
},
|
||||
Thumbnail: model.Thumbnail{Thumbnail: file.Icon.SmallUrl},
|
||||
})
|
||||
@ -306,7 +304,7 @@ func (d *Cloud189) uploadRequest(uri string, form map[string]string, resp interf
|
||||
return data, nil
|
||||
}
|
||||
|
||||
func (d *Cloud189) newUpload(dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) error {
|
||||
func (d *Cloud189) newUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) error {
|
||||
sessionKey, err := d.getSessionKey()
|
||||
if err != nil {
|
||||
return err
|
||||
@ -335,6 +333,9 @@ func (d *Cloud189) newUpload(dstDir model.Obj, file model.FileStreamer, up drive
|
||||
md5s := make([]string, 0)
|
||||
md5Sum := md5.New()
|
||||
for i = 1; i <= count; i++ {
|
||||
if utils.IsCanceled(ctx) {
|
||||
return ctx.Err()
|
||||
}
|
||||
byteSize = file.GetSize() - finish
|
||||
if DEFAULT < byteSize {
|
||||
byteSize = DEFAULT
|
||||
@ -364,12 +365,15 @@ func (d *Cloud189) newUpload(dstDir model.Obj, file model.FileStreamer, up drive
|
||||
log.Debugf("uploadData: %+v", uploadData)
|
||||
requestURL := uploadData.RequestURL
|
||||
uploadHeaders := strings.Split(decodeURIComponent(uploadData.RequestHeader), "&")
|
||||
req, _ := http.NewRequest(http.MethodPut, requestURL, bytes.NewReader(byteData))
|
||||
req, err := http.NewRequest(http.MethodPut, requestURL, bytes.NewReader(byteData))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req = req.WithContext(ctx)
|
||||
for _, v := range uploadHeaders {
|
||||
i := strings.Index(v, "=")
|
||||
req.Header.Set(v[0:i], v[i+1:])
|
||||
}
|
||||
|
||||
r, err := base.HttpClient.Do(req)
|
||||
log.Debugf("%+v %+v", r, r.Request.Header)
|
||||
r.Body.Close()
|
||||
@ -381,7 +385,7 @@ func (d *Cloud189) newUpload(dstDir model.Obj, file model.FileStreamer, up drive
|
||||
fileMd5 := hex.EncodeToString(md5Sum.Sum(nil))
|
||||
sliceMd5 := fileMd5
|
||||
if file.GetSize() > DEFAULT {
|
||||
sliceMd5 = utils.GetMD5Encode(strings.Join(md5s, "\n"))
|
||||
sliceMd5 = utils.GetMD5EncodeStr(strings.Join(md5s, "\n"))
|
||||
}
|
||||
res, err = d.uploadRequest("/person/commitMultiUploadFile", map[string]string{
|
||||
"uploadFileId": uploadFileId,
|
||||
|
@ -3,8 +3,6 @@ package _189pc
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
@ -16,33 +14,27 @@ import (
|
||||
"github.com/go-resty/resty/v2"
|
||||
)
|
||||
|
||||
type Yun189PC struct {
|
||||
type Cloud189PC struct {
|
||||
model.Storage
|
||||
Addition
|
||||
|
||||
identity string
|
||||
|
||||
client *resty.Client
|
||||
putClient *resty.Client
|
||||
client *resty.Client
|
||||
|
||||
loginParam *LoginParam
|
||||
tokenInfo *AppSessionResp
|
||||
}
|
||||
|
||||
func (y *Yun189PC) Config() driver.Config {
|
||||
func (y *Cloud189PC) Config() driver.Config {
|
||||
return config
|
||||
}
|
||||
|
||||
func (y *Yun189PC) GetAddition() driver.Additional {
|
||||
return y.Addition
|
||||
func (y *Cloud189PC) GetAddition() driver.Additional {
|
||||
return &y.Addition
|
||||
}
|
||||
|
||||
func (y *Yun189PC) Init(ctx context.Context, storage model.Storage) (err error) {
|
||||
y.Storage = storage
|
||||
if err = utils.Json.UnmarshalFromString(y.Storage.Addition, &y.Addition); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
func (y *Cloud189PC) Init(ctx context.Context) (err error) {
|
||||
// Handle the personal-cloud vs family-cloud parameters
|
||||
if y.isFamily() && y.RootFolderID == "-11" {
|
||||
y.RootFolderID = ""
|
||||
@ -59,12 +51,9 @@ func (y *Yun189PC) Init(ctx context.Context, storage model.Storage) (err error)
|
||||
"Referer": WEB_URL,
|
||||
})
|
||||
}
|
||||
if y.putClient == nil {
|
||||
y.putClient = base.NewRestyClient().SetTimeout(120 * time.Second)
|
||||
}
|
||||
|
||||
// Avoid logging in again when a valid session already exists
|
||||
identity := utils.GetMD5Encode(y.Username + y.Password)
|
||||
identity := utils.GetMD5EncodeStr(y.Username + y.Password)
|
||||
if !y.isLogin() || y.identity != identity {
|
||||
y.identity = identity
|
||||
if err = y.login(); err != nil {
|
||||
@ -81,15 +70,15 @@ func (y *Yun189PC) Init(ctx context.Context, storage model.Storage) (err error)
|
||||
return
|
||||
}
|
||||
|
||||
func (y *Yun189PC) Drop(ctx context.Context) error {
|
||||
func (y *Cloud189PC) Drop(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (y *Yun189PC) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||
func (y *Cloud189PC) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||
return y.getFiles(ctx, dir.GetID())
|
||||
}
|
||||
|
||||
func (y *Yun189PC) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
func (y *Cloud189PC) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
var downloadUrl struct {
|
||||
URL string `json:"fileDownloadUrl"`
|
||||
}
|
||||
@ -134,26 +123,28 @@ func (y *Yun189PC) Link(ctx context.Context, file model.Obj, args model.LinkArgs
|
||||
"User-Agent": []string{base.UserAgent},
|
||||
},
|
||||
}
|
||||
|
||||
// 获取链接有效时常
|
||||
strs := regexp.MustCompile(`(?i)expire[^=]*=([0-9]*)`).FindStringSubmatch(downloadUrl.URL)
|
||||
if len(strs) == 2 {
|
||||
timestamp, err := strconv.ParseInt(strs[1], 10, 64)
|
||||
if err == nil {
|
||||
expired := time.Duration(timestamp-time.Now().Unix()) * time.Second
|
||||
like.Expiration = &expired
|
||||
/*
|
||||
// Work out how long the download link remains valid
|
||||
strs := regexp.MustCompile(`(?i)expire[^=]*=([0-9]*)`).FindStringSubmatch(downloadUrl.URL)
|
||||
if len(strs) == 2 {
|
||||
timestamp, err := strconv.ParseInt(strs[1], 10, 64)
|
||||
if err == nil {
|
||||
expired := time.Duration(timestamp-time.Now().Unix()) * time.Second
|
||||
like.Expiration = &expired
|
||||
}
|
||||
}
|
||||
}
|
||||
*/
|
||||
return like, nil
|
||||
}
|
||||
|
||||
func (y *Yun189PC) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||
func (y *Cloud189PC) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
|
||||
fullUrl := API_URL
|
||||
if y.isFamily() {
|
||||
fullUrl += "/family/file"
|
||||
}
|
||||
fullUrl += "/createFolder.action"
|
||||
|
||||
var newFolder Cloud189Folder
|
||||
_, err := y.post(fullUrl, func(req *resty.Request) {
|
||||
req.SetContext(ctx)
|
||||
req.SetQueryParams(map[string]string{
|
||||
@ -170,11 +161,15 @@ func (y *Yun189PC) MakeDir(ctx context.Context, parentDir model.Obj, dirName str
|
||||
"parentFolderId": parentDir.GetID(),
|
||||
})
|
||||
}
|
||||
}, nil)
|
||||
return err
|
||||
}, &newFolder)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &newFolder, nil
|
||||
}
|
||||
|
||||
func (y *Yun189PC) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
func (y *Cloud189PC) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
|
||||
var resp CreateBatchTaskResp
|
||||
_, err := y.post(API_URL+"/batch/createBatchTask.action", func(req *resty.Request) {
|
||||
req.SetContext(ctx)
|
||||
req.SetFormData(map[string]string{
|
||||
@ -194,11 +189,17 @@ func (y *Yun189PC) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
"familyId": y.FamilyID,
|
||||
})
|
||||
}
|
||||
}, nil)
|
||||
return err
|
||||
}, &resp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err = y.WaitBatchTask("MOVE", resp.TaskID, time.Millisecond*400); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return srcObj, nil
|
||||
}
|
||||
|
||||
func (y *Yun189PC) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||
func (y *Cloud189PC) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
|
||||
queryParam := make(map[string]string)
|
||||
fullUrl := API_URL
|
||||
method := http.MethodPost
|
||||
@ -207,23 +208,34 @@ func (y *Yun189PC) Rename(ctx context.Context, srcObj model.Obj, newName string)
|
||||
method = http.MethodGet
|
||||
queryParam["familyId"] = y.FamilyID
|
||||
}
|
||||
if srcObj.IsDir() {
|
||||
fullUrl += "/renameFolder.action"
|
||||
queryParam["folderId"] = srcObj.GetID()
|
||||
queryParam["destFolderName"] = newName
|
||||
} else {
|
||||
|
||||
var newObj model.Obj
|
||||
switch f := srcObj.(type) {
|
||||
case *Cloud189File:
|
||||
fullUrl += "/renameFile.action"
|
||||
queryParam["fileId"] = srcObj.GetID()
|
||||
queryParam["destFileName"] = newName
|
||||
newObj = &Cloud189File{Icon: f.Icon} // reuse the existing thumbnail
|
||||
case *Cloud189Folder:
|
||||
fullUrl += "/renameFolder.action"
|
||||
queryParam["folderId"] = srcObj.GetID()
|
||||
queryParam["destFolderName"] = newName
|
||||
newObj = &Cloud189Folder{}
|
||||
default:
|
||||
return nil, errs.NotSupport
|
||||
}
|
||||
|
||||
_, err := y.request(fullUrl, method, func(req *resty.Request) {
|
||||
req.SetContext(ctx)
|
||||
req.SetQueryParams(queryParam)
|
||||
}, nil, nil)
|
||||
return err
|
||||
req.SetContext(ctx).SetQueryParams(queryParam)
|
||||
}, nil, newObj)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return newObj, nil
|
||||
}
|
||||
|
||||
func (y *Yun189PC) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
func (y *Cloud189PC) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
var resp CreateBatchTaskResp
|
||||
_, err := y.post(API_URL+"/batch/createBatchTask.action", func(req *resty.Request) {
|
||||
req.SetContext(ctx)
|
||||
req.SetFormData(map[string]string{
|
||||
@ -244,11 +256,15 @@ func (y *Yun189PC) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
"familyId": y.FamilyID,
|
||||
})
|
||||
}
|
||||
}, nil)
|
||||
return err
|
||||
}, &resp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return y.WaitBatchTask("COPY", resp.TaskID, time.Second)
|
||||
}
|
||||
|
||||
func (y *Yun189PC) Remove(ctx context.Context, obj model.Obj) error {
|
||||
func (y *Cloud189PC) Remove(ctx context.Context, obj model.Obj) error {
|
||||
var resp CreateBatchTaskResp
|
||||
_, err := y.post(API_URL+"/batch/createBatchTask.action", func(req *resty.Request) {
|
||||
req.SetContext(ctx)
|
||||
req.SetFormData(map[string]string{
|
||||
@ -268,17 +284,26 @@ func (y *Yun189PC) Remove(ctx context.Context, obj model.Obj) error {
|
||||
"familyId": y.FamilyID,
|
||||
})
|
||||
}
|
||||
}, nil)
|
||||
return err
|
||||
}
|
||||
|
||||
func (y *Yun189PC) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||
if y.RapidUpload {
|
||||
return y.FastUpload(ctx, dstDir, stream, up)
|
||||
}, &resp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return y.CommonUpload(ctx, dstDir, stream, up)
|
||||
// The batch-task API is rate limited; polling too quickly can make the delete fail
|
||||
return y.WaitBatchTask("DELETE", resp.TaskID, time.Millisecond*200)
|
||||
}
|
||||
|
||||
func (y *Yun189PC) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
|
||||
return nil, errs.NotSupport
|
||||
func (y *Cloud189PC) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
|
||||
switch y.UploadMethod {
|
||||
case "old":
|
||||
return y.OldUpload(ctx, dstDir, stream, up)
|
||||
case "rapid":
|
||||
return y.FastUpload(ctx, dstDir, stream, up)
|
||||
case "stream":
|
||||
if stream.GetSize() == 0 {
|
||||
return y.FastUpload(ctx, dstDir, stream, up)
|
||||
}
|
||||
fallthrough
|
||||
default:
|
||||
return y.StreamUpload(ctx, dstDir, stream, up)
|
||||
}
|
||||
}
|
||||
|
@ -10,7 +10,9 @@ import (
|
||||
"crypto/x509"
|
||||
"encoding/hex"
|
||||
"encoding/pem"
|
||||
"encoding/xml"
|
||||
"fmt"
|
||||
"math"
|
||||
"net/http"
|
||||
"regexp"
|
||||
"strings"
|
||||
@ -78,11 +80,59 @@ func timestamp() int64 {
|
||||
}
|
||||
|
||||
func MustParseTime(str string) *time.Time {
|
||||
loc, _ := time.LoadLocation("Asia/Shanghai")
|
||||
lastOpTime, _ := time.ParseInLocation("2006-01-02 15:04:05", str, loc)
|
||||
lastOpTime, _ := time.ParseInLocation("2006-01-02 15:04:05 -07", str+" +08", time.Local)
|
||||
return &lastOpTime
|
||||
}
|
||||
|
||||
type Time time.Time

func (t *Time) UnmarshalJSON(b []byte) error { return t.Unmarshal(b) }
func (t *Time) UnmarshalXML(e *xml.Decoder, ee xml.StartElement) error {
	b, err := e.Token()
	if err != nil {
		return err
	}
	if b, ok := b.(xml.CharData); ok {
		if err = t.Unmarshal(b); err != nil {
			return err
		}
	}
	return e.Skip()
}
func (t *Time) Unmarshal(b []byte) error {
	bs := strings.Trim(string(b), "\"")
	var v time.Time
	var err error
	for _, f := range []string{"2006-01-02 15:04:05 -07", "Jan 2, 2006 15:04:05 PM -07"} {
		v, err = time.ParseInLocation(f, bs+" +08", time.Local)
		if err == nil {
			break
		}
	}
	*t = Time(v)
	return err
}
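The Time type assumes the API reports Beijing-time (UTC+8) wall-clock strings in one of two layouts, which is why Unmarshal appends a fixed "+08" offset before parsing. A minimal standalone sketch of the same idea; the payload values are made up:

package main

import (
	"encoding/json"
	"fmt"
	"strings"
	"time"
)

// Same parsing rule as the Time type above: the service sends local (UTC+8)
// wall-clock strings in two layouts, so a fixed "+08" offset is appended.
type Time time.Time

func (t *Time) UnmarshalJSON(b []byte) error {
	bs := strings.Trim(string(b), "\"")
	var v time.Time
	var err error
	for _, f := range []string{"2006-01-02 15:04:05 -07", "Jan 2, 2006 15:04:05 PM -07"} {
		v, err = time.ParseInLocation(f, bs+" +08", time.Local)
		if err == nil {
			break
		}
	}
	*t = Time(v)
	return err
}

func main() {
	var obj struct {
		LastOpTime Time `json:"lastOpTime"`
		CreateDate Time `json:"createDate"`
	}
	// Made-up payload showing the two layouts the parser accepts.
	data := `{"lastOpTime":"2023-01-02 15:04:05","createDate":"Jan 2, 2023 15:04:05 PM"}`
	if err := json.Unmarshal([]byte(data), &obj); err != nil {
		panic(err)
	}
	fmt.Println(time.Time(obj.LastOpTime).UTC()) // 2023-01-02 07:04:05 +0000 UTC
	fmt.Println(time.Time(obj.CreateDate).UTC()) // 2023-01-02 07:04:05 +0000 UTC
}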
|
||||
|
||||
type String string
|
||||
|
||||
func (t *String) UnmarshalJSON(b []byte) error { return t.Unmarshal(b) }
|
||||
func (t *String) UnmarshalXML(e *xml.Decoder, ee xml.StartElement) error {
|
||||
b, err := e.Token()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if b, ok := b.(xml.CharData); ok {
|
||||
if err = t.Unmarshal(b); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return e.Skip()
|
||||
}
|
||||
func (s *String) Unmarshal(b []byte) error {
|
||||
*s = String(bytes.Trim(b, "\""))
|
||||
return nil
|
||||
}
|
||||
|
||||
func toFamilyOrderBy(o string) string {
|
||||
switch o {
|
||||
case "filename":
|
||||
@ -111,7 +161,9 @@ func ParseHttpHeader(str string) map[string]string {
|
||||
header := make(map[string]string)
|
||||
for _, value := range strings.Split(str, "&") {
|
||||
i := strings.Index(value, "=")
|
||||
header[strings.TrimSpace(value[0:i])] = strings.TrimSpace(value[i+1:])
|
||||
if i > 0 {
|
||||
header[strings.TrimSpace(value[0:i])] = strings.TrimSpace(value[i+1:])
|
||||
}
|
||||
}
|
||||
return header
|
||||
}
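ParseHttpHeader turns the "&"-separated "key=value" string handed back by the upload API into request headers; the i > 0 guard above makes it skip entries without an "=". A standalone sketch of the same rule, with a made-up header string:

package main

import (
	"fmt"
	"strings"
)

// Same parsing rule as ParseHttpHeader: split on '&', keep only entries that
// actually contain '=', and trim whitespace around key and value.
func parseHttpHeader(str string) map[string]string {
	header := make(map[string]string)
	for _, value := range strings.Split(str, "&") {
		i := strings.Index(value, "=")
		if i > 0 {
			header[strings.TrimSpace(value[0:i])] = strings.TrimSpace(value[i+1:])
		}
	}
	return header
}

func main() {
	// Hypothetical requestHeader value, for illustration only.
	raw := "Host=upload.example.com&EdriveType=pc& Signature = abc123 &broken-entry"
	for k, v := range parseHttpHeader(raw) {
		fmt.Printf("%q -> %q\n", k, v)
	}
}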
|
||||
@ -120,13 +172,24 @@ func MustString(str string, err error) string {
|
||||
return str
|
||||
}
|
||||
|
||||
func MustToBytes(b []byte, err error) []byte {
|
||||
return b
|
||||
}
|
||||
|
||||
func BoolToNumber(b bool) int {
|
||||
if b {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
// Compute the part (chunk) size for multipart uploads.
// The API caps the number of parts:
//   10 MiB or 20 MiB parts     -> at most 999 parts
//   50/60/70/80/... MiB parts  -> at most 1999 parts
func partSize(size int64) int64 {
	const DEFAULT = 1024 * 1024 * 10 // 10 MiB
	if size > DEFAULT*2*999 {
		return int64(math.Max(math.Ceil((float64(size)/1999) /*=size of one part*/ /float64(DEFAULT)) /*=multiplier*/, 5) * DEFAULT)
	}
	if size > DEFAULT*999 {
		return DEFAULT * 2 // 20 MiB
	}
	return DEFAULT
}
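The rule above grows the part size with the file size so the part count stays under the 999/1999 limits. A standalone sketch (the file sizes are arbitrary examples) that tabulates the resulting part size and part count:

package main

import (
	"fmt"
	"math"
)

// Same sizing rule as partSize above.
func partSize(size int64) int64 {
	const DEFAULT = 1024 * 1024 * 10 // 10 MiB
	if size > DEFAULT*2*999 {
		return int64(math.Max(math.Ceil((float64(size)/1999)/float64(DEFAULT)), 5) * DEFAULT)
	}
	if size > DEFAULT*999 {
		return DEFAULT * 2 // 20 MiB
	}
	return DEFAULT
}

func main() {
	const GiB = int64(1 << 30)
	// Arbitrary example sizes, just to show how part size and part count scale.
	for _, size := range []int64{1 * GiB, 15 * GiB, 50 * GiB, 200 * GiB} {
		ps := partSize(size)
		parts := (size + ps - 1) / ps
		fmt.Printf("file %4d GiB -> part %3d MiB, %4d parts\n", size/GiB, ps/(1<<20), parts)
	}
}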
|
||||
|
@ -6,28 +6,26 @@ import (
|
||||
)
|
||||
|
||||
type Addition struct {
|
||||
Username string `json:"username" required:"true"`
|
||||
Password string `json:"password" required:"true"`
|
||||
VCode string `json:"validate_code"`
|
||||
RootFolderID string `json:"root_folder_id"`
|
||||
Username string `json:"username" required:"true"`
|
||||
Password string `json:"password" required:"true"`
|
||||
VCode string `json:"validate_code"`
|
||||
driver.RootID
|
||||
OrderBy string `json:"order_by" type:"select" options:"filename,filesize,lastOpTime" default:"filename"`
|
||||
OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
|
||||
Type string `json:"type" type:"select" options:"personal,family" default:"personal"`
|
||||
FamilyID string `json:"family_id"`
|
||||
RapidUpload bool `json:"rapid_upload"`
|
||||
}
|
||||
|
||||
func (a Addition) GetRootId() string {
|
||||
return a.RootFolderID
|
||||
UploadMethod string `json:"upload_method" type:"select" options:"stream,rapid,old" default:"stream"`
|
||||
NoUseOcr bool `json:"no_use_ocr"`
|
||||
}
|
||||
|
||||
var config = driver.Config{
|
||||
Name: "189CloudPC",
|
||||
DefaultRoot: "-11",
|
||||
CheckStatus: true,
|
||||
}
|
||||
|
||||
func init() {
|
||||
op.RegisterDriver(config, func() driver.Driver {
|
||||
return &Yun189PC{}
|
||||
op.RegisterDriver(func() driver.Driver {
|
||||
return &Cloud189PC{}
|
||||
})
|
||||
}
|
||||
|
@ -10,20 +10,62 @@ import (
|
||||
|
||||
// The API uses no fewer than four different response formats
|
||||
type RespErr struct {
|
||||
ResCode string `json:"res_code"`
|
||||
ResCode any `json:"res_code"` // int or string
|
||||
ResMessage string `json:"res_message"`
|
||||
|
||||
Error_ string `json:"error"`
|
||||
|
||||
XMLName xml.Name `xml:"error"`
|
||||
Code string `json:"code" xml:"code"`
|
||||
Message string `json:"message" xml:"message"`
|
||||
|
||||
// Code string `json:"code"`
|
||||
Msg string `json:"msg"`
|
||||
Msg string `json:"msg"`
|
||||
|
||||
ErrorCode string `json:"errorCode"`
|
||||
ErrorMsg string `json:"errorMsg"`
|
||||
}
|
||||
|
||||
func (e *RespErr) HasError() bool {
|
||||
switch v := e.ResCode.(type) {
|
||||
case int, int64, int32:
|
||||
return v != 0
|
||||
case string:
|
||||
return e.ResCode != ""
|
||||
}
|
||||
return (e.Code != "" && e.Code != "SUCCESS") || e.ErrorCode != "" || e.Error_ != ""
|
||||
}
|
||||
|
||||
func (e *RespErr) Error() string {
|
||||
switch v := e.ResCode.(type) {
|
||||
case int, int64, int32:
|
||||
if v != 0 {
|
||||
return fmt.Sprintf("res_code: %d ,res_msg: %s", v, e.ResMessage)
|
||||
}
|
||||
case string:
|
||||
if e.ResCode != "" {
|
||||
return fmt.Sprintf("res_code: %s ,res_msg: %s", e.ResCode, e.ResMessage)
|
||||
}
|
||||
}
|
||||
|
||||
if e.Code != "" && e.Code != "SUCCESS" {
|
||||
if e.Msg != "" {
|
||||
return fmt.Sprintf("code: %s ,msg: %s", e.Code, e.Msg)
|
||||
}
|
||||
if e.Message != "" {
|
||||
return fmt.Sprintf("code: %s ,msg: %s", e.Code, e.Message)
|
||||
}
|
||||
return "code: " + e.Code
|
||||
}
|
||||
|
||||
if e.ErrorCode != "" {
|
||||
return fmt.Sprintf("err_code: %s ,err_msg: %s", e.ErrorCode, e.ErrorMsg)
|
||||
}
|
||||
|
||||
if e.Error_ != "" {
|
||||
return fmt.Sprintf("error: %s ,message: %s", e.ErrorCode, e.Message)
|
||||
}
|
||||
return ""
|
||||
}
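Because res_code can arrive as either a number or a string, HasError has to type-switch on it before falling back to the other error fields. A trimmed-down standalone sketch of that probing; the payloads are invented, and note that encoding/json decodes JSON numbers held in an "any" field as float64:

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed-down version of RespErr: res_code may arrive as a number or a string.
type apiErr struct {
	ResCode    any    `json:"res_code"`
	ResMessage string `json:"res_message"`
	ErrorCode  string `json:"errorCode"`
	ErrorMsg   string `json:"errorMsg"`
}

func (e *apiErr) hasError() bool {
	switch v := e.ResCode.(type) {
	case float64: // encoding/json stores JSON numbers as float64 in an any field
		return v != 0
	case string:
		return v != ""
	}
	return e.ErrorCode != ""
}

func main() {
	for _, body := range []string{
		`{"res_code":0,"res_message":"success"}`,
		`{"res_code":"InvalidArgument","res_message":"bad request"}`,
		`{"errorCode":"InvalidSessionKey","errorMsg":"session expired"}`,
	} {
		var e apiErr
		_ = json.Unmarshal([]byte(body), &e)
		fmt.Printf("%-60s hasError=%v\n", body, e.hasError())
	}
}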
|
||||
|
||||
// Parameters required for login
|
||||
type LoginParam struct {
|
||||
// RSA-encrypted username and password
|
||||
@ -109,8 +151,13 @@ type FamilyInfoResp struct {
|
||||
/* File section */
// File
|
||||
type Cloud189File struct {
|
||||
CreateDate string `json:"createDate"`
|
||||
FileCata int64 `json:"fileCata"`
|
||||
ID String `json:"id"`
|
||||
Name string `json:"name"`
|
||||
Size int64 `json:"size"`
|
||||
Md5 string `json:"md5"`
|
||||
|
||||
LastOpTime Time `json:"lastOpTime"`
|
||||
CreateDate Time `json:"createDate"`
|
||||
Icon struct {
|
||||
//iconOption 5
|
||||
SmallUrl string `json:"smallUrl"`
|
||||
@ -120,62 +167,44 @@ type Cloud189File struct {
|
||||
Max600 string `json:"max600"`
|
||||
MediumURL string `json:"mediumUrl"`
|
||||
} `json:"icon"`
|
||||
ID int64 `json:"id"`
|
||||
LastOpTime string `json:"lastOpTime"`
|
||||
Md5 string `json:"md5"`
|
||||
MediaType int `json:"mediaType"`
|
||||
Name string `json:"name"`
|
||||
Orientation int64 `json:"orientation"`
|
||||
Rev string `json:"rev"`
|
||||
Size int64 `json:"size"`
|
||||
StarLabel int64 `json:"starLabel"`
|
||||
|
||||
parseTime *time.Time
|
||||
// Orientation int64 `json:"orientation"`
|
||||
// FileCata int64 `json:"fileCata"`
|
||||
// MediaType int `json:"mediaType"`
|
||||
// Rev string `json:"rev"`
|
||||
// StarLabel int64 `json:"starLabel"`
|
||||
}
|
||||
|
||||
func (c *Cloud189File) GetSize() int64 { return c.Size }
|
||||
func (c *Cloud189File) GetName() string { return c.Name }
|
||||
func (c *Cloud189File) ModTime() time.Time {
|
||||
if c.parseTime == nil {
|
||||
c.parseTime = MustParseTime(c.LastOpTime)
|
||||
}
|
||||
return *c.parseTime
|
||||
}
|
||||
func (c *Cloud189File) IsDir() bool { return false }
|
||||
func (c *Cloud189File) GetID() string { return fmt.Sprint(c.ID) }
|
||||
func (c *Cloud189File) GetPath() string { return "" }
|
||||
func (c *Cloud189File) Thumb() string { return c.Icon.SmallUrl }
|
||||
func (c *Cloud189File) GetSize() int64 { return c.Size }
|
||||
func (c *Cloud189File) GetName() string { return c.Name }
|
||||
func (c *Cloud189File) ModTime() time.Time { return time.Time(c.LastOpTime) }
|
||||
func (c *Cloud189File) IsDir() bool { return false }
|
||||
func (c *Cloud189File) GetID() string { return string(c.ID) }
|
||||
func (c *Cloud189File) GetPath() string { return "" }
|
||||
func (c *Cloud189File) Thumb() string { return c.Icon.SmallUrl }
|
||||
|
||||
// Folder
|
||||
type Cloud189Folder struct {
|
||||
ID int64 `json:"id"`
|
||||
ID String `json:"id"`
|
||||
ParentID int64 `json:"parentId"`
|
||||
Name string `json:"name"`
|
||||
|
||||
FileCata int64 `json:"fileCata"`
|
||||
FileCount int64 `json:"fileCount"`
|
||||
LastOpTime Time `json:"lastOpTime"`
|
||||
CreateDate Time `json:"createDate"`
|
||||
|
||||
LastOpTime string `json:"lastOpTime"`
|
||||
CreateDate string `json:"createDate"`
|
||||
|
||||
FileListSize int64 `json:"fileListSize"`
|
||||
Rev string `json:"rev"`
|
||||
StarLabel int64 `json:"starLabel"`
|
||||
|
||||
parseTime *time.Time
|
||||
// FileListSize int64 `json:"fileListSize"`
|
||||
// FileCount int64 `json:"fileCount"`
|
||||
// FileCata int64 `json:"fileCata"`
|
||||
// Rev string `json:"rev"`
|
||||
// StarLabel int64 `json:"starLabel"`
|
||||
}
|
||||
|
||||
func (c *Cloud189Folder) GetSize() int64 { return 0 }
|
||||
func (c *Cloud189Folder) GetName() string { return c.Name }
|
||||
func (c *Cloud189Folder) ModTime() time.Time {
|
||||
if c.parseTime == nil {
|
||||
c.parseTime = MustParseTime(c.LastOpTime)
|
||||
}
|
||||
return *c.parseTime
|
||||
}
|
||||
func (c *Cloud189Folder) IsDir() bool { return true }
|
||||
func (c *Cloud189Folder) GetID() string { return fmt.Sprint(c.ID) }
|
||||
func (c *Cloud189Folder) GetPath() string { return "" }
|
||||
func (c *Cloud189Folder) GetSize() int64 { return 0 }
|
||||
func (c *Cloud189Folder) GetName() string { return c.Name }
|
||||
func (c *Cloud189Folder) ModTime() time.Time { return time.Time(c.LastOpTime) }
|
||||
func (c *Cloud189Folder) IsDir() bool { return true }
|
||||
func (c *Cloud189Folder) GetID() string { return string(c.ID) }
|
||||
func (c *Cloud189Folder) GetPath() string { return "" }
|
||||
|
||||
type Cloud189FilesResp struct {
|
||||
//ResCode int `json:"res_code"`
|
||||
@ -218,6 +247,87 @@ type Part struct {
|
||||
RequestHeader string `json:"requestHeader"`
|
||||
}
|
||||
|
||||
/* Second upload method */
|
||||
type CreateUploadFileResp struct {
|
||||
// ID of the upload-file request
|
||||
UploadFileId int64 `json:"uploadFileId"`
|
||||
// URL to which the file data is uploaded
|
||||
FileUploadUrl string `json:"fileUploadUrl"`
|
||||
// URL used to confirm (commit) the upload once it finishes
|
||||
FileCommitUrl string `json:"fileCommitUrl"`
|
||||
// Whether the file already exists in the drive: 0 = not present, 1 = present
|
||||
FileDataExists int `json:"fileDataExists"`
|
||||
}
|
||||
|
||||
type GetUploadFileStatusResp struct {
|
||||
CreateUploadFileResp
|
||||
|
||||
// Bytes uploaded so far
|
||||
DataSize int64 `json:"dataSize"`
|
||||
Size int64 `json:"size"`
|
||||
}
|
||||
|
||||
func (r *GetUploadFileStatusResp) GetSize() int64 {
|
||||
return r.DataSize + r.Size
|
||||
}
|
||||
|
||||
type CommitMultiUploadFileResp struct {
|
||||
File struct {
|
||||
UserFileID String `json:"userFileId"`
|
||||
FileName string `json:"fileName"`
|
||||
FileSize int64 `json:"fileSize"`
|
||||
FileMd5 string `json:"fileMd5"`
|
||||
CreateDate Time `json:"createDate"`
|
||||
} `json:"file"`
|
||||
}
|
||||
|
||||
func (f *CommitMultiUploadFileResp) toFile() *Cloud189File {
|
||||
return &Cloud189File{
|
||||
ID: f.File.UserFileID,
|
||||
Name: f.File.FileName,
|
||||
Size: f.File.FileSize,
|
||||
Md5: f.File.FileMd5,
|
||||
LastOpTime: f.File.CreateDate,
|
||||
CreateDate: f.File.CreateDate,
|
||||
}
|
||||
}
|
||||
|
||||
type OldCommitUploadFileResp struct {
|
||||
XMLName xml.Name `xml:"file"`
|
||||
ID String `xml:"id"`
|
||||
Name string `xml:"name"`
|
||||
Size int64 `xml:"size"`
|
||||
Md5 string `xml:"md5"`
|
||||
CreateDate Time `xml:"createDate"`
|
||||
}
|
||||
|
||||
func (f *OldCommitUploadFileResp) toFile() *Cloud189File {
|
||||
return &Cloud189File{
|
||||
ID: f.ID,
|
||||
Name: f.Name,
|
||||
Size: f.Size,
|
||||
Md5: f.Md5,
|
||||
CreateDate: f.CreateDate,
|
||||
LastOpTime: f.CreateDate,
|
||||
}
|
||||
}
|
||||
|
||||
type CreateBatchTaskResp struct {
|
||||
TaskID string `json:"taskId"`
|
||||
}
|
||||
|
||||
type BatchTaskStateResp struct {
|
||||
FailedCount int `json:"failedCount"`
|
||||
Process int `json:"process"`
|
||||
SkipCount int `json:"skipCount"`
|
||||
SubTaskCount int `json:"subTaskCount"`
|
||||
SuccessedCount int `json:"successedCount"`
|
||||
SuccessedFileIDList []int64 `json:"successedFileIdList"`
|
||||
TaskID string `json:"taskId"`
|
||||
TaskStatus int `json:"taskStatus"` // 1 initializing, 2 conflict, 3 running, 4 done
|
||||
}
|
||||
|
||||
/* Encrypted query parameters */
|
||||
type Params map[string]string
|
||||
|
||||
func (p Params) Set(k, v string) {
|
||||
|
@ -6,6 +6,7 @@ import (
|
||||
"crypto/md5"
|
||||
"encoding/base64"
|
||||
"encoding/hex"
|
||||
"encoding/xml"
|
||||
"fmt"
|
||||
"io"
|
||||
"math"
|
||||
@ -15,6 +16,7 @@ import (
|
||||
"os"
|
||||
"regexp"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/conf"
|
||||
@ -23,9 +25,12 @@ import (
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
"github.com/alist-org/alist/v3/internal/setting"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
|
||||
"github.com/avast/retry-go"
|
||||
"github.com/go-resty/resty/v2"
|
||||
"github.com/google/uuid"
|
||||
jsoniter "github.com/json-iterator/go"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
const (
|
||||
@ -47,7 +52,7 @@ const (
|
||||
CHANNEL_ID = "web_cloud.189.cn"
|
||||
)
|
||||
|
||||
func (y *Yun189PC) request(url, method string, callback base.ReqCallback, params Params, resp interface{}) ([]byte, error) {
|
||||
func (y *Cloud189PC) SignatureHeader(url, method, params string) map[string]string {
|
||||
dateOfGmt := getHttpDateStr()
|
||||
sessionKey := y.tokenInfo.SessionKey
|
||||
sessionSecret := y.tokenInfo.SessionSecret
|
||||
@ -56,19 +61,40 @@ func (y *Yun189PC) request(url, method string, callback base.ReqCallback, params
|
||||
sessionSecret = y.tokenInfo.FamilySessionSecret
|
||||
}
|
||||
|
||||
req := y.client.R().SetQueryParams(clientSuffix()).SetHeaders(map[string]string{
|
||||
header := map[string]string{
|
||||
"Date": dateOfGmt,
|
||||
"SessionKey": sessionKey,
|
||||
"X-Request-ID": uuid.NewString(),
|
||||
})
|
||||
"Signature": signatureOfHmac(sessionSecret, sessionKey, method, url, dateOfGmt, params),
|
||||
}
|
||||
return header
|
||||
}
|
||||
|
||||
func (y *Cloud189PC) EncryptParams(params Params) string {
|
||||
sessionSecret := y.tokenInfo.SessionSecret
|
||||
if y.isFamily() {
|
||||
sessionSecret = y.tokenInfo.FamilySessionSecret
|
||||
}
|
||||
if params != nil {
|
||||
return AesECBEncrypt(params.Encode(), sessionSecret[:16])
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (y *Cloud189PC) request(url, method string, callback base.ReqCallback, params Params, resp interface{}) ([]byte, error) {
|
||||
req := y.client.R().SetQueryParams(clientSuffix())
|
||||
|
||||
// Set the encrypted params
|
||||
var paramsData string
|
||||
if params != nil {
|
||||
paramsData = AesECBEncrypt(params.Encode(), sessionSecret[:16])
|
||||
paramsData := y.EncryptParams(params)
|
||||
if paramsData != "" {
|
||||
req.SetQueryParam("params", paramsData)
|
||||
}
|
||||
req.SetHeader("Signature", signatureOfHmac(sessionSecret, sessionKey, method, url, dateOfGmt, paramsData))
|
||||
|
||||
// Signature
|
||||
req.SetHeaders(y.SignatureHeader(url, method, paramsData))
|
||||
|
||||
var erron RespErr
|
||||
req.SetError(&erron)
|
||||
|
||||
if callback != nil {
|
||||
callback(req)
|
||||
@ -80,32 +106,6 @@ func (y *Yun189PC) request(url, method string, callback base.ReqCallback, params
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var erron RespErr
|
||||
utils.Json.Unmarshal(res.Body(), &erron)
|
||||
|
||||
if erron.ResCode != "" {
|
||||
return nil, fmt.Errorf("res_code: %s ,res_msg: %s", erron.ResCode, erron.ResMessage)
|
||||
}
|
||||
if erron.Code != "" && erron.Code != "SUCCESS" {
|
||||
if erron.Msg != "" {
|
||||
return nil, fmt.Errorf("code: %s ,msg: %s", erron.Code, erron.Msg)
|
||||
}
|
||||
if erron.Message != "" {
|
||||
return nil, fmt.Errorf("code: %s ,msg: %s", erron.Code, erron.Message)
|
||||
}
|
||||
return nil, fmt.Errorf(res.String())
|
||||
}
|
||||
switch erron.ErrorCode {
|
||||
case "":
|
||||
break
|
||||
case "InvalidSessionKey":
|
||||
if err = y.refreshSession(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return y.request(url, method, callback, params, resp)
|
||||
default:
|
||||
return nil, fmt.Errorf("err_code: %s ,err_msg: %s", erron.ErrorCode, erron.ErrorMsg)
|
||||
}
|
||||
|
||||
if strings.Contains(res.String(), "userSessionBO is null") {
|
||||
if err = y.refreshSession(); err != nil {
|
||||
@ -114,25 +114,72 @@ func (y *Yun189PC) request(url, method string, callback base.ReqCallback, params
|
||||
return y.request(url, method, callback, params, resp)
|
||||
}
|
||||
|
||||
resCode := utils.Json.Get(res.Body(), "res_code").ToInt64()
|
||||
message := utils.Json.Get(res.Body(), "res_message").ToString()
|
||||
switch resCode {
|
||||
case 0:
|
||||
return res.Body(), nil
|
||||
default:
|
||||
return nil, fmt.Errorf("res_code: %d ,res_msg: %s", resCode, message)
|
||||
// Handle API errors
|
||||
if erron.HasError() {
|
||||
if erron.ErrorCode == "InvalidSessionKey" {
|
||||
if err = y.refreshSession(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return y.request(url, method, callback, params, resp)
|
||||
}
|
||||
return nil, &erron
|
||||
}
|
||||
return res.Body(), nil
|
||||
}
|
||||
|
||||
func (y *Yun189PC) get(url string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
||||
func (y *Cloud189PC) get(url string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
||||
return y.request(url, http.MethodGet, callback, nil, resp)
|
||||
}
|
||||
|
||||
func (y *Yun189PC) post(url string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
||||
func (y *Cloud189PC) post(url string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
||||
return y.request(url, http.MethodPost, callback, nil, resp)
|
||||
}
|
||||
|
||||
func (y *Yun189PC) getFiles(ctx context.Context, fileId string) ([]model.Obj, error) {
|
||||
func (y *Cloud189PC) put(ctx context.Context, url string, headers map[string]string, sign bool, file io.Reader) ([]byte, error) {
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodPut, url, file)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
query := req.URL.Query()
|
||||
for key, value := range clientSuffix() {
|
||||
query.Add(key, value)
|
||||
}
|
||||
req.URL.RawQuery = query.Encode()
|
||||
|
||||
for key, value := range headers {
|
||||
req.Header.Add(key, value)
|
||||
}
|
||||
|
||||
if sign {
|
||||
for key, value := range y.SignatureHeader(url, http.MethodPut, "") {
|
||||
req.Header.Add(key, value)
|
||||
}
|
||||
}
|
||||
|
||||
resp, err := base.HttpClient.Do(req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var erron RespErr
|
||||
jsoniter.Unmarshal(body, &erron)
|
||||
xml.Unmarshal(body, &erron)
|
||||
if erron.HasError() {
|
||||
return nil, &erron
|
||||
}
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return nil, errors.Errorf("put fail,err:%s", string(body))
|
||||
}
|
||||
return body, nil
|
||||
}
|
||||
func (y *Cloud189PC) getFiles(ctx context.Context, fileId string) ([]model.Obj, error) {
|
||||
fullUrl := API_URL
|
||||
if y.isFamily() {
|
||||
fullUrl += "/family/file"
|
||||
@ -140,13 +187,7 @@ func (y *Yun189PC) getFiles(ctx context.Context, fileId string) ([]model.Obj, er
|
||||
fullUrl += "/listFiles.action"
|
||||
|
||||
res := make([]model.Obj, 0, 130)
|
||||
for pageNum := 1; pageNum < 100; pageNum++ {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return nil, ctx.Err()
|
||||
default:
|
||||
}
|
||||
|
||||
for pageNum := 1; ; pageNum++ {
|
||||
var resp Cloud189FilesResp
|
||||
_, err := y.get(fullUrl, func(r *resty.Request) {
|
||||
r.SetContext(ctx)
|
||||
@ -190,7 +231,7 @@ func (y *Yun189PC) getFiles(ctx context.Context, fileId string) ([]model.Obj, er
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (y *Yun189PC) login() (err error) {
|
||||
func (y *Cloud189PC) login() (err error) {
|
||||
// Initialize the parameters required for login
|
||||
if y.loginParam == nil {
|
||||
if err = y.initLoginParam(); err != nil {
|
||||
@ -198,14 +239,13 @@ func (y *Yun189PC) login() (err error) {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
defer func() {
|
||||
// Discard the captcha code
|
||||
y.VCode = ""
|
||||
// Discard the login parameters
|
||||
y.loginParam = nil
|
||||
// 遇到错误,重新加载登陆参数
|
||||
if err != nil {
|
||||
// On error, reload the login parameters (refreshes the captcha)
|
||||
if err != nil && y.NoUseOcr {
|
||||
if err1 := y.initLoginParam(); err1 != nil {
|
||||
err = fmt.Errorf("err1: %s \nerr2: %s", err, err1)
|
||||
}
|
||||
@ -228,7 +268,7 @@ func (y *Yun189PC) login() (err error) {
|
||||
"validateCode": y.VCode,
|
||||
"captchaToken": param.CaptchaToken,
|
||||
"returnUrl": RETURN_URL,
|
||||
"mailSuffix": "@189.cn",
|
||||
// "mailSuffix": "@189.cn",
|
||||
"dynamicCheck": "FALSE",
|
||||
"clientType": CLIENT_TYPE,
|
||||
"cb_SaveName": "1",
|
||||
@ -256,9 +296,8 @@ func (y *Yun189PC) login() (err error) {
|
||||
return
|
||||
}
|
||||
|
||||
if erron.ResCode != "" {
|
||||
err = fmt.Errorf(erron.ResMessage)
|
||||
return
|
||||
if erron.HasError() {
|
||||
return &erron
|
||||
}
|
||||
if tokenInfo.ResCode != 0 {
|
||||
err = fmt.Errorf(tokenInfo.ResMessage)
|
||||
@ -271,7 +310,7 @@ func (y *Yun189PC) login() (err error) {
|
||||
/* Initialize the parameters required for login.
 * Returns an error if a captcha is required.
|
||||
*/
|
||||
func (y *Yun189PC) initLoginParam() error {
|
||||
func (y *Cloud189PC) initLoginParam() error {
|
||||
// Clear cookies
|
||||
jar, _ := cookiejar.New(nil)
|
||||
y.client.SetCookieJar(jar)
|
||||
@ -309,50 +348,56 @@ func (y *Yun189PC) initLoginParam() error {
|
||||
param.jRsaKey = fmt.Sprintf("-----BEGIN PUBLIC KEY-----\n%s\n-----END PUBLIC KEY-----", encryptConf.Data.PubKey)
|
||||
param.RsaUsername = encryptConf.Data.Pre + RsaEncrypt(param.jRsaKey, y.Username)
|
||||
param.RsaPassword = encryptConf.Data.Pre + RsaEncrypt(param.jRsaKey, y.Password)
|
||||
y.loginParam = ¶m
|
||||
|
||||
// Determine whether a captcha is required
|
||||
res, err = y.client.R().
|
||||
resp, err := y.client.R().
|
||||
SetHeader("REQID", param.ReqId).
|
||||
SetFormData(map[string]string{
|
||||
"appKey": APP_ID,
|
||||
"accountType": ACCOUNT_TYPE,
|
||||
"userName": param.RsaUsername,
|
||||
}).
|
||||
Post(AUTH_URL + "/api/logbox/oauth2/needcaptcha.do")
|
||||
}).Post(AUTH_URL + "/api/logbox/oauth2/needcaptcha.do")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if resp.String() == "0" {
|
||||
return nil
|
||||
}
|
||||
|
||||
y.loginParam = ¶m
|
||||
if res.String() != "0" {
|
||||
imgRes, err := y.client.R().
|
||||
SetQueryParams(map[string]string{
|
||||
"token": param.CaptchaToken,
|
||||
"REQID": param.ReqId,
|
||||
"rnd": fmt.Sprint(timestamp()),
|
||||
}).
|
||||
Get(AUTH_URL + "/api/logbox/oauth2/picCaptcha.do")
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to obtain verification code")
|
||||
// Fetch the captcha image
|
||||
imgRes, err := y.client.R().
|
||||
SetQueryParams(map[string]string{
|
||||
"token": param.CaptchaToken,
|
||||
"REQID": param.ReqId,
|
||||
"rnd": fmt.Sprint(timestamp()),
|
||||
}).
|
||||
Get(AUTH_URL + "/api/logbox/oauth2/picCaptcha.do")
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to obtain verification code")
|
||||
}
|
||||
if imgRes.Size() > 20 {
|
||||
if setting.GetStr(conf.OcrApi) != "" && !y.NoUseOcr {
|
||||
vRes, err := base.RestyClient.R().
|
||||
SetMultipartField("image", "validateCode.png", "image/png", bytes.NewReader(imgRes.Body())).
|
||||
Post(setting.GetStr(conf.OcrApi))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if jsoniter.Get(vRes.Body(), "status").ToInt() == 200 {
|
||||
y.VCode = jsoniter.Get(vRes.Body(), "result").ToString()
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// 尝试使用ocr
|
||||
vRes, err := base.RestyClient.R().
|
||||
SetMultipartField("image", "validateCode.png", "image/png", bytes.NewReader(imgRes.Body())).
|
||||
Post(setting.GetStr(conf.OcrApi))
|
||||
if err == nil && jsoniter.Get(vRes.Body(), "status").ToInt() == 200 {
|
||||
y.VCode = jsoniter.Get(vRes.Body(), "result").ToString()
|
||||
}
|
||||
|
||||
// ocr无法处理,返回验证码图片给前端
|
||||
if len(y.VCode) != 4 {
|
||||
return fmt.Errorf("need validate code: data:image/png;base64,%s", base64.StdEncoding.EncodeToString(res.Body()))
|
||||
}
|
||||
// Return the captcha image to the frontend
|
||||
return fmt.Errorf(`need img validate code: <img src="data:image/png;base64,%s"/>`, base64.StdEncoding.EncodeToString(imgRes.Body()))
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Refresh the session
|
||||
func (y *Yun189PC) refreshSession() (err error) {
|
||||
func (y *Cloud189PC) refreshSession() (err error) {
|
||||
var erron RespErr
|
||||
var userSessionResp UserSessionResp
|
||||
_, err = y.client.R().
|
||||
@ -376,31 +421,23 @@ func (y *Yun189PC) refreshSession() (err error) {
|
||||
}
|
||||
}()
|
||||
|
||||
switch erron.ResCode {
|
||||
case "":
|
||||
break
|
||||
case "UserInvalidOpenToken":
|
||||
if err = y.login(); err != nil {
|
||||
return err
|
||||
if erron.HasError() {
|
||||
if erron.ResCode == "UserInvalidOpenToken" {
|
||||
if err = y.login(); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
default:
|
||||
err = fmt.Errorf("res_code: %s ,res_msg: %s", erron.ResCode, erron.ResMessage)
|
||||
return
|
||||
}
|
||||
|
||||
switch userSessionResp.ResCode {
|
||||
case 0:
|
||||
y.tokenInfo.UserSessionResp = userSessionResp
|
||||
default:
|
||||
err = fmt.Errorf("code: %d , msg: %s", userSessionResp.ResCode, userSessionResp.ResMessage)
|
||||
return &erron
|
||||
}
|
||||
y.tokenInfo.UserSessionResp = userSessionResp
|
||||
return
|
||||
}
|
||||
|
||||
// Regular upload
|
||||
func (y *Yun189PC) CommonUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (err error) {
|
||||
const DEFAULT int64 = 10485760
|
||||
var count = int64(math.Ceil(float64(file.GetSize()) / float64(DEFAULT)))
|
||||
// Zero-byte files cannot be uploaded this way
|
||||
func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
|
||||
var DEFAULT = partSize(file.GetSize())
|
||||
var count = int(math.Ceil(float64(file.GetSize()) / float64(DEFAULT)))
|
||||
|
||||
params := Params{
|
||||
"parentFolderId": dstDir.GetID(),
|
||||
@ -421,22 +458,20 @@ func (y *Yun189PC) CommonUpload(ctx context.Context, dstDir model.Obj, file mode
|
||||
|
||||
// Initialize the multipart upload
|
||||
var initMultiUpload InitMultiUploadResp
|
||||
_, err = y.request(fullUrl+"/initMultiUpload", http.MethodGet, func(req *resty.Request) {
|
||||
_, err := y.request(fullUrl+"/initMultiUpload", http.MethodGet, func(req *resty.Request) {
|
||||
req.SetContext(ctx)
|
||||
}, params, &initMultiUpload)
|
||||
if err != nil {
|
||||
return err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
fileMd5 := md5.New()
|
||||
silceMd5 := md5.New()
|
||||
silceMd5Hexs := make([]string, 0, count)
|
||||
byteData := bytes.NewBuffer(make([]byte, DEFAULT))
|
||||
for i := int64(1); i <= count; i++ {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return ctx.Err()
|
||||
default:
|
||||
for i := 1; i <= count; i++ {
|
||||
if utils.IsCanceled(ctx) {
|
||||
return nil, ctx.Err()
|
||||
}
|
||||
|
||||
// Read one chunk
|
||||
@ -444,7 +479,7 @@ func (y *Yun189PC) CommonUpload(ctx context.Context, dstDir model.Obj, file mode
|
||||
silceMd5.Reset()
|
||||
_, err := io.CopyN(io.MultiWriter(fileMd5, silceMd5, byteData), file, DEFAULT)
|
||||
if err != io.EOF && err != io.ErrUnexpectedEOF && err != nil {
|
||||
return err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Compute the chunk MD5 and encode it as hex and base64
|
||||
@ -462,22 +497,22 @@ func (y *Yun189PC) CommonUpload(ctx context.Context, dstDir model.Obj, file mode
|
||||
"uploadFileId": initMultiUpload.Data.UploadFileID,
|
||||
}, &uploadUrl)
|
||||
if err != nil {
|
||||
return err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Start uploading
|
||||
uploadData := uploadUrl.UploadUrls[fmt.Sprint("partNumber_", i)]
|
||||
res, err := y.putClient.R().
|
||||
SetContext(ctx).
|
||||
SetQueryParams(clientSuffix()).
|
||||
SetHeaders(ParseHttpHeader(uploadData.RequestHeader)).
|
||||
SetBody(byteData).
|
||||
Put(uploadData.RequestURL)
|
||||
if err != nil {
|
||||
|
||||
err = retry.Do(func() error {
|
||||
_, err := y.put(ctx, uploadData.RequestURL, ParseHttpHeader(uploadData.RequestHeader), false, bytes.NewReader(byteData.Bytes()))
|
||||
return err
|
||||
}
|
||||
if res.StatusCode() != http.StatusOK {
|
||||
return fmt.Errorf("updload fail,msg: %s", res.String())
|
||||
},
|
||||
retry.Context(ctx),
|
||||
retry.Attempts(3),
|
||||
retry.Delay(time.Second),
|
||||
retry.MaxDelay(5*time.Second))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
up(int(i * 100 / count))
|
||||
}
|
||||
@ -485,10 +520,11 @@ func (y *Yun189PC) CommonUpload(ctx context.Context, dstDir model.Obj, file mode
|
||||
fileMd5Hex := strings.ToUpper(hex.EncodeToString(fileMd5.Sum(nil)))
|
||||
sliceMd5Hex := fileMd5Hex
|
||||
if file.GetSize() > DEFAULT {
|
||||
sliceMd5Hex = strings.ToUpper(utils.GetMD5Encode(strings.Join(silceMd5Hexs, "\n")))
|
||||
sliceMd5Hex = strings.ToUpper(utils.GetMD5EncodeStr(strings.Join(silceMd5Hexs, "\n")))
|
||||
}
|
||||
|
||||
// Commit the upload
|
||||
var resp CommitMultiUploadFileResp
|
||||
_, err = y.request(fullUrl+"/commitMultiUploadFile", http.MethodGet,
|
||||
func(req *resty.Request) {
|
||||
req.SetContext(ctx)
|
||||
@ -499,23 +535,26 @@ func (y *Yun189PC) CommonUpload(ctx context.Context, dstDir model.Obj, file mode
|
||||
"lazyCheck": "1",
|
||||
"isLog": "0",
|
||||
"opertype": "3",
|
||||
}, nil)
|
||||
return err
|
||||
}, &resp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return resp.toFile(), nil
|
||||
}
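StreamUpload builds three digests as it goes: the whole-file MD5, one MD5 per part (kept both as upper-case hex and as "partNumber-base64" entries for partInfo), and a slice checksum that is the MD5 of the per-part hex digests joined with "\n". A standalone sketch of the same checksum scheme, using an in-memory buffer and a 1 MiB part size purely for illustration:

package main

import (
	"bytes"
	"crypto/md5"
	"encoding/base64"
	"encoding/hex"
	"fmt"
	"io"
	"strings"
)

func main() {
	data := bytes.Repeat([]byte("alist"), 1<<20) // ~5 MiB of fake data
	const partLen = 1 << 20                      // 1 MiB parts, for illustration

	fileMd5 := md5.New()
	sliceMd5 := md5.New()
	var sliceHexs []string
	var partInfo []string

	r := bytes.NewReader(data)
	for i := 1; ; i++ {
		sliceMd5.Reset()
		n, err := io.CopyN(io.MultiWriter(fileMd5, sliceMd5), r, partLen)
		if n > 0 {
			sum := sliceMd5.Sum(nil)
			sliceHexs = append(sliceHexs, strings.ToUpper(hex.EncodeToString(sum)))
			partInfo = append(partInfo, fmt.Sprint(i, "-", base64.StdEncoding.EncodeToString(sum)))
		}
		if err != nil { // io.EOF once the reader is drained
			break
		}
	}

	fileMd5Hex := strings.ToUpper(hex.EncodeToString(fileMd5.Sum(nil)))
	joined := md5.Sum([]byte(strings.Join(sliceHexs, "\n")))
	sliceMd5Hex := strings.ToUpper(hex.EncodeToString(joined[:]))

	fmt.Println("fileMd5Hex: ", fileMd5Hex)
	fmt.Println("sliceMd5Hex:", sliceMd5Hex)
	fmt.Println("partInfo:   ", strings.Join(partInfo, ","))
}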
|
||||
|
||||
// Rapid upload (hash-based instant upload)
|
||||
func (y *Yun189PC) FastUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (err error) {
|
||||
func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
|
||||
// The full-file MD5 is required, so the source must support io.Seek
|
||||
if _, ok := file.GetReadCloser().(*os.File); !ok {
|
||||
r, err := utils.CreateTempFile(file)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
file.Close()
|
||||
file.SetReadCloser(r)
|
||||
tempFile, err := utils.CreateTempFile(file.GetReadCloser(), file.GetSize())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer func() {
|
||||
_ = tempFile.Close()
|
||||
_ = os.Remove(tempFile.Name())
|
||||
}()
|
||||
|
||||
const DEFAULT int64 = 10485760
|
||||
var DEFAULT = partSize(file.GetSize())
|
||||
count := int(math.Ceil(float64(file.GetSize()) / float64(DEFAULT)))
|
||||
|
||||
// Compute the required hashes up front
|
||||
@ -524,26 +563,26 @@ func (y *Yun189PC) FastUpload(ctx context.Context, dstDir model.Obj, file model.
|
||||
silceMd5Hexs := make([]string, 0, count)
|
||||
silceMd5Base64s := make([]string, 0, count)
|
||||
for i := 1; i <= count; i++ {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return ctx.Err()
|
||||
default:
|
||||
if utils.IsCanceled(ctx) {
|
||||
return nil, ctx.Err()
|
||||
}
|
||||
|
||||
silceMd5.Reset()
|
||||
if _, err := io.CopyN(io.MultiWriter(fileMd5, silceMd5), file, DEFAULT); err != nil && err != io.EOF && err != io.ErrUnexpectedEOF {
|
||||
return err
|
||||
if _, err := io.CopyN(io.MultiWriter(fileMd5, silceMd5), tempFile, DEFAULT); err != nil && err != io.EOF && err != io.ErrUnexpectedEOF {
|
||||
return nil, err
|
||||
}
|
||||
md5Byte := silceMd5.Sum(nil)
|
||||
silceMd5Hexs = append(silceMd5Hexs, strings.ToUpper(hex.EncodeToString(md5Byte)))
|
||||
silceMd5Base64s = append(silceMd5Base64s, fmt.Sprint(i, "-", base64.StdEncoding.EncodeToString(md5Byte)))
|
||||
}
|
||||
file.GetReadCloser().(*os.File).Seek(0, io.SeekStart)
|
||||
if _, err = tempFile.Seek(0, io.SeekStart); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
fileMd5Hex := strings.ToUpper(hex.EncodeToString(fileMd5.Sum(nil)))
|
||||
sliceMd5Hex := fileMd5Hex
|
||||
if file.GetSize() > DEFAULT {
|
||||
sliceMd5Hex = strings.ToUpper(utils.GetMD5Encode(strings.Join(silceMd5Hexs, "\n")))
|
||||
sliceMd5Hex = strings.ToUpper(utils.GetMD5EncodeStr(strings.Join(silceMd5Hexs, "\n")))
|
||||
}
|
||||
|
||||
// Check whether rapid upload is possible
|
||||
@ -570,7 +609,7 @@ func (y *Yun189PC) FastUpload(ctx context.Context, dstDir model.Obj, file model.
|
||||
req.SetContext(ctx)
|
||||
}, params, &uploadInfo)
|
||||
if err != nil {
|
||||
return err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// The file is not in the drive yet, start uploading
|
||||
@ -584,34 +623,38 @@ func (y *Yun189PC) FastUpload(ctx context.Context, dstDir model.Obj, file model.
|
||||
"partInfo": strings.Join(silceMd5Base64s, ","),
|
||||
}, &uploadUrls)
|
||||
if err != nil {
|
||||
return err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
buf := make([]byte, DEFAULT)
|
||||
for i := 1; i <= count; i++ {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return ctx.Err()
|
||||
default:
|
||||
if utils.IsCanceled(ctx) {
|
||||
return nil, ctx.Err()
|
||||
}
|
||||
|
||||
n, err := io.ReadFull(tempFile, buf)
|
||||
if err != nil && err != io.EOF && err != io.ErrUnexpectedEOF {
|
||||
return nil, err
|
||||
}
|
||||
uploadData := uploadUrls.UploadUrls[fmt.Sprint("partNumber_", i)]
|
||||
res, err := y.putClient.R().
|
||||
SetContext(ctx).
|
||||
SetQueryParams(clientSuffix()).
|
||||
SetHeaders(ParseHttpHeader(uploadData.RequestHeader)).
|
||||
SetBody(io.LimitReader(file, DEFAULT)).
|
||||
Put(uploadData.RequestURL)
|
||||
if err != nil {
|
||||
err = retry.Do(func() error {
|
||||
_, err := y.put(ctx, uploadData.RequestURL, ParseHttpHeader(uploadData.RequestHeader), false, bytes.NewReader(buf[:n]))
|
||||
return err
|
||||
},
|
||||
retry.Context(ctx),
|
||||
retry.Attempts(3),
|
||||
retry.Delay(time.Second),
|
||||
retry.MaxDelay(5*time.Second))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if res.StatusCode() != http.StatusOK {
|
||||
return fmt.Errorf("updload fail,msg: %s", res.String())
|
||||
}
|
||||
|
||||
up(int(i * 100 / count))
|
||||
}
|
||||
}
|
||||
|
||||
// Commit
|
||||
var resp CommitMultiUploadFileResp
|
||||
_, err = y.request(fullUrl+"/commitMultiUploadFile", http.MethodGet,
|
||||
func(req *resty.Request) {
|
||||
req.SetContext(ctx)
|
||||
@ -619,15 +662,153 @@ func (y *Yun189PC) FastUpload(ctx context.Context, dstDir model.Obj, file model.
|
||||
"uploadFileId": uploadInfo.Data.UploadFileID,
|
||||
"isLog": "0",
|
||||
"opertype": "3",
|
||||
}, nil)
|
||||
return err
|
||||
}, &resp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return resp.toFile(), nil
|
||||
}
|
||||
|
||||
func (y *Yun189PC) isFamily() bool {
|
||||
// Legacy upload; the family cloud does not support overwriting
|
||||
func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
|
||||
// The full-file MD5 is required, so the source must support io.Seek
|
||||
tempFile, err := utils.CreateTempFile(file.GetReadCloser(), file.GetSize())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer func() {
|
||||
_ = tempFile.Close()
|
||||
_ = os.Remove(tempFile.Name())
|
||||
}()
|
||||
|
||||
// Compute the MD5
|
||||
fileMd5 := md5.New()
|
||||
if _, err := io.Copy(fileMd5, tempFile); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if _, err = tempFile.Seek(0, io.SeekStart); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
fileMd5Hex := strings.ToUpper(hex.EncodeToString(fileMd5.Sum(nil)))
|
||||
|
||||
// Create the upload session
|
||||
var uploadInfo CreateUploadFileResp
|
||||
|
||||
fullUrl := API_URL + "/createUploadFile.action"
|
||||
if y.isFamily() {
|
||||
fullUrl = API_URL + "/family/file/createFamilyFile.action"
|
||||
}
|
||||
_, err = y.post(fullUrl, func(req *resty.Request) {
|
||||
req.SetContext(ctx)
|
||||
if y.isFamily() {
|
||||
req.SetQueryParams(map[string]string{
|
||||
"familyId": y.FamilyID,
|
||||
"fileMd5": fileMd5Hex,
|
||||
"fileName": file.GetName(),
|
||||
"fileSize": fmt.Sprint(file.GetSize()),
|
||||
"parentId": dstDir.GetID(),
|
||||
"resumePolicy": "1",
|
||||
})
|
||||
} else {
|
||||
req.SetFormData(map[string]string{
|
||||
"parentFolderId": dstDir.GetID(),
|
||||
"fileName": file.GetName(),
|
||||
"size": fmt.Sprint(file.GetSize()),
|
||||
"md5": fileMd5Hex,
|
||||
"opertype": "3",
|
||||
"flag": "1",
|
||||
"resumePolicy": "1",
|
||||
"isLog": "0",
|
||||
// "baseFileId": "",
|
||||
// "lastWrite":"",
|
||||
// "localPath": strings.ReplaceAll(param.LocalPath, "\\", "/"),
|
||||
// "fileExt": "",
|
||||
})
|
||||
}
|
||||
}, &uploadInfo)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// The file is not in the drive yet, start uploading
|
||||
status := GetUploadFileStatusResp{CreateUploadFileResp: uploadInfo}
|
||||
for status.Size < file.GetSize() && status.FileDataExists != 1 {
|
||||
if utils.IsCanceled(ctx) {
|
||||
return nil, ctx.Err()
|
||||
}
|
||||
|
||||
header := map[string]string{
|
||||
"ResumePolicy": "1",
|
||||
"Expect": "100-continue",
|
||||
}
|
||||
|
||||
if y.isFamily() {
|
||||
header["FamilyId"] = fmt.Sprint(y.FamilyID)
|
||||
header["UploadFileId"] = fmt.Sprint(status.UploadFileId)
|
||||
} else {
|
||||
header["Edrive-UploadFileId"] = fmt.Sprint(status.UploadFileId)
|
||||
}
|
||||
|
||||
_, err := y.put(ctx, status.FileUploadUrl, header, true, io.NopCloser(tempFile))
|
||||
if err, ok := err.(*RespErr); ok && err.Code != "InputStreamReadError" {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Query the resume (checkpoint) status
|
||||
fullUrl := API_URL + "/getUploadFileStatus.action"
|
||||
if y.isFamily() {
|
||||
fullUrl = API_URL + "/family/file/getFamilyFileStatus.action"
|
||||
}
|
||||
_, err = y.get(fullUrl, func(req *resty.Request) {
|
||||
req.SetContext(ctx).SetQueryParams(map[string]string{
|
||||
"uploadFileId": fmt.Sprint(status.UploadFileId),
|
||||
"resumePolicy": "1",
|
||||
})
|
||||
if y.isFamily() {
|
||||
req.SetQueryParam("familyId", fmt.Sprint(y.FamilyID))
|
||||
}
|
||||
}, &status)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if _, err := tempFile.Seek(status.GetSize(), io.SeekStart); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
up(int(status.Size / file.GetSize()))
|
||||
}
|
||||
|
||||
// Commit
|
||||
var resp OldCommitUploadFileResp
|
||||
_, err = y.post(status.FileCommitUrl, func(req *resty.Request) {
|
||||
req.SetContext(ctx)
|
||||
if y.isFamily() {
|
||||
req.SetHeaders(map[string]string{
|
||||
"ResumePolicy": "1",
|
||||
"UploadFileId": fmt.Sprint(status.UploadFileId),
|
||||
"FamilyId": fmt.Sprint(y.FamilyID),
|
||||
})
|
||||
} else {
|
||||
req.SetFormData(map[string]string{
|
||||
"opertype": "3",
|
||||
"resumePolicy": "1",
|
||||
"uploadFileId": fmt.Sprint(status.UploadFileId),
|
||||
"isLog": "0",
|
||||
})
|
||||
}
|
||||
}, &resp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return resp.toFile(), nil
|
||||
}
|
||||
|
||||
func (y *Cloud189PC) isFamily() bool {
|
||||
return y.Type == "family"
|
||||
}
|
||||
|
||||
func (y *Yun189PC) isLogin() bool {
|
||||
func (y *Cloud189PC) isLogin() bool {
|
||||
if y.tokenInfo == nil {
|
||||
return false
|
||||
}
|
||||
@ -636,7 +817,7 @@ func (y *Yun189PC) isLogin() bool {
|
||||
}
|
||||
|
||||
// Get info on all family-cloud members
|
||||
func (y *Yun189PC) getFamilyInfoList() ([]FamilyInfoResp, error) {
|
||||
func (y *Cloud189PC) getFamilyInfoList() ([]FamilyInfoResp, error) {
|
||||
var resp FamilyInfoListResp
|
||||
_, err := y.get(API_URL+"/family/manage/getFamilyList.action", nil, &resp)
|
||||
if err != nil {
|
||||
@ -646,7 +827,7 @@ func (y *Yun189PC) getFamilyInfoList() ([]FamilyInfoResp, error) {
|
||||
}
|
||||
|
||||
// Extract the family-cloud ID
|
||||
func (y *Yun189PC) getFamilyID() (string, error) {
|
||||
func (y *Cloud189PC) getFamilyID() (string, error) {
|
||||
infos, err := y.getFamilyInfoList()
|
||||
if err != nil {
|
||||
return "", err
|
||||
@ -661,3 +842,33 @@ func (y *Yun189PC) getFamilyID() (string, error) {
|
||||
}
|
||||
return fmt.Sprint(infos[0].FamilyID), nil
|
||||
}
|
||||
|
||||
func (y *Cloud189PC) CheckBatchTask(aType string, taskID string) (*BatchTaskStateResp, error) {
|
||||
var resp BatchTaskStateResp
|
||||
_, err := y.post(API_URL+"/batch/checkBatchTask.action", func(req *resty.Request) {
|
||||
req.SetFormData(map[string]string{
|
||||
"type": aType,
|
||||
"taskId": taskID,
|
||||
})
|
||||
}, &resp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &resp, nil
|
||||
}
|
||||
|
||||
func (y *Cloud189PC) WaitBatchTask(aType string, taskID string, t time.Duration) error {
|
||||
for {
|
||||
state, err := y.CheckBatchTask(aType, taskID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
switch state.TaskStatus {
|
||||
case 2:
|
||||
return errors.New("there is a conflict with the target object")
|
||||
case 4:
|
||||
return nil
|
||||
}
|
||||
time.Sleep(t)
|
||||
}
|
||||
}
|
||||
|
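A minimal usage sketch for the batch-task helpers above; the task type string, the task ID, and the poll interval are illustrative assumptions, not values taken from this diff:

// Hypothetical caller: block until a previously submitted batch task finishes.
// TaskStatus 4 means success, 2 means a conflict with the target object.
func waitForBatchMove(y *Cloud189PC, taskID string) error {
	return y.WaitBatchTask("MOVE", taskID, 200*time.Millisecond) // assumed example values
}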
drivers/alias/driver.go (new file, 114 lines)
@@ -0,0 +1,114 @@
package alias

import (
	"context"
	"errors"
	"strings"

	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/errs"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/pkg/utils"
)

type Alias struct {
	model.Storage
	Addition
	pathMap     map[string][]string
	autoFlatten bool
	oneKey      string
}

func (d *Alias) Config() driver.Config {
	return config
}

func (d *Alias) GetAddition() driver.Additional {
	return &d.Addition
}

func (d *Alias) Init(ctx context.Context) error {
	if d.Paths == "" {
		return errors.New("paths is required")
	}
	d.pathMap = make(map[string][]string)
	for _, path := range strings.Split(d.Paths, "\n") {
		path = strings.TrimSpace(path)
		if path == "" {
			continue
		}
		k, v := getPair(path)
		d.pathMap[k] = append(d.pathMap[k], v)
	}
	if len(d.pathMap) == 1 {
		for k := range d.pathMap {
			d.oneKey = k
		}
		d.autoFlatten = true
	}
	return nil
}

func (d *Alias) Drop(ctx context.Context) error {
	d.pathMap = nil
	return nil
}

func (d *Alias) Get(ctx context.Context, path string) (model.Obj, error) {
	if utils.PathEqual(path, "/") {
		return &model.Object{
			Name:     "Root",
			IsFolder: true,
			Path:     "/",
		}, nil
	}
	root, sub := d.getRootAndPath(path)
	dsts, ok := d.pathMap[root]
	if !ok {
		return nil, errs.ObjectNotFound
	}
	for _, dst := range dsts {
		obj, err := d.get(ctx, path, dst, sub)
		if err == nil {
			return obj, nil
		}
	}
	return nil, errs.ObjectNotFound
}

func (d *Alias) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
	path := dir.GetPath()
	if utils.PathEqual(path, "/") && !d.autoFlatten {
		return d.listRoot(), nil
	}
	root, sub := d.getRootAndPath(path)
	dsts, ok := d.pathMap[root]
	if !ok {
		return nil, errs.ObjectNotFound
	}
	var objs []model.Obj
	for _, dst := range dsts {
		tmp, err := d.list(ctx, dst, sub)
		if err == nil {
			objs = append(objs, tmp...)
		}
	}
	return objs, nil
}

func (d *Alias) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
	root, sub := d.getRootAndPath(file.GetPath())
	dsts, ok := d.pathMap[root]
	if !ok {
		return nil, errs.ObjectNotFound
	}
	for _, dst := range dsts {
		link, err := d.link(ctx, dst, sub, args)
		if err == nil {
			return link, nil
		}
	}
	return nil, errs.ObjectNotFound
}

var _ driver.Driver = (*Alias)(nil)
drivers/alias/meta.go (new file, 27 lines)
@@ -0,0 +1,27 @@
package alias

import (
	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/op"
)

type Addition struct {
	// Usually one of two
	// driver.RootPath
	// define other
	Paths string `json:"paths" required:"true" type:"text"`
}

var config = driver.Config{
	Name:        "Alias",
	LocalSort:   true,
	NoCache:     true,
	NoUpload:    true,
	DefaultRoot: "/",
}

func init() {
	op.RegisterDriver(func() driver.Driver {
		return &Alias{}
	})
}
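The Paths addition above is a newline-separated list of alias entries; each line is either a bare path (the alias name then defaults to the path's base name) or a `name:/path` pair, as parsed by getPair in util.go below. An illustrative value, with made-up paths:

// Illustrative only: one explicitly named alias and one whose name ("movies")
// is derived from the base of the path.
const examplePaths = "docs:/115/documents\n/local/movies"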
drivers/alias/types.go (new file, 1 line)
@@ -0,0 +1 @@
package alias
drivers/alias/util.go (new file, 114 lines)
@@ -0,0 +1,114 @@
package alias

import (
	"context"
	"fmt"
	stdpath "path"
	"strings"

	"github.com/alist-org/alist/v3/internal/fs"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/internal/sign"
	"github.com/alist-org/alist/v3/pkg/utils"
	"github.com/alist-org/alist/v3/server/common"
)

func (d *Alias) listRoot() []model.Obj {
	var objs []model.Obj
	for k := range d.pathMap {
		obj := model.Object{
			Name:     k,
			IsFolder: true,
			Modified: d.Modified,
		}
		objs = append(objs, &obj)
	}
	return objs
}

// do others that are not defined in the Driver interface
func getPair(path string) (string, string) {
	//path = strings.TrimSpace(path)
	if strings.Contains(path, ":") {
		pair := strings.SplitN(path, ":", 2)
		if !strings.Contains(pair[0], "/") {
			return pair[0], pair[1]
		}
	}
	return stdpath.Base(path), path
}

func (d *Alias) getRootAndPath(path string) (string, string) {
	if d.autoFlatten {
		return d.oneKey, path
	}
	path = strings.TrimPrefix(path, "/")
	parts := strings.SplitN(path, "/", 2)
	if len(parts) == 1 {
		return parts[0], ""
	}
	return parts[0], parts[1]
}

func (d *Alias) get(ctx context.Context, path string, dst, sub string) (model.Obj, error) {
	obj, err := fs.Get(ctx, stdpath.Join(dst, sub), &fs.GetArgs{NoLog: true})
	if err != nil {
		return nil, err
	}
	return &model.Object{
		Path:     path,
		Name:     obj.GetName(),
		Size:     obj.GetSize(),
		Modified: obj.ModTime(),
		IsFolder: obj.IsDir(),
	}, nil
}

func (d *Alias) list(ctx context.Context, dst, sub string) ([]model.Obj, error) {
	objs, err := fs.List(ctx, stdpath.Join(dst, sub), &fs.ListArgs{NoLog: true})
	// the obj must implement the model.SetPath interface
	// return objs, err
	if err != nil {
		return nil, err
	}
	return utils.SliceConvert(objs, func(obj model.Obj) (model.Obj, error) {
		thumb, ok := model.GetThumb(obj)
		objRes := model.Object{
			Name:     obj.GetName(),
			Size:     obj.GetSize(),
			Modified: obj.ModTime(),
			IsFolder: obj.IsDir(),
		}
		if !ok {
			return &objRes, nil
		}
		return &model.ObjThumb{
			Object: objRes,
			Thumbnail: model.Thumbnail{
				Thumbnail: thumb,
			},
		}, nil
	})
}

func (d *Alias) link(ctx context.Context, dst, sub string, args model.LinkArgs) (*model.Link, error) {
	reqPath := stdpath.Join(dst, sub)
	storage, err := fs.GetStorage(reqPath, &fs.GetStoragesArgs{})
	if err != nil {
		return nil, err
	}
	_, err = fs.Get(ctx, reqPath, &fs.GetArgs{NoLog: true})
	if err != nil {
		return nil, err
	}
	if common.ShouldProxy(storage, stdpath.Base(sub)) {
		return &model.Link{
			URL: fmt.Sprintf("%s/p%s?sign=%s",
				common.GetApiUrl(args.HttpReq),
				utils.EncodePath(reqPath, true),
				sign.Sign(reqPath)),
		}, nil
	}
	link, _, err := fs.Link(ctx, reqPath, args)
	return link, err
}
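A test-style sketch of how getPair and getRootAndPath split alias entries and request paths; the inputs are invented examples, not values from the repository:

// Illustrative only; lives in package alias since both helpers are unexported.
func examplePathSplitting() {
	k, v := getPair("docs:/115/documents")
	fmt.Println(k, v) // docs /115/documents

	k, v = getPair("/local/movies")
	fmt.Println(k, v) // movies /local/movies

	// With autoFlatten disabled, a request for "/docs/report.pdf" resolves to
	// root "docs" and sub-path "report.pdf" via getRootAndPath.
}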
drivers/alist_v2/driver.go (new file, 118 lines)
@@ -0,0 +1,118 @@
package alist_v2

import (
	"context"

	"github.com/alist-org/alist/v3/drivers/base"
	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/errs"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/server/common"
)

type AListV2 struct {
	model.Storage
	Addition
}

func (d *AListV2) Config() driver.Config {
	return config
}

func (d *AListV2) GetAddition() driver.Additional {
	return &d.Addition
}

func (d *AListV2) Init(ctx context.Context) error {
	if len(d.Addition.Address) > 0 && string(d.Addition.Address[len(d.Addition.Address)-1]) == "/" {
		d.Addition.Address = d.Addition.Address[0 : len(d.Addition.Address)-1]
	}
	// TODO login / refresh token
	//op.MustSaveDriverStorage(d)
	return nil
}

func (d *AListV2) Drop(ctx context.Context) error {
	return nil
}

func (d *AListV2) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
	url := d.Address + "/api/public/path"
	var resp common.Resp[PathResp]
	_, err := base.RestyClient.R().
		SetResult(&resp).
		SetHeader("Authorization", d.AccessToken).
		SetBody(PathReq{
			PageNum:  0,
			PageSize: 0,
			Path:     dir.GetPath(),
			Password: d.Password,
		}).Post(url)
	if err != nil {
		return nil, err
	}
	var files []model.Obj
	for _, f := range resp.Data.Files {
		file := model.ObjThumb{
			Object: model.Object{
				Name:     f.Name,
				Modified: *f.UpdatedAt,
				Size:     f.Size,
				IsFolder: f.Type == 1,
			},
			Thumbnail: model.Thumbnail{Thumbnail: f.Thumbnail},
		}
		files = append(files, &file)
	}
	return files, nil
}

func (d *AListV2) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
	url := d.Address + "/api/public/path"
	var resp common.Resp[PathResp]
	_, err := base.RestyClient.R().
		SetResult(&resp).
		SetHeader("Authorization", d.AccessToken).
		SetBody(PathReq{
			PageNum:  0,
			PageSize: 0,
			Path:     file.GetPath(),
			Password: d.Password,
		}).Post(url)
	if err != nil {
		return nil, err
	}
	return &model.Link{
		URL: resp.Data.Files[0].Url,
	}, nil
}

func (d *AListV2) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
	return errs.NotImplement
}

func (d *AListV2) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
	return errs.NotImplement
}

func (d *AListV2) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
	return errs.NotImplement
}

func (d *AListV2) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
	return errs.NotImplement
}

func (d *AListV2) Remove(ctx context.Context, obj model.Obj) error {
	return errs.NotImplement
}

func (d *AListV2) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
	return errs.NotImplement
}

//func (d *AList) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
//	return nil, errs.NotSupport
//}

var _ driver.Driver = (*AListV2)(nil)
drivers/alist_v2/meta.go (new file, 26 lines)
@@ -0,0 +1,26 @@
package alist_v2

import (
	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/op"
)

type Addition struct {
	driver.RootPath
	Address     string `json:"url" required:"true"`
	Password    string `json:"password"`
	AccessToken string `json:"access_token"`
}

var config = driver.Config{
	Name:        "AList V2",
	LocalSort:   true,
	NoUpload:    true,
	DefaultRoot: "/",
}

func init() {
	op.RegisterDriver(func() driver.Driver {
		return &AListV2{}
	})
}
drivers/alist_v2/types.go (new file, 31 lines)
@@ -0,0 +1,31 @@
package alist_v2

import (
	"time"
)

type File struct {
	Id        string     `json:"-"`
	Name      string     `json:"name"`
	Size      int64      `json:"size"`
	Type      int        `json:"type"`
	Driver    string     `json:"driver"`
	UpdatedAt *time.Time `json:"updated_at"`
	Thumbnail string     `json:"thumbnail"`
	Url       string     `json:"url"`
	SizeStr   string     `json:"size_str"`
	TimeStr   string     `json:"time_str"`
}

type PathResp struct {
	Type string `json:"type"`
	//Meta Meta `json:"meta"`
	Files []File `json:"files"`
}

type PathReq struct {
	PageNum  int    `json:"page_num"`
	PageSize int    `json:"page_size"`
	Password string `json:"password"`
	Path     string `json:"path"`
}
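For reference, a hedged sketch of the call the V2 driver builds against /api/public/path using the types above; the host and password are placeholders:

// Illustrative only: list the root of a remote AList V2 site.
func exampleListRoot() (*common.Resp[PathResp], error) {
	var resp common.Resp[PathResp]
	_, err := base.RestyClient.R().
		SetResult(&resp).
		SetBody(PathReq{Path: "/", Password: "example"}). // placeholder values
		Post("https://v2.example.com/api/public/path")    // placeholder host
	return &resp, err
}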
drivers/alist_v2/util.go (new file, 1 line)
@@ -0,0 +1 @@
package alist_v2
drivers/alist_v3/driver.go (new file, 188 lines)
@@ -0,0 +1,188 @@
|
||||
package alist_v3
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"path"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/conf"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/alist-org/alist/v3/server/common"
|
||||
"github.com/go-resty/resty/v2"
|
||||
)
|
||||
|
||||
type AListV3 struct {
|
||||
model.Storage
|
||||
Addition
|
||||
}
|
||||
|
||||
func (d *AListV3) Config() driver.Config {
|
||||
return config
|
||||
}
|
||||
|
||||
func (d *AListV3) GetAddition() driver.Additional {
|
||||
return &d.Addition
|
||||
}
|
||||
|
||||
func (d *AListV3) Init(ctx context.Context) error {
|
||||
d.Addition.Address = strings.TrimSuffix(d.Addition.Address, "/")
|
||||
var resp common.Resp[MeResp]
|
||||
_, err := d.request("/me", http.MethodGet, func(req *resty.Request) {
|
||||
req.SetResult(&resp)
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// if the username is not empty and the username is not the same as the current username, then login again
|
||||
if d.Username != "" && d.Username != resp.Data.Username {
|
||||
err = d.login()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
// re-get the user info
|
||||
_, err = d.request("/me", http.MethodGet, func(req *resty.Request) {
|
||||
req.SetResult(&resp)
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if resp.Data.Role == model.GUEST {
|
||||
url := d.Address + "/api/public/settings"
|
||||
res, err := base.RestyClient.R().Get(url)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
allowMounted := utils.Json.Get(res.Body(), "data", conf.AllowMounted).ToString() == "true"
|
||||
if !allowMounted {
|
||||
return fmt.Errorf("the site does not allow mounted")
|
||||
}
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *AListV3) Drop(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *AListV3) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||
var resp common.Resp[FsListResp]
|
||||
_, err := d.request("/fs/list", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetResult(&resp).SetBody(ListReq{
|
||||
PageReq: model.PageReq{
|
||||
Page: 1,
|
||||
PerPage: 0,
|
||||
},
|
||||
Path: dir.GetPath(),
|
||||
Password: d.MetaPassword,
|
||||
Refresh: false,
|
||||
})
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var files []model.Obj
|
||||
for _, f := range resp.Data.Content {
|
||||
file := model.ObjThumb{
|
||||
Object: model.Object{
|
||||
Name: f.Name,
|
||||
Modified: f.Modified,
|
||||
Size: f.Size,
|
||||
IsFolder: f.IsDir,
|
||||
},
|
||||
Thumbnail: model.Thumbnail{Thumbnail: f.Thumb},
|
||||
}
|
||||
files = append(files, &file)
|
||||
}
|
||||
return files, nil
|
||||
}
|
||||
|
||||
func (d *AListV3) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
var resp common.Resp[FsGetResp]
|
||||
_, err := d.request("/fs/get", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetResult(&resp).SetBody(FsGetReq{
|
||||
Path: file.GetPath(),
|
||||
Password: d.MetaPassword,
|
||||
})
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &model.Link{
|
||||
URL: resp.Data.RawURL,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (d *AListV3) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||
_, err := d.request("/fs/mkdir", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(MkdirOrLinkReq{
|
||||
Path: path.Join(parentDir.GetPath(), dirName),
|
||||
})
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *AListV3) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
_, err := d.request("/fs/move", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(MoveCopyReq{
|
||||
SrcDir: path.Dir(srcObj.GetPath()),
|
||||
DstDir: dstDir.GetPath(),
|
||||
Names: []string{srcObj.GetName()},
|
||||
})
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *AListV3) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||
_, err := d.request("/fs/rename", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(RenameReq{
|
||||
Path: srcObj.GetPath(),
|
||||
Name: newName,
|
||||
})
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *AListV3) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
_, err := d.request("/fs/copy", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(MoveCopyReq{
|
||||
SrcDir: path.Dir(srcObj.GetPath()),
|
||||
DstDir: dstDir.GetPath(),
|
||||
Names: []string{srcObj.GetName()},
|
||||
})
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *AListV3) Remove(ctx context.Context, obj model.Obj) error {
|
||||
_, err := d.request("/fs/remove", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(RemoveReq{
|
||||
Dir: path.Dir(obj.GetPath()),
|
||||
Names: []string{obj.GetName()},
|
||||
})
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *AListV3) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||
_, err := d.request("/fs/put", http.MethodPut, func(req *resty.Request) {
|
||||
req.SetHeader("File-Path", path.Join(dstDir.GetPath(), stream.GetName())).
|
||||
SetHeader("Password", d.MetaPassword).
|
||||
SetHeader("Content-Length", strconv.FormatInt(stream.GetSize(), 10)).
|
||||
SetContentLength(true).
|
||||
SetBody(stream.GetReadCloser())
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
//func (d *AList) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
|
||||
// return nil, errs.NotSupport
|
||||
//}
|
||||
|
||||
var _ driver.Driver = (*AListV3)(nil)
|
drivers/alist_v3/meta.go (new file, 28 lines)
@@ -0,0 +1,28 @@
|
||||
package alist_v3
|
||||
|
||||
import (
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
)
|
||||
|
||||
type Addition struct {
|
||||
driver.RootPath
|
||||
Address string `json:"url" required:"true"`
|
||||
MetaPassword string `json:"meta_password"`
|
||||
Username string `json:"username"`
|
||||
Password string `json:"password"`
|
||||
Token string `json:"token"`
|
||||
}
|
||||
|
||||
var config = driver.Config{
|
||||
Name: "AList V3",
|
||||
LocalSort: true,
|
||||
DefaultRoot: "/",
|
||||
CheckStatus: true,
|
||||
}
|
||||
|
||||
func init() {
|
||||
op.RegisterDriver(func() driver.Driver {
|
||||
return &AListV3{}
|
||||
})
|
||||
}
|
drivers/alist_v3/types.go (new file, 81 lines)
@@ -0,0 +1,81 @@
|
||||
package alist_v3
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
)
|
||||
|
||||
type ListReq struct {
|
||||
model.PageReq
|
||||
Path string `json:"path" form:"path"`
|
||||
Password string `json:"password" form:"password"`
|
||||
Refresh bool `json:"refresh"`
|
||||
}
|
||||
|
||||
type ObjResp struct {
|
||||
Name string `json:"name"`
|
||||
Size int64 `json:"size"`
|
||||
IsDir bool `json:"is_dir"`
|
||||
Modified time.Time `json:"modified"`
|
||||
Sign string `json:"sign"`
|
||||
Thumb string `json:"thumb"`
|
||||
Type int `json:"type"`
|
||||
}
|
||||
|
||||
type FsListResp struct {
|
||||
Content []ObjResp `json:"content"`
|
||||
Total int64 `json:"total"`
|
||||
Readme string `json:"readme"`
|
||||
Write bool `json:"write"`
|
||||
Provider string `json:"provider"`
|
||||
}
|
||||
|
||||
type FsGetReq struct {
|
||||
Path string `json:"path" form:"path"`
|
||||
Password string `json:"password" form:"password"`
|
||||
}
|
||||
|
||||
type FsGetResp struct {
|
||||
ObjResp
|
||||
RawURL string `json:"raw_url"`
|
||||
Readme string `json:"readme"`
|
||||
Provider string `json:"provider"`
|
||||
Related []ObjResp `json:"related"`
|
||||
}
|
||||
|
||||
type MkdirOrLinkReq struct {
|
||||
Path string `json:"path" form:"path"`
|
||||
}
|
||||
|
||||
type MoveCopyReq struct {
|
||||
SrcDir string `json:"src_dir"`
|
||||
DstDir string `json:"dst_dir"`
|
||||
Names []string `json:"names"`
|
||||
}
|
||||
|
||||
type RenameReq struct {
|
||||
Path string `json:"path"`
|
||||
Name string `json:"name"`
|
||||
}
|
||||
|
||||
type RemoveReq struct {
|
||||
Dir string `json:"dir"`
|
||||
Names []string `json:"names"`
|
||||
}
|
||||
|
||||
type LoginResp struct {
|
||||
Token string `json:"token"`
|
||||
}
|
||||
|
||||
type MeResp struct {
|
||||
Id int `json:"id"`
|
||||
Username string `json:"username"`
|
||||
Password string `json:"password"`
|
||||
BasePath string `json:"base_path"`
|
||||
Role int `json:"role"`
|
||||
Disabled bool `json:"disabled"`
|
||||
Permission int `json:"permission"`
|
||||
SsoId string `json:"sso_id"`
|
||||
Otp bool `json:"otp"`
|
||||
}
|
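A hedged illustration of the JSON body the V3 driver builds for /api/fs/list from ListReq above; the path and password are placeholders, and encoding/json is assumed to be imported:

// Illustrative only: mirrors the body set in the driver's List call.
func exampleListBody() ([]byte, error) {
	return json.Marshal(ListReq{
		PageReq:  model.PageReq{Page: 1, PerPage: 0}, // 0 = no per-page limit
		Path:     "/music",        // placeholder
		Password: "meta-password", // placeholder
		Refresh:  false,
	})
}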
drivers/alist_v3/util.go (new file, 58 lines)
@@ -0,0 +1,58 @@
|
||||
package alist_v3
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/alist-org/alist/v3/server/common"
|
||||
"github.com/go-resty/resty/v2"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
func (d *AListV3) login() error {
|
||||
var resp common.Resp[LoginResp]
|
||||
_, err := d.request("/auth/login", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetResult(&resp).SetBody(base.Json{
|
||||
"username": d.Username,
|
||||
"password": d.Password,
|
||||
})
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
d.Token = resp.Data.Token
|
||||
op.MustSaveDriverStorage(d)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *AListV3) request(api, method string, callback base.ReqCallback, retry ...bool) ([]byte, error) {
|
||||
url := d.Address + "/api" + api
|
||||
req := base.RestyClient.R()
|
||||
req.SetHeader("Authorization", d.Token)
|
||||
if callback != nil {
|
||||
callback(req)
|
||||
}
|
||||
res, err := req.Execute(method, url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
log.Debugf("[alist_v3] response body: %s", res.String())
|
||||
if res.StatusCode() >= 400 {
|
||||
return nil, fmt.Errorf("request failed, status: %s", res.Status())
|
||||
}
|
||||
code := utils.Json.Get(res.Body(), "code").ToInt()
|
||||
if code != 200 {
|
||||
if (code == 401 || code == 403) && !utils.IsBool(retry...) {
|
||||
err = d.login()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return d.request(api, method, callback, true)
|
||||
}
|
||||
return nil, fmt.Errorf("request failed,code: %d, message: %s", code, utils.Json.Get(res.Body(), "message").ToString())
|
||||
}
|
||||
return res.Body(), nil
|
||||
}
|
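The request helper above treats any HTTP status of 400 or higher as an error, and when the body carries code 401 or 403 it logs in again and retries exactly once (the retry variadic flag prevents loops). A minimal sketch of a typical call site, using the /me endpoint that Init already queries:

// Illustrative only: fetch the current user, letting request() refresh the token on 401/403.
func (d *AListV3) me() (*common.Resp[MeResp], error) {
	var resp common.Resp[MeResp]
	_, err := d.request("/me", http.MethodGet, func(req *resty.Request) {
		req.SetResult(&resp)
	})
	return &resp, err
}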
@ -31,6 +31,7 @@ type AliDrive struct {
|
||||
AccessToken string
|
||||
cron *cron.Cron
|
||||
DriveId string
|
||||
UserID string
|
||||
}
|
||||
|
||||
func (d *AliDrive) Config() driver.Config {
|
||||
@ -38,18 +39,13 @@ func (d *AliDrive) Config() driver.Config {
|
||||
}
|
||||
|
||||
func (d *AliDrive) GetAddition() driver.Additional {
|
||||
return d.Addition
|
||||
return &d.Addition
|
||||
}
|
||||
|
||||
func (d *AliDrive) Init(ctx context.Context, storage model.Storage) error {
|
||||
d.Storage = storage
|
||||
err := utils.Json.UnmarshalFromString(d.Storage.Addition, &d.Addition)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
func (d *AliDrive) Init(ctx context.Context) error {
|
||||
// TODO login / refresh token
|
||||
//op.MustSaveDriverStorage(d)
|
||||
err = d.refreshToken()
|
||||
err := d.refreshToken()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@ -59,6 +55,7 @@ func (d *AliDrive) Init(ctx context.Context, storage model.Storage) error {
|
||||
return err
|
||||
}
|
||||
d.DriveId = utils.Json.Get(res, "default_drive_id").ToString()
|
||||
d.UserID = utils.Json.Get(res, "user_id").ToString()
|
||||
d.cron = cron.NewCron(time.Hour * 2)
|
||||
d.cron.Do(func() {
|
||||
err := d.refreshToken()
|
||||
@ -66,7 +63,22 @@ func (d *AliDrive) Init(ctx context.Context, storage model.Storage) error {
|
||||
log.Errorf("%+v", err)
|
||||
}
|
||||
})
|
||||
return err
|
||||
if global.Has(d.UserID) {
|
||||
return nil
|
||||
}
|
||||
// init deviceID
|
||||
deviceID := utils.GetSHA256Encode([]byte(d.UserID))
|
||||
// init privateKey
|
||||
privateKey, _ := NewPrivateKeyFromHex(deviceID)
|
||||
state := State{
|
||||
privateKey: privateKey,
|
||||
deviceID: deviceID,
|
||||
}
|
||||
// store state
|
||||
global.Store(d.UserID, &state)
|
||||
// init signature
|
||||
d.sign()
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *AliDrive) Drop(ctx context.Context) error {
|
||||
@ -86,11 +98,6 @@ func (d *AliDrive) List(ctx context.Context, dir model.Obj, args model.ListArgs)
|
||||
})
|
||||
}
|
||||
|
||||
//func (d *AliDrive) Get(ctx context.Context, path string) (model.Obj, error) {
|
||||
// // TODO this is optional
|
||||
// return nil, errs.NotImplement
|
||||
//}
|
||||
|
||||
func (d *AliDrive) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
data := base.Json{
|
||||
"drive_id": d.DriveId,
|
||||
@ -179,17 +186,27 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, stream model.FileS
|
||||
"type": "file",
|
||||
}
|
||||
|
||||
var localFile *os.File
|
||||
if fileStream, ok := file.ReadCloser.(*model.FileStream); ok {
|
||||
localFile, _ = fileStream.ReadCloser.(*os.File)
|
||||
}
|
||||
if d.RapidUpload {
|
||||
buf := bytes.NewBuffer(make([]byte, 0, 1024))
|
||||
io.CopyN(buf, file, 1024)
|
||||
reqBody["pre_hash"] = utils.GetSHA1Encode(buf.String())
|
||||
// 把头部拼接回去
|
||||
file.ReadCloser = struct {
|
||||
io.Reader
|
||||
io.Closer
|
||||
}{
|
||||
Reader: io.MultiReader(buf, file),
|
||||
Closer: file,
|
||||
reqBody["pre_hash"] = utils.GetSHA1Encode(buf.Bytes())
|
||||
if localFile != nil {
|
||||
if _, err := localFile.Seek(0, io.SeekStart); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
// 把头部拼接回去
|
||||
file.ReadCloser = struct {
|
||||
io.Reader
|
||||
io.Closer
|
||||
}{
|
||||
Reader: io.MultiReader(buf, file),
|
||||
Closer: file,
|
||||
}
|
||||
}
|
||||
} else {
|
||||
reqBody["content_hash_name"] = "none"
|
||||
@ -206,18 +223,28 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, stream model.FileS
|
||||
}
|
||||
|
||||
if d.RapidUpload && e.Code == "PreHashMatched" {
|
||||
tempFile, err := os.CreateTemp(conf.Conf.TempDir, "file-*")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() {
|
||||
_ = tempFile.Close()
|
||||
_ = os.Remove(tempFile.Name())
|
||||
}()
|
||||
delete(reqBody, "pre_hash")
|
||||
h := sha1.New()
|
||||
if _, err = io.Copy(io.MultiWriter(tempFile, h), file); err != nil {
|
||||
return err
|
||||
if localFile != nil {
|
||||
if err = utils.CopyWithCtx(ctx, h, localFile, 0, nil); err != nil {
|
||||
return err
|
||||
}
|
||||
if _, err = localFile.Seek(0, io.SeekStart); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
tempFile, err := os.CreateTemp(conf.Conf.TempDir, "file-*")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() {
|
||||
_ = tempFile.Close()
|
||||
_ = os.Remove(tempFile.Name())
|
||||
}()
|
||||
if err = utils.CopyWithCtx(ctx, io.MultiWriter(tempFile, h), file, 0, nil); err != nil {
|
||||
return err
|
||||
}
|
||||
localFile = tempFile
|
||||
}
|
||||
reqBody["content_hash"] = hex.EncodeToString(h.Sum(nil))
|
||||
reqBody["content_hash_name"] = "sha1"
|
||||
@ -232,10 +259,13 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, stream model.FileS
|
||||
(t.file.slice(o.toNumber(), Math.min(o.plus(8).toNumber(), t.file.size)))
|
||||
*/
|
||||
buf := make([]byte, 8)
|
||||
r, _ := new(big.Int).SetString(utils.GetMD5Encode(d.AccessToken)[:16], 16)
|
||||
r, _ := new(big.Int).SetString(utils.GetMD5EncodeStr(d.AccessToken)[:16], 16)
|
||||
i := new(big.Int).SetInt64(file.GetSize())
|
||||
o := r.Mod(r, i)
|
||||
n, _ := io.NewSectionReader(tempFile, o.Int64(), 8).Read(buf[:8])
|
||||
o := new(big.Int).SetInt64(0)
|
||||
if file.GetSize() > 0 {
|
||||
o = r.Mod(r, i)
|
||||
}
|
||||
n, _ := io.NewSectionReader(localFile, o.Int64(), 8).Read(buf[:8])
|
||||
reqBody["proof_code"] = base64.StdEncoding.EncodeToString(buf[:n])
|
||||
|
||||
_, err, e := d.request("https://api.aliyundrive.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
|
||||
@ -248,23 +278,33 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, stream model.FileS
|
||||
return nil
|
||||
}
|
||||
// 秒传失败
|
||||
if _, err = tempFile.Seek(0, io.SeekStart); err != nil {
|
||||
if _, err = localFile.Seek(0, io.SeekStart); err != nil {
|
||||
return err
|
||||
}
|
||||
file.ReadCloser = tempFile
|
||||
file.ReadCloser = localFile
|
||||
}
|
||||
|
||||
for i, partInfo := range resp.PartInfoList {
|
||||
req, err := http.NewRequest("PUT", partInfo.UploadUrl, io.LimitReader(file, DEFAULT))
|
||||
if utils.IsCanceled(ctx) {
|
||||
return ctx.Err()
|
||||
}
|
||||
url := partInfo.UploadUrl
|
||||
if d.InternalUpload {
|
||||
url = partInfo.InternalUploadUrl
|
||||
}
|
||||
req, err := http.NewRequest("PUT", url, io.LimitReader(file, DEFAULT))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req = req.WithContext(ctx)
|
||||
res, err := base.HttpClient.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
res.Body.Close()
|
||||
up(i * 100 / count)
|
||||
if count > 0 {
|
||||
up(i * 100 / count)
|
||||
}
|
||||
}
|
||||
var resp2 base.Json
|
||||
_, err, e = d.request("https://api.aliyundrive.com/v2/file/complete", http.MethodPost, func(req *resty.Request) {
|
||||
@ -284,7 +324,30 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, stream model.FileS
|
||||
}
|
||||
|
||||
func (d *AliDrive) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
|
||||
return nil, errs.NotSupport
|
||||
var resp base.Json
|
||||
var url string
|
||||
data := base.Json{
|
||||
"drive_id": d.DriveId,
|
||||
"file_id": args.Obj.GetID(),
|
||||
}
|
||||
switch args.Method {
|
||||
case "doc_preview":
|
||||
url = "https://api.aliyundrive.com/v2/file/get_office_preview_url"
|
||||
data["access_token"] = d.AccessToken
|
||||
case "video_preview":
|
||||
url = "https://api.aliyundrive.com/v2/file/get_video_preview_play_info"
|
||||
data["category"] = "live_transcoding"
|
||||
data["url_expire_sec"] = 14400
|
||||
default:
|
||||
return nil, errs.NotSupport
|
||||
}
|
||||
_, err, _ := d.request(url, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data)
|
||||
}, &resp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return resp, nil
|
||||
}
|
||||
|
||||
var _ driver.Driver = (*AliDrive)(nil)
|
||||
|
drivers/aliyundrive/global.go (new file, 16 lines)
@@ -0,0 +1,16 @@
|
||||
package aliyundrive
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
|
||||
"github.com/alist-org/alist/v3/pkg/generic_sync"
|
||||
)
|
||||
|
||||
type State struct {
|
||||
deviceID string
|
||||
signature string
|
||||
retry int
|
||||
privateKey *ecdsa.PrivateKey
|
||||
}
|
||||
|
||||
var global = generic_sync.MapOf[string, *State]{}
|
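The global map above keys per-user device state by user ID; Has, Load, and Store are the operations the driver uses elsewhere in this diff. A short hedged sketch (utils is assumed to be imported, as it is in driver.go):

// Illustrative only: create state for a user once, then reuse it.
func ensureState(userID string) *State {
	if s, ok := global.Load(userID); ok {
		return s
	}
	s := &State{deviceID: utils.GetSHA256Encode([]byte(userID))} // same derivation as Init
	global.Store(userID, s)
	return s
}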
drivers/aliyundrive/help.go (new file, 66 lines)
@@ -0,0 +1,66 @@
|
||||
package aliyundrive
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
"crypto/rand"
|
||||
"encoding/hex"
|
||||
"math/big"
|
||||
|
||||
"github.com/dustinxie/ecc"
|
||||
)
|
||||
|
||||
func NewPrivateKey() (*ecdsa.PrivateKey, error) {
|
||||
p256k1 := ecc.P256k1()
|
||||
return ecdsa.GenerateKey(p256k1, rand.Reader)
|
||||
}
|
||||
|
||||
func NewPrivateKeyFromHex(hex_ string) (*ecdsa.PrivateKey, error) {
|
||||
data, err := hex.DecodeString(hex_)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return NewPrivateKeyFromBytes(data), nil
|
||||
|
||||
}
|
||||
|
||||
func NewPrivateKeyFromBytes(priv []byte) *ecdsa.PrivateKey {
|
||||
p256k1 := ecc.P256k1()
|
||||
x, y := p256k1.ScalarBaseMult(priv)
|
||||
return &ecdsa.PrivateKey{
|
||||
PublicKey: ecdsa.PublicKey{
|
||||
Curve: p256k1,
|
||||
X: x,
|
||||
Y: y,
|
||||
},
|
||||
D: new(big.Int).SetBytes(priv),
|
||||
}
|
||||
}
|
||||
|
||||
func PrivateKeyToHex(private *ecdsa.PrivateKey) string {
|
||||
return hex.EncodeToString(PrivateKeyToBytes(private))
|
||||
}
|
||||
|
||||
func PrivateKeyToBytes(private *ecdsa.PrivateKey) []byte {
|
||||
return private.D.Bytes()
|
||||
}
|
||||
|
||||
func PublicKeyToHex(public *ecdsa.PublicKey) string {
|
||||
return hex.EncodeToString(PublicKeyToBytes(public))
|
||||
}
|
||||
|
||||
func PublicKeyToBytes(public *ecdsa.PublicKey) []byte {
|
||||
x := public.X.Bytes()
|
||||
if len(x) < 32 {
|
||||
for i := 0; i < 32-len(x); i++ {
|
||||
x = append([]byte{0}, x...)
|
||||
}
|
||||
}
|
||||
|
||||
y := public.Y.Bytes()
|
||||
if len(y) < 32 {
|
||||
for i := 0; i < 32-len(y); i++ {
|
||||
y = append([]byte{0}, y...)
|
||||
}
|
||||
}
|
||||
return append(x, y...)
|
||||
}
|
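A brief sketch of how the key helpers above fit together: generate a key, round-trip it through hex, and export the padded public key (illustrative only, not code from this commit range):

// Illustrative only.
func exampleKeyRoundTrip() (string, error) {
	priv, err := NewPrivateKey()
	if err != nil {
		return "", err
	}
	restored, err := NewPrivateKeyFromHex(PrivateKeyToHex(priv))
	if err != nil {
		return "", err
	}
	// X||Y, each coordinate zero-padded to 32 bytes.
	return PublicKeyToHex(&restored.PublicKey), nil
}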
@ -7,21 +7,24 @@ import (
|
||||
|
||||
type Addition struct {
|
||||
driver.RootID
|
||||
RefreshToken string `json:"refresh_token" required:"true"`
|
||||
RefreshToken string `json:"refresh_token" required:"true"`
|
||||
//DeviceID string `json:"device_id" required:"true"`
|
||||
OrderBy string `json:"order_by" type:"select" options:"name,size,updated_at,created_at"`
|
||||
OrderDirection string `json:"order_direction" type:"select" options:"ASC,DESC"`
|
||||
RapidUpload bool `json:"rapid_upload"`
|
||||
InternalUpload bool `json:"internal_upload"`
|
||||
}
|
||||
|
||||
var config = driver.Config{
|
||||
Name: "Aliyundrive",
|
||||
DefaultRoot: "root",
|
||||
}
|
||||
|
||||
func New() driver.Driver {
|
||||
return &AliDrive{}
|
||||
Alert: `warning|There may be an infinite loop bug in this driver.
|
||||
Deprecated, no longer maintained and will be removed in a future version.
|
||||
We recommend using the official driver AliyundriveOpen.`,
|
||||
}
|
||||
|
||||
func init() {
|
||||
op.RegisterDriver(config, New)
|
||||
op.RegisterDriver(func() driver.Driver {
|
||||
return &AliDrive{}
|
||||
})
|
||||
}
|
||||
|
@ -40,6 +40,7 @@ func fileToObj(f File) *model.ObjThumb {
|
||||
Modified: f.UpdatedAt,
|
||||
IsFolder: f.Type == "folder",
|
||||
},
|
||||
Thumbnail: model.Thumbnail{Thumbnail: f.Thumbnail},
|
||||
}
|
||||
}
|
||||
|
||||
@ -47,7 +48,8 @@ type UploadResp struct {
|
||||
FileId string `json:"file_id"`
|
||||
UploadId string `json:"upload_id"`
|
||||
PartInfoList []struct {
|
||||
UploadUrl string `json:"upload_url"`
|
||||
UploadUrl string `json:"upload_url"`
|
||||
InternalUploadUrl string `json:"internal_upload_url"`
|
||||
} `json:"part_info_list"`
|
||||
|
||||
RapidUpload bool `json:"rapid_upload"`
|
||||
|
@ -1,6 +1,8 @@
|
||||
package aliyundrive
|
||||
|
||||
import (
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
@ -8,9 +10,51 @@ import (
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/dustinxie/ecc"
|
||||
"github.com/go-resty/resty/v2"
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
func (d *AliDrive) createSession() error {
|
||||
state, ok := global.Load(d.UserID)
|
||||
if !ok {
|
||||
return fmt.Errorf("can't load user state, user_id: %s", d.UserID)
|
||||
}
|
||||
d.sign()
|
||||
state.retry++
|
||||
if state.retry > 3 {
|
||||
state.retry = 0
|
||||
return fmt.Errorf("createSession failed after three retries")
|
||||
}
|
||||
_, err, _ := d.request("https://api.aliyundrive.com/users/v1/users/device/create_session", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"deviceName": "samsung",
|
||||
"modelName": "SM-G9810",
|
||||
"nonce": 0,
|
||||
"pubKey": PublicKeyToHex(&state.privateKey.PublicKey),
|
||||
"refreshToken": d.RefreshToken,
|
||||
})
|
||||
}, nil)
|
||||
if err == nil {
|
||||
state.retry = 0
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// func (d *AliDrive) renewSession() error {
|
||||
// _, err, _ := d.request("https://api.aliyundrive.com/users/v1/users/device/renew_session", http.MethodPost, nil, nil)
|
||||
// return err
|
||||
// }
|
||||
|
||||
func (d *AliDrive) sign() {
|
||||
state, _ := global.Load(d.UserID)
|
||||
secpAppID := "5dde4e1bdf9e4966b387ba58f4b3fdc3"
|
||||
singdata := fmt.Sprintf("%s:%s:%s:%d", secpAppID, state.deviceID, d.UserID, 0)
|
||||
hash := sha256.Sum256([]byte(singdata))
|
||||
data, _ := ecc.SignBytes(state.privateKey, hash[:], ecc.RecID|ecc.LowerS)
|
||||
state.signature = hex.EncodeToString(data) //strconv.Itoa(state.nonce)
|
||||
}
|
||||
|
||||
// do others that not defined in Driver interface
|
||||
|
||||
func (d *AliDrive) refreshToken() error {
|
||||
@ -29,6 +73,9 @@ func (d *AliDrive) refreshToken() error {
|
||||
if e.Code != "" {
|
||||
return fmt.Errorf("failed to refresh token: %s", e.Message)
|
||||
}
|
||||
if resp.RefreshToken == "" {
|
||||
return errors.New("failed to refresh token: refresh token is empty")
|
||||
}
|
||||
d.RefreshToken, d.AccessToken = resp.RefreshToken, resp.AccessToken
|
||||
op.MustSaveDriverStorage(d)
|
||||
return nil
|
||||
@ -36,9 +83,24 @@ func (d *AliDrive) refreshToken() error {
|
||||
|
||||
func (d *AliDrive) request(url, method string, callback base.ReqCallback, resp interface{}) ([]byte, error, RespErr) {
|
||||
req := base.RestyClient.R()
|
||||
req.SetHeader("Authorization", "Bearer\t"+d.AccessToken)
|
||||
req.SetHeader("content-type", "application/json")
|
||||
req.SetHeader("origin", "https://www.aliyundrive.com")
|
||||
state, ok := global.Load(d.UserID)
|
||||
if !ok {
|
||||
if url == "https://api.aliyundrive.com/v2/user/get" {
|
||||
state = &State{}
|
||||
} else {
|
||||
return nil, fmt.Errorf("can't load user state, user_id: %s", d.UserID), RespErr{}
|
||||
}
|
||||
}
|
||||
req.SetHeaders(map[string]string{
|
||||
"Authorization": "Bearer\t" + d.AccessToken,
|
||||
"content-type": "application/json",
|
||||
"origin": "https://www.aliyundrive.com",
|
||||
"Referer": "https://aliyundrive.com/",
|
||||
"X-Signature": state.signature,
|
||||
"x-request-id": uuid.NewString(),
|
||||
"X-Canary": "client=Android,app=adrive,version=v4.1.0",
|
||||
"X-Device-Id": state.deviceID,
|
||||
})
|
||||
if callback != nil {
|
||||
callback(req)
|
||||
} else {
|
||||
@ -54,14 +116,23 @@ func (d *AliDrive) request(url, method string, callback base.ReqCallback, resp i
|
||||
return nil, err, e
|
||||
}
|
||||
if e.Code != "" {
|
||||
if e.Code == "AccessTokenInvalid" {
|
||||
switch e.Code {
|
||||
case "AccessTokenInvalid":
|
||||
err = d.refreshToken()
|
||||
if err != nil {
|
||||
return nil, err, e
|
||||
}
|
||||
return d.request(url, method, callback, resp)
|
||||
case "DeviceSessionSignatureInvalid":
|
||||
err = d.createSession()
|
||||
if err != nil {
|
||||
return nil, err, e
|
||||
}
|
||||
default:
|
||||
return nil, errors.New(e.Message), e
|
||||
}
|
||||
return nil, errors.New(e.Message), e
|
||||
return d.request(url, method, callback, resp)
|
||||
} else if res.IsError() {
|
||||
return nil, errors.New("bad status code " + res.Status()), e
|
||||
}
|
||||
return res.Body(), nil, e
|
||||
}
|
||||
|
drivers/aliyundrive_open/driver.go (new file, 201 lines)
@@ -0,0 +1,201 @@
|
||||
package aliyundrive_open
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/Xhofe/rateg"
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/errs"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/go-resty/resty/v2"
|
||||
)
|
||||
|
||||
type AliyundriveOpen struct {
|
||||
model.Storage
|
||||
Addition
|
||||
base string
|
||||
|
||||
DriveId string
|
||||
|
||||
limitList func(ctx context.Context, data base.Json) (*Files, error)
|
||||
limitLink func(ctx context.Context, file model.Obj) (*model.Link, error)
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) Config() driver.Config {
|
||||
return config
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) GetAddition() driver.Additional {
|
||||
return &d.Addition
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) Init(ctx context.Context) error {
|
||||
if d.LIVPDownloadFormat == "" {
|
||||
d.LIVPDownloadFormat = "jpeg"
|
||||
}
|
||||
if d.DriveType == "" {
|
||||
d.DriveType = "default"
|
||||
}
|
||||
res, err := d.request("/adrive/v1.0/user/getDriveInfo", http.MethodPost, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
d.DriveId = utils.Json.Get(res, d.DriveType+"_drive_id").ToString()
|
||||
d.limitList = rateg.LimitFnCtx(d.list, rateg.LimitFnOption{
|
||||
Limit: 4,
|
||||
Bucket: 1,
|
||||
})
|
||||
d.limitLink = rateg.LimitFnCtx(d.link, rateg.LimitFnOption{
|
||||
Limit: 1,
|
||||
Bucket: 1,
|
||||
})
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) Drop(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||
if d.limitList == nil {
|
||||
return nil, fmt.Errorf("driver not init")
|
||||
}
|
||||
files, err := d.getFiles(ctx, dir.GetID())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
|
||||
return fileToObj(src), nil
|
||||
})
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) link(ctx context.Context, file model.Obj) (*model.Link, error) {
|
||||
res, err := d.request("/adrive/v1.0/openFile/getDownloadUrl", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"drive_id": d.DriveId,
|
||||
"file_id": file.GetID(),
|
||||
"expire_sec": 14400,
|
||||
})
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
url := utils.Json.Get(res, "url").ToString()
|
||||
if url == "" {
|
||||
if utils.Ext(file.GetName()) != "livp" {
|
||||
return nil, errors.New("get download url failed: " + string(res))
|
||||
}
|
||||
url = utils.Json.Get(res, "streamsUrl", d.LIVPDownloadFormat).ToString()
|
||||
}
|
||||
exp := time.Hour
|
||||
return &model.Link{
|
||||
URL: url,
|
||||
Expiration: &exp,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
if d.limitLink == nil {
|
||||
return nil, fmt.Errorf("driver not init")
|
||||
}
|
||||
return d.limitLink(ctx, file)
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||
_, err := d.request("/adrive/v1.0/openFile/create", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"drive_id": d.DriveId,
|
||||
"parent_file_id": parentDir.GetID(),
|
||||
"name": dirName,
|
||||
"type": "folder",
|
||||
"check_name_mode": "refuse",
|
||||
})
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
_, err := d.request("/adrive/v1.0/openFile/move", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"drive_id": d.DriveId,
|
||||
"file_id": srcObj.GetID(),
|
||||
"to_parent_file_id": dstDir.GetID(),
|
||||
"check_name_mode": "refuse", // optional:ignore,auto_rename,refuse
|
||||
//"new_name": "newName", // The new name to use when a file of the same name exists
|
||||
})
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||
_, err := d.request("/adrive/v1.0/openFile/update", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"drive_id": d.DriveId,
|
||||
"file_id": srcObj.GetID(),
|
||||
"name": newName,
|
||||
})
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
_, err := d.request("/adrive/v1.0/openFile/copy", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"drive_id": d.DriveId,
|
||||
"file_id": srcObj.GetID(),
|
||||
"to_parent_file_id": dstDir.GetID(),
|
||||
"auto_rename": true,
|
||||
})
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) Remove(ctx context.Context, obj model.Obj) error {
|
||||
uri := "/adrive/v1.0/openFile/recyclebin/trash"
|
||||
if d.RemoveWay == "delete" {
|
||||
uri = "/adrive/v1.0/openFile/delete"
|
||||
}
|
||||
_, err := d.request(uri, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"drive_id": d.DriveId,
|
||||
"file_id": obj.GetID(),
|
||||
})
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||
return d.upload(ctx, dstDir, stream, up)
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
|
||||
var resp base.Json
|
||||
var uri string
|
||||
data := base.Json{
|
||||
"drive_id": d.DriveId,
|
||||
"file_id": args.Obj.GetID(),
|
||||
}
|
||||
switch args.Method {
|
||||
case "video_preview":
|
||||
uri = "/adrive/v1.0/openFile/getVideoPreviewPlayInfo"
|
||||
data["category"] = "live_transcoding"
|
||||
data["url_expire_sec"] = 14400
|
||||
default:
|
||||
return nil, errs.NotSupport
|
||||
}
|
||||
_, err := d.request(uri, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data).SetResult(&resp)
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return resp, nil
|
||||
}
|
||||
|
||||
var _ driver.Driver = (*AliyundriveOpen)(nil)
|
drivers/aliyundrive_open/meta.go (new file, 42 lines)
@@ -0,0 +1,42 @@
|
||||
package aliyundrive_open
|
||||
|
||||
import (
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
)
|
||||
|
||||
type Addition struct {
|
||||
DriveType string `json:"drive_type" type:"select" options:"default,resource,backup" default:"default"`
|
||||
driver.RootID
|
||||
RefreshToken string `json:"refresh_token" required:"true"`
|
||||
OrderBy string `json:"order_by" type:"select" options:"name,size,updated_at,created_at"`
|
||||
OrderDirection string `json:"order_direction" type:"select" options:"ASC,DESC"`
|
||||
OauthTokenURL string `json:"oauth_token_url" default:"https://api.xhofe.top/alist/ali_open/token"`
|
||||
ClientID string `json:"client_id" required:"false" help:"Keep it empty if you don't have one"`
|
||||
ClientSecret string `json:"client_secret" required:"false" help:"Keep it empty if you don't have one"`
|
||||
RemoveWay string `json:"remove_way" required:"true" type:"select" options:"trash,delete"`
|
||||
RapidUpload bool `json:"rapid_upload" help:"If you enable this option, the file will be uploaded to the server first, so the progress will be incorrect"`
|
||||
InternalUpload bool `json:"internal_upload" help:"If you are using Aliyun ECS is located in Beijing, you can turn it on to boost the upload speed"`
|
||||
LIVPDownloadFormat string `json:"livp_download_format" type:"select" options:"jpeg,mov" default:"jpeg"`
|
||||
AccessToken string
|
||||
}
|
||||
|
||||
var config = driver.Config{
|
||||
Name: "AliyundriveOpen",
|
||||
LocalSort: false,
|
||||
OnlyLocal: false,
|
||||
OnlyProxy: false,
|
||||
NoCache: false,
|
||||
NoUpload: false,
|
||||
NeedMs: false,
|
||||
DefaultRoot: "root",
|
||||
NoOverwriteUpload: true,
|
||||
}
|
||||
|
||||
func init() {
|
||||
op.RegisterDriver(func() driver.Driver {
|
||||
return &AliyundriveOpen{
|
||||
base: "https://openapi.aliyundrive.com",
|
||||
}
|
||||
})
|
||||
}
|
drivers/aliyundrive_open/types.go (new file, 69 lines)
@@ -0,0 +1,69 @@
|
||||
package aliyundrive_open
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
)
|
||||
|
||||
type ErrResp struct {
|
||||
Code string `json:"code"`
|
||||
Message string `json:"message"`
|
||||
}
|
||||
|
||||
type Files struct {
|
||||
Items []File `json:"items"`
|
||||
NextMarker string `json:"next_marker"`
|
||||
}
|
||||
|
||||
type File struct {
|
||||
DriveId string `json:"drive_id"`
|
||||
FileId string `json:"file_id"`
|
||||
ParentFileId string `json:"parent_file_id"`
|
||||
Name string `json:"name"`
|
||||
Size int64 `json:"size"`
|
||||
FileExtension string `json:"file_extension"`
|
||||
ContentHash string `json:"content_hash"`
|
||||
Category string `json:"category"`
|
||||
Type string `json:"type"`
|
||||
Thumbnail string `json:"thumbnail"`
|
||||
Url string `json:"url"`
|
||||
CreatedAt *time.Time `json:"created_at"`
|
||||
UpdatedAt time.Time `json:"updated_at"`
|
||||
}
|
||||
|
||||
func fileToObj(f File) *model.ObjThumb {
|
||||
return &model.ObjThumb{
|
||||
Object: model.Object{
|
||||
ID: f.FileId,
|
||||
Name: f.Name,
|
||||
Size: f.Size,
|
||||
Modified: f.UpdatedAt,
|
||||
IsFolder: f.Type == "folder",
|
||||
},
|
||||
Thumbnail: model.Thumbnail{Thumbnail: f.Thumbnail},
|
||||
}
|
||||
}
|
||||
|
||||
type PartInfo struct {
|
||||
Etag interface{} `json:"etag"`
|
||||
PartNumber int `json:"part_number"`
|
||||
PartSize interface{} `json:"part_size"`
|
||||
UploadUrl string `json:"upload_url"`
|
||||
ContentType string `json:"content_type"`
|
||||
}
|
||||
|
||||
type CreateResp struct {
|
||||
//Type string `json:"type"`
|
||||
//ParentFileId string `json:"parent_file_id"`
|
||||
//DriveId string `json:"drive_id"`
|
||||
FileId string `json:"file_id"`
|
||||
//RevisionId string `json:"revision_id"`
|
||||
//EncryptMode string `json:"encrypt_mode"`
|
||||
//DomainId string `json:"domain_id"`
|
||||
//FileName string `json:"file_name"`
|
||||
UploadId string `json:"upload_id"`
|
||||
//Location string `json:"location"`
|
||||
RapidUpload bool `json:"rapid_upload"`
|
||||
PartInfoList []PartInfo `json:"part_info_list"`
|
||||
}
|
drivers/aliyundrive_open/upload.go (new file, 268 lines)
@@ -0,0 +1,268 @@
|
||||
package aliyundrive_open
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"crypto/sha1"
|
||||
"encoding/base64"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"io"
|
||||
"math"
|
||||
"net/http"
|
||||
"os"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/go-resty/resty/v2"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
func makePartInfos(size int) []base.Json {
|
||||
partInfoList := make([]base.Json, size)
|
||||
for i := 0; i < size; i++ {
|
||||
partInfoList[i] = base.Json{"part_number": 1 + i}
|
||||
}
|
||||
return partInfoList
|
||||
}
|
||||
|
||||
func calPartSize(fileSize int64) int64 {
|
||||
var partSize int64 = 20 * 1024 * 1024
|
||||
if fileSize > partSize {
|
||||
if fileSize > 1*1024*1024*1024*1024 { // file Size over 1TB
|
||||
partSize = 5 * 1024 * 1024 * 1024 // file part size 5GB
|
||||
} else if fileSize > 768*1024*1024*1024 { // over 768GB
|
||||
partSize = 109951163 // ≈ 104.8576MB, split 1TB into 10,000 part
|
||||
} else if fileSize > 512*1024*1024*1024 { // over 512GB
|
||||
partSize = 82463373 // ≈ 78.6432MB
|
||||
} else if fileSize > 384*1024*1024*1024 { // over 384GB
|
||||
partSize = 54975582 // ≈ 52.4288MB
|
||||
} else if fileSize > 256*1024*1024*1024 { // over 256GB
|
||||
partSize = 41231687 // ≈ 39.3216MB
|
||||
} else if fileSize > 128*1024*1024*1024 { // over 128GB
|
||||
partSize = 27487791 // ≈ 26.2144MB
|
||||
}
|
||||
}
|
||||
return partSize
|
||||
}
|
||||
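calPartSize steps the part size up with the file size so the slice count stays at or below the 10,000-part limit. A hedged sketch of how a caller derives the part count, mirroring the math.Ceil call later in this file:

// Illustrative only: a 300 GiB file falls in the >256 GiB bracket, so parts
// are 41231687 bytes and the count stays well under 10,000.
func examplePartCount() int {
	size := int64(300) * 1024 * 1024 * 1024
	partSize := calPartSize(size)
	return int(math.Ceil(float64(size) / float64(partSize))) // ≈ 7813
}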
|
||||
func (d *AliyundriveOpen) getUploadUrl(count int, fileId, uploadId string) ([]PartInfo, error) {
|
||||
partInfoList := makePartInfos(count)
|
||||
var resp CreateResp
|
||||
_, err := d.request("/adrive/v1.0/openFile/getUploadUrl", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"drive_id": d.DriveId,
|
||||
"file_id": fileId,
|
||||
"part_info_list": partInfoList,
|
||||
"upload_id": uploadId,
|
||||
}).SetResult(&resp)
|
||||
})
|
||||
return resp.PartInfoList, err
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) uploadPart(ctx context.Context, i, count int, reader *utils.MultiReadable, resp *CreateResp, retry bool) error {
|
||||
partInfo := resp.PartInfoList[i-1]
|
||||
uploadUrl := partInfo.UploadUrl
|
||||
if d.InternalUpload {
|
||||
uploadUrl = strings.ReplaceAll(uploadUrl, "https://cn-beijing-data.aliyundrive.net/", "http://ccp-bj29-bj-1592982087.oss-cn-beijing-internal.aliyuncs.com/")
|
||||
}
|
||||
req, err := http.NewRequest("PUT", uploadUrl, reader)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req = req.WithContext(ctx)
|
||||
res, err := base.HttpClient.Do(req)
|
||||
if err != nil {
|
||||
if retry {
|
||||
reader.Reset()
|
||||
return d.uploadPart(ctx, i, count, reader, resp, false)
|
||||
}
|
||||
return err
|
||||
}
|
||||
res.Body.Close()
|
||||
if retry && res.StatusCode == http.StatusForbidden {
|
||||
resp.PartInfoList, err = d.getUploadUrl(count, resp.FileId, resp.UploadId)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
reader.Reset()
|
||||
return d.uploadPart(ctx, i, count, reader, resp, false)
|
||||
}
|
||||
if res.StatusCode != http.StatusOK && res.StatusCode != http.StatusConflict {
|
||||
return fmt.Errorf("upload status: %d", res.StatusCode)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) normalUpload(ctx context.Context, stream model.FileStreamer, up driver.UpdateProgress, createResp CreateResp, count int, partSize int64) error {
|
||||
log.Debugf("[aliyundive_open] normal upload")
|
||||
// 2. upload
|
||||
preTime := time.Now()
|
||||
for i := 1; i <= len(createResp.PartInfoList); i++ {
|
||||
if utils.IsCanceled(ctx) {
|
||||
return ctx.Err()
|
||||
}
|
||||
err := d.uploadPart(ctx, i, count, utils.NewMultiReadable(io.LimitReader(stream, partSize)), &createResp, true)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if count > 0 {
|
||||
up(i * 100 / count)
|
||||
}
|
||||
// refresh upload url if 50 minutes passed
|
||||
if time.Since(preTime) > 50*time.Minute {
|
||||
createResp.PartInfoList, err = d.getUploadUrl(count, createResp.FileId, createResp.UploadId)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
preTime = time.Now()
|
||||
}
|
||||
}
|
||||
// 3. complete
|
||||
_, err := d.request("/adrive/v1.0/openFile/complete", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"drive_id": d.DriveId,
|
||||
"file_id": createResp.FileId,
|
||||
"upload_id": createResp.UploadId,
|
||||
})
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
type ProofRange struct {
|
||||
Start int64
|
||||
End int64
|
||||
}
|
||||
|
||||
func getProofRange(input string, size int64) (*ProofRange, error) {
|
||||
if size == 0 {
|
||||
return &ProofRange{}, nil
|
||||
}
|
||||
tmpStr := utils.GetMD5EncodeStr(input)[0:16]
|
||||
tmpInt, err := strconv.ParseUint(tmpStr, 16, 64)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
index := tmpInt % uint64(size)
|
||||
pr := &ProofRange{
|
||||
Start: int64(index),
|
||||
End: int64(index) + 8,
|
||||
}
|
||||
if pr.End >= size {
|
||||
pr.End = size
|
||||
}
|
||||
return pr, nil
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) calProofCode(file *os.File, fileSize int64) (string, error) {
|
||||
proofRange, err := getProofRange(d.AccessToken, fileSize)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
buf := make([]byte, proofRange.End-proofRange.Start)
|
||||
_, err = file.ReadAt(buf, proofRange.Start)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return base64.StdEncoding.EncodeToString(buf), nil
|
||||
}
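
Editor's note: the proof_code rule implemented by getProofRange/calProofCode above is compact but non-obvious; a minimal standalone sketch of the same calculation (illustrative names and inputs, assumes crypto/md5, encoding/hex, encoding/base64 and strconv imports):

// Illustrative only: mirrors getProofRange/calProofCode above.
// start = (first 16 hex chars of md5(accessToken), parsed as uint64) % fileSize;
// proof_code = base64(file bytes [start, start+8), capped at the end of the file).
func proofCodeSketch(accessToken string, fileBytes []byte) (string, error) {
    if len(fileBytes) == 0 {
        return "", nil
    }
    sum := md5.Sum([]byte(accessToken))
    tmpInt, err := strconv.ParseUint(hex.EncodeToString(sum[:])[:16], 16, 64)
    if err != nil {
        return "", err
    }
    size := uint64(len(fileBytes))
    start := tmpInt % size
    end := start + 8
    if end > size {
        end = size
    }
    return base64.StdEncoding.EncodeToString(fileBytes[start:end]), nil
}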

func (d *AliyundriveOpen) upload(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
    // 1. create
    // Part Size Unit: Bytes, Default: 20MB,
    // Maximum number of slices 10,000, ≈195.3125GB
    var partSize = calPartSize(stream.GetSize())
    createData := base.Json{
        "drive_id":        d.DriveId,
        "parent_file_id":  dstDir.GetID(),
        "name":            stream.GetName(),
        "type":            "file",
        "check_name_mode": "ignore",
    }
    count := int(math.Ceil(float64(stream.GetSize()) / float64(partSize)))
    createData["part_info_list"] = makePartInfos(count)
    // rapid upload
    rapidUpload := stream.GetSize() > 100*1024 && d.RapidUpload
    if rapidUpload {
        log.Debugf("[aliyundrive_open] start cal pre_hash")
        // read 1024 bytes to calculate pre hash
        buf := bytes.NewBuffer(make([]byte, 0, 1024))
        _, err := io.CopyN(buf, stream, 1024)
        if err != nil {
            return err
        }
        createData["size"] = stream.GetSize()
        createData["pre_hash"] = utils.GetSHA1Encode(buf.Bytes())
        // if support seek, seek to start
        if localFile, ok := stream.(io.Seeker); ok {
            if _, err := localFile.Seek(0, io.SeekStart); err != nil {
                return err
            }
        } else {
            // Put spliced head back to stream
            stream.SetReadCloser(struct {
                io.Reader
                io.Closer
            }{
                Reader: io.MultiReader(buf, stream.GetReadCloser()),
                Closer: stream.GetReadCloser(),
            })
        }
    }
    var createResp CreateResp
    _, err, e := d.requestReturnErrResp("/adrive/v1.0/openFile/create", http.MethodPost, func(req *resty.Request) {
        req.SetBody(createData).SetResult(&createResp)
    })
    if err != nil {
        if e.Code != "PreHashMatched" || !rapidUpload {
            return err
        }
        log.Debugf("[aliyundrive_open] pre_hash matched, start rapid upload")
        // convert to local file
        file, err := utils.CreateTempFile(stream, stream.GetSize())
        if err != nil {
            return err
        }
        _ = stream.GetReadCloser().Close()
        stream.SetReadCloser(file)
        // calculate full hash
        h := sha1.New()
        _, err = io.Copy(h, file)
        if err != nil {
            return err
        }
        delete(createData, "pre_hash")
        createData["proof_version"] = "v1"
        createData["content_hash_name"] = "sha1"
        createData["content_hash"] = hex.EncodeToString(h.Sum(nil))
        // seek to start
        if _, err = file.Seek(0, io.SeekStart); err != nil {
            return err
        }
        createData["proof_code"], err = d.calProofCode(file, stream.GetSize())
        if err != nil {
            return fmt.Errorf("cal proof code error: %s", err.Error())
        }
        _, err = d.request("/adrive/v1.0/openFile/create", http.MethodPost, func(req *resty.Request) {
            req.SetBody(createData).SetResult(&createResp)
        })
        if err != nil {
            return err
        }
        if createResp.RapidUpload {
            log.Debugf("[aliyundrive_open] rapid upload success, file id: %s", createResp.FileId)
            return nil
        }
        // failed to rapid upload, try normal upload
        if _, err = file.Seek(0, io.SeekStart); err != nil {
            return err
        }
    }
    log.Debugf("[aliyundrive_open] create file success, resp: %+v", createResp)
    return d.normalUpload(ctx, stream, up, createResp, count, partSize)
}

129 drivers/aliyundrive_open/util.go Normal file
@@ -0,0 +1,129 @@
package aliyundrive_open

import (
    "context"
    "errors"
    "fmt"
    "net/http"

    "github.com/alist-org/alist/v3/drivers/base"
    "github.com/alist-org/alist/v3/internal/op"
    "github.com/alist-org/alist/v3/pkg/utils"
    "github.com/go-resty/resty/v2"
    log "github.com/sirupsen/logrus"
)

// do others that not defined in Driver interface

func (d *AliyundriveOpen) refreshToken() error {
    url := d.base + "/oauth/access_token"
    if d.OauthTokenURL != "" && d.ClientID == "" {
        url = d.OauthTokenURL
    }
    //var resp base.TokenResp
    var e ErrResp
    res, err := base.RestyClient.R().
        ForceContentType("application/json").
        SetBody(base.Json{
            "client_id":     d.ClientID,
            "client_secret": d.ClientSecret,
            "grant_type":    "refresh_token",
            "refresh_token": d.RefreshToken,
        }).
        //SetResult(&resp).
        SetError(&e).
        Post(url)
    if err != nil {
        return err
    }
    log.Debugf("[ali_open] refresh token response: %s", res.String())
    if e.Code != "" {
        return fmt.Errorf("failed to refresh token: %s", e.Message)
    }
    refresh, access := utils.Json.Get(res.Body(), "refresh_token").ToString(), utils.Json.Get(res.Body(), "access_token").ToString()
    if refresh == "" {
        return errors.New("failed to refresh token: refresh token is empty")
    }
    d.RefreshToken, d.AccessToken = refresh, access
    op.MustSaveDriverStorage(d)
    return nil
}

func (d *AliyundriveOpen) request(uri, method string, callback base.ReqCallback, retry ...bool) ([]byte, error) {
    b, err, _ := d.requestReturnErrResp(uri, method, callback, retry...)
    return b, err
}

func (d *AliyundriveOpen) requestReturnErrResp(uri, method string, callback base.ReqCallback, retry ...bool) ([]byte, error, *ErrResp) {
    req := base.RestyClient.R()
    // TODO check whether access_token is expired
    req.SetHeader("Authorization", "Bearer "+d.AccessToken)
    if method == http.MethodPost {
        req.SetHeader("Content-Type", "application/json")
    }
    if callback != nil {
        callback(req)
    }
    var e ErrResp
    req.SetError(&e)
    res, err := req.Execute(method, d.base+uri)
    if err != nil {
        if res != nil {
            log.Errorf("[aliyundrive_open] request error: %s", res.String())
        }
        return nil, err, nil
    }
    isRetry := len(retry) > 0 && retry[0]
    if e.Code != "" {
        if !isRetry && (utils.SliceContains([]string{"AccessTokenInvalid", "AccessTokenExpired", "I400JD"}, e.Code) || d.AccessToken == "") {
            err = d.refreshToken()
            if err != nil {
                return nil, err, nil
            }
            return d.requestReturnErrResp(uri, method, callback, true)
        }
        return nil, fmt.Errorf("%s:%s", e.Code, e.Message), &e
    }
    return res.Body(), nil, nil
}

func (d *AliyundriveOpen) list(ctx context.Context, data base.Json) (*Files, error) {
    var resp Files
    _, err := d.request("/adrive/v1.0/openFile/list", http.MethodPost, func(req *resty.Request) {
        req.SetBody(data).SetResult(&resp)
    })
    if err != nil {
        return nil, err
    }
    return &resp, nil
}

func (d *AliyundriveOpen) getFiles(ctx context.Context, fileId string) ([]File, error) {
    marker := "first"
    res := make([]File, 0)
    for marker != "" {
        if marker == "first" {
            marker = ""
        }
        data := base.Json{
            "drive_id":        d.DriveId,
            "limit":           200,
            "marker":          marker,
            "order_by":        d.OrderBy,
            "order_direction": d.OrderDirection,
            "parent_file_id":  fileId,
            //"category": "",
            //"type": "",
            //"video_thumbnail_time": 120000,
            //"video_thumbnail_width": 480,
            //"image_thumbnail_width": 480,
        }
        resp, err := d.limitList(ctx, data)
        if err != nil {
            return nil, err
        }
        marker = resp.NextMarker
        res = append(res, resp.Items...)
    }
    return res, nil
}
147 drivers/aliyundrive_share/driver.go Normal file
@@ -0,0 +1,147 @@
package aliyundrive_share

import (
    "context"
    "fmt"
    "net/http"
    "time"

    "github.com/Xhofe/rateg"
    "github.com/alist-org/alist/v3/drivers/base"
    "github.com/alist-org/alist/v3/internal/driver"
    "github.com/alist-org/alist/v3/internal/errs"
    "github.com/alist-org/alist/v3/internal/model"
    "github.com/alist-org/alist/v3/pkg/cron"
    "github.com/alist-org/alist/v3/pkg/utils"
    "github.com/go-resty/resty/v2"
    log "github.com/sirupsen/logrus"
)

type AliyundriveShare struct {
    model.Storage
    Addition
    AccessToken string
    ShareToken  string
    DriveId     string
    cron        *cron.Cron

    limitList func(ctx context.Context, dir model.Obj) ([]model.Obj, error)
    limitLink func(ctx context.Context, file model.Obj) (*model.Link, error)
}

func (d *AliyundriveShare) Config() driver.Config {
    return config
}

func (d *AliyundriveShare) GetAddition() driver.Additional {
    return &d.Addition
}

func (d *AliyundriveShare) Init(ctx context.Context) error {
    err := d.refreshToken()
    if err != nil {
        return err
    }
    err = d.getShareToken()
    if err != nil {
        return err
    }
    d.cron = cron.NewCron(time.Hour * 2)
    d.cron.Do(func() {
        err := d.refreshToken()
        if err != nil {
            log.Errorf("%+v", err)
        }
    })
    d.limitList = rateg.LimitFnCtx(d.list, rateg.LimitFnOption{
        Limit:  4,
        Bucket: 1,
    })
    d.limitLink = rateg.LimitFnCtx(d.link, rateg.LimitFnOption{
        Limit:  1,
        Bucket: 1,
    })
    return nil
}

func (d *AliyundriveShare) Drop(ctx context.Context) error {
    if d.cron != nil {
        d.cron.Stop()
    }
    d.DriveId = ""
    return nil
}

func (d *AliyundriveShare) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
    if d.limitList == nil {
        return nil, fmt.Errorf("driver not init")
    }
    return d.limitList(ctx, dir)
}

func (d *AliyundriveShare) list(ctx context.Context, dir model.Obj) ([]model.Obj, error) {
    files, err := d.getFiles(dir.GetID())
    if err != nil {
        return nil, err
    }
    return utils.SliceConvert(files, func(src File) (model.Obj, error) {
        return fileToObj(src), nil
    })
}

func (d *AliyundriveShare) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
    if d.limitLink == nil {
        return nil, fmt.Errorf("driver not init")
    }
    return d.limitLink(ctx, file)
}

func (d *AliyundriveShare) link(ctx context.Context, file model.Obj) (*model.Link, error) {
    data := base.Json{
        "drive_id": d.DriveId,
        "file_id":  file.GetID(),
        // // Only ten minutes lifetime
        "expire_sec": 600,
        "share_id":   d.ShareId,
    }
    var resp ShareLinkResp
    _, err := d.request("https://api.aliyundrive.com/v2/file/get_share_link_download_url", http.MethodPost, func(req *resty.Request) {
        req.SetHeader(CanaryHeaderKey, CanaryHeaderValue).SetBody(data).SetResult(&resp)
    })
    if err != nil {
        return nil, err
    }
    return &model.Link{
        Header: http.Header{
            "Referer": []string{"https://www.aliyundrive.com/"},
        },
        URL: resp.DownloadUrl,
    }, nil
}

func (d *AliyundriveShare) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
    var resp base.Json
    var url string
    data := base.Json{
        "share_id": d.ShareId,
        "file_id":  args.Obj.GetID(),
    }
    switch args.Method {
    case "doc_preview":
        url = "https://api.aliyundrive.com/v2/file/get_office_preview_url"
    case "video_preview":
        url = "https://api.aliyundrive.com/v2/file/get_video_preview_play_info"
        data["category"] = "live_transcoding"
    default:
        return nil, errs.NotSupport
    }
    _, err := d.request(url, http.MethodPost, func(req *resty.Request) {
        req.SetBody(data).SetResult(&resp)
    })
    if err != nil {
        return nil, err
    }
    return resp, nil
}

var _ driver.Driver = (*AliyundriveShare)(nil)
29 drivers/aliyundrive_share/meta.go Normal file
@@ -0,0 +1,29 @@
package aliyundrive_share

import (
    "github.com/alist-org/alist/v3/internal/driver"
    "github.com/alist-org/alist/v3/internal/op"
)

type Addition struct {
    RefreshToken string `json:"refresh_token" required:"true"`
    ShareId      string `json:"share_id" required:"true"`
    SharePwd     string `json:"share_pwd"`
    driver.RootID
    OrderBy        string `json:"order_by" type:"select" options:"name,size,updated_at,created_at"`
    OrderDirection string `json:"order_direction" type:"select" options:"ASC,DESC"`
}

var config = driver.Config{
    Name:        "AliyundriveShare",
    LocalSort:   false,
    OnlyProxy:   false,
    NoUpload:    true,
    DefaultRoot: "root",
}

func init() {
    op.RegisterDriver(func() driver.Driver {
        return &AliyundriveShare{}
    })
}
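
Editor's note: for orientation, a minimal sketch of how the Addition fields above might be filled in when mounting a shared link (placeholder values only, not real credentials or part of the driver):

// Illustrative only: placeholder values for the Addition fields defined above.
var exampleAddition = Addition{
    RefreshToken:   "<refresh token of the logged-in account>",
    ShareId:        "<id taken from the share URL>",
    SharePwd:       "", // only needed for password-protected shares
    OrderBy:        "name",
    OrderDirection: "ASC",
}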
57 drivers/aliyundrive_share/types.go Normal file
@@ -0,0 +1,57 @@
package aliyundrive_share

import (
    "time"

    "github.com/alist-org/alist/v3/internal/model"
)

type ErrorResp struct {
    Code    string `json:"code"`
    Message string `json:"message"`
}

type ShareTokenResp struct {
    ShareToken string    `json:"share_token"`
    ExpireTime time.Time `json:"expire_time"`
    ExpiresIn  int       `json:"expires_in"`
}

type ListResp struct {
    Items             []File `json:"items"`
    NextMarker        string `json:"next_marker"`
    PunishedFileCount int    `json:"punished_file_count"`
}

type File struct {
    DriveId      string    `json:"drive_id"`
    DomainId     string    `json:"domain_id"`
    FileId       string    `json:"file_id"`
    ShareId      string    `json:"share_id"`
    Name         string    `json:"name"`
    Type         string    `json:"type"`
    CreatedAt    time.Time `json:"created_at"`
    UpdatedAt    time.Time `json:"updated_at"`
    ParentFileId string    `json:"parent_file_id"`
    Size         int64     `json:"size"`
    Thumbnail    string    `json:"thumbnail"`
}

func fileToObj(f File) *model.ObjThumb {
    return &model.ObjThumb{
        Object: model.Object{
            ID:       f.FileId,
            Name:     f.Name,
            Size:     f.Size,
            Modified: f.UpdatedAt,
            IsFolder: f.Type == "folder",
        },
        Thumbnail: model.Thumbnail{Thumbnail: f.Thumbnail},
    }
}

type ShareLinkResp struct {
    DownloadUrl string `json:"download_url"`
    Url         string `json:"url"`
    Thumbnail   string `json:"thumbnail"`
}
141 drivers/aliyundrive_share/util.go Normal file
@@ -0,0 +1,141 @@
package aliyundrive_share

import (
    "errors"
    "fmt"

    "github.com/alist-org/alist/v3/drivers/base"
    "github.com/alist-org/alist/v3/internal/op"
    log "github.com/sirupsen/logrus"
)

const (
    // CanaryHeaderKey CanaryHeaderValue for lifting rate limit restrictions
    CanaryHeaderKey   = "X-Canary"
    CanaryHeaderValue = "client=web,app=share,version=v2.3.1"
)

func (d *AliyundriveShare) refreshToken() error {
    url := "https://auth.aliyundrive.com/v2/account/token"
    var resp base.TokenResp
    var e ErrorResp
    _, err := base.RestyClient.R().
        SetBody(base.Json{"refresh_token": d.RefreshToken, "grant_type": "refresh_token"}).
        SetResult(&resp).
        SetError(&e).
        Post(url)
    if err != nil {
        return err
    }
    if e.Code != "" {
        return fmt.Errorf("failed to refresh token: %s", e.Message)
    }
    d.RefreshToken, d.AccessToken = resp.RefreshToken, resp.AccessToken
    op.MustSaveDriverStorage(d)
    return nil
}

// do others that not defined in Driver interface
func (d *AliyundriveShare) getShareToken() error {
    data := base.Json{
        "share_id": d.ShareId,
    }
    if d.SharePwd != "" {
        data["share_pwd"] = d.SharePwd
    }
    var e ErrorResp
    var resp ShareTokenResp
    _, err := base.RestyClient.R().
        SetResult(&resp).SetError(&e).SetBody(data).
        Post("https://api.aliyundrive.com/v2/share_link/get_share_token")
    if err != nil {
        return err
    }
    if e.Code != "" {
        return errors.New(e.Message)
    }
    d.ShareToken = resp.ShareToken
    return nil
}

func (d *AliyundriveShare) request(url, method string, callback base.ReqCallback) ([]byte, error) {
    var e ErrorResp
    req := base.RestyClient.R().
        SetError(&e).
        SetHeader("content-type", "application/json").
        SetHeader("Authorization", "Bearer\t"+d.AccessToken).
        SetHeader(CanaryHeaderKey, CanaryHeaderValue).
        SetHeader("x-share-token", d.ShareToken)
    if callback != nil {
        callback(req)
    } else {
        req.SetBody("{}")
    }
    resp, err := req.Execute(method, url)
    if err != nil {
        return nil, err
    }
    if e.Code != "" {
        if e.Code == "AccessTokenInvalid" || e.Code == "ShareLinkTokenInvalid" {
            if e.Code == "AccessTokenInvalid" {
                err = d.refreshToken()
            } else {
                err = d.getShareToken()
            }
            if err != nil {
                return nil, err
            }
            return d.request(url, method, callback)
        } else {
            return nil, errors.New(e.Code + ": " + e.Message)
        }
    }
    return resp.Body(), nil
}

func (d *AliyundriveShare) getFiles(fileId string) ([]File, error) {
    files := make([]File, 0)
    data := base.Json{
        "image_thumbnail_process": "image/resize,w_160/format,jpeg",
        "image_url_process":       "image/resize,w_1920/format,jpeg",
        "limit":                   200,
        "order_by":                d.OrderBy,
        "order_direction":         d.OrderDirection,
        "parent_file_id":          fileId,
        "share_id":                d.ShareId,
        "video_thumbnail_process": "video/snapshot,t_1000,f_jpg,ar_auto,w_300",
        "marker":                  "first",
    }
    for data["marker"] != "" {
        if data["marker"] == "first" {
            data["marker"] = ""
        }
        var e ErrorResp
        var resp ListResp
        res, err := base.RestyClient.R().
            SetHeader("x-share-token", d.ShareToken).
            SetHeader(CanaryHeaderKey, CanaryHeaderValue).
            SetResult(&resp).SetError(&e).SetBody(data).
            Post("https://api.aliyundrive.com/adrive/v3/file/list")
        if err != nil {
            return nil, err
        }
        log.Debugf("aliyundrive share get files: %s", res.String())
        if e.Code != "" {
            if e.Code == "AccessTokenInvalid" || e.Code == "ShareLinkTokenInvalid" {
                err = d.getShareToken()
                if err != nil {
                    return nil, err
                }
                return d.getFiles(fileId)
            }
            return nil, errors.New(e.Message)
        }
        data["marker"] = resp.NextMarker
        files = append(files, resp.Items...)
    }
    if len(files) > 0 && d.DriveId == "" {
        d.DriveId = files[0].DriveId
    }
    return files, nil
}
@@ -1,25 +1,52 @@
package drivers

import (
    _ "github.com/alist-org/alist/v3/drivers/115"
    _ "github.com/alist-org/alist/v3/drivers/123"
    _ "github.com/alist-org/alist/v3/drivers/123_share"
    _ "github.com/alist-org/alist/v3/drivers/139"
    _ "github.com/alist-org/alist/v3/drivers/189"
    _ "github.com/alist-org/alist/v3/drivers/189pc"
    _ "github.com/alist-org/alist/v3/drivers/alias"
    _ "github.com/alist-org/alist/v3/drivers/alist_v2"
    _ "github.com/alist-org/alist/v3/drivers/alist_v3"
    _ "github.com/alist-org/alist/v3/drivers/aliyundrive"
    _ "github.com/alist-org/alist/v3/drivers/aliyundrive_open"
    _ "github.com/alist-org/alist/v3/drivers/aliyundrive_share"
    _ "github.com/alist-org/alist/v3/drivers/baidu_netdisk"
    _ "github.com/alist-org/alist/v3/drivers/baidu_photo"
    _ "github.com/alist-org/alist/v3/drivers/baidu_share"
    _ "github.com/alist-org/alist/v3/drivers/cloudreve"
    _ "github.com/alist-org/alist/v3/drivers/crypt"
    _ "github.com/alist-org/alist/v3/drivers/dropbox"
    _ "github.com/alist-org/alist/v3/drivers/ftp"
    _ "github.com/alist-org/alist/v3/drivers/google_drive"
    _ "github.com/alist-org/alist/v3/drivers/google_photo"
    _ "github.com/alist-org/alist/v3/drivers/ipfs_api"
    _ "github.com/alist-org/alist/v3/drivers/lanzou"
    _ "github.com/alist-org/alist/v3/drivers/local"
    _ "github.com/alist-org/alist/v3/drivers/mediatrack"
    _ "github.com/alist-org/alist/v3/drivers/mega"
    _ "github.com/alist-org/alist/v3/drivers/mopan"
    _ "github.com/alist-org/alist/v3/drivers/onedrive"
    _ "github.com/alist-org/alist/v3/drivers/onedrive_app"
    _ "github.com/alist-org/alist/v3/drivers/pikpak"
    _ "github.com/alist-org/alist/v3/drivers/quark"
    _ "github.com/alist-org/alist/v3/drivers/pikpak_share"
    _ "github.com/alist-org/alist/v3/drivers/quark_uc"
    _ "github.com/alist-org/alist/v3/drivers/s3"
    _ "github.com/alist-org/alist/v3/drivers/seafile"
    _ "github.com/alist-org/alist/v3/drivers/sftp"
    _ "github.com/alist-org/alist/v3/drivers/smb"
    _ "github.com/alist-org/alist/v3/drivers/teambition"
    _ "github.com/alist-org/alist/v3/drivers/terabox"
    _ "github.com/alist-org/alist/v3/drivers/thunder"
    _ "github.com/alist-org/alist/v3/drivers/trainbit"
    _ "github.com/alist-org/alist/v3/drivers/url_tree"
    _ "github.com/alist-org/alist/v3/drivers/uss"
    _ "github.com/alist-org/alist/v3/drivers/virtual"
    _ "github.com/alist-org/alist/v3/drivers/webdav"
    _ "github.com/alist-org/alist/v3/drivers/weiyun"
    _ "github.com/alist-org/alist/v3/drivers/wopan"
    _ "github.com/alist-org/alist/v3/drivers/yandex_disk"
)
@@ -1,48 +1,47 @@
package baidu_netdisk

import (
    "bytes"
    "context"
    "crypto/md5"
    "encoding/hex"
    "fmt"
    "github.com/alist-org/alist/v3/drivers/base"
    "github.com/alist-org/alist/v3/internal/driver"
    "github.com/alist-org/alist/v3/internal/errs"
    "github.com/alist-org/alist/v3/internal/model"
    "github.com/alist-org/alist/v3/pkg/utils"
    "github.com/avast/retry-go"
    log "github.com/sirupsen/logrus"
    "io"
    "math"
    "os"
    stdpath "path"
    "strconv"
    "strings"

    "github.com/alist-org/alist/v3/drivers/base"
    "github.com/alist-org/alist/v3/internal/conf"
    "github.com/alist-org/alist/v3/internal/driver"
    "github.com/alist-org/alist/v3/internal/errs"
    "github.com/alist-org/alist/v3/internal/model"
    "github.com/alist-org/alist/v3/pkg/utils"
    log "github.com/sirupsen/logrus"
)

type BaiduNetdisk struct {
    model.Storage
    Addition
    AccessToken string
}

const BaiduFileAPI = "https://d.pcs.baidu.com/rest/2.0/pcs/superfile2"
const DefaultSliceSize int64 = 4 * 1024 * 1024

func (d *BaiduNetdisk) Config() driver.Config {
    return config
}

func (d *BaiduNetdisk) GetAddition() driver.Additional {
    return d.Addition
    return &d.Addition
}

func (d *BaiduNetdisk) Init(ctx context.Context, storage model.Storage) error {
    d.Storage = storage
    err := utils.Json.UnmarshalFromString(d.Storage.Addition, &d.Addition)
    if err != nil {
        return err
    }
    return d.refreshToken()
func (d *BaiduNetdisk) Init(ctx context.Context) error {
    res, err := d.get("/xpan/nas", map[string]string{
        "method": "uinfo",
    }, nil)
    log.Debugf("[baidu] get uinfo: %s", string(res))
    return err
}

func (d *BaiduNetdisk) Drop(ctx context.Context) error {
@@ -59,11 +58,6 @@ func (d *BaiduNetdisk) List(ctx context.Context, dir model.Obj, args model.ListA
    })
}

//func (d *BaiduNetdisk) Get(ctx context.Context, path string) (model.Obj, error) {
//    // this is optional
//    return nil, errs.NotImplement
//}

func (d *BaiduNetdisk) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
    if d.DownloadAPI == "crack" {
        return d.linkCrack(file, args)
@@ -100,12 +94,11 @@ func (d *BaiduNetdisk) Rename(ctx context.Context, srcObj model.Obj, newName str
}

func (d *BaiduNetdisk) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
    dest, newname := stdpath.Split(dstDir.GetPath())
    data := []base.Json{
        {
            "path": srcObj.GetPath(),
            "dest": dest,
            "newname": newname,
            "dest": dstDir.GetPath(),
            "newname": srcObj.GetName(),
        },
    }
    _, err := d.manage("copy", data)
@@ -119,65 +112,47 @@ func (d *BaiduNetdisk) Remove(ctx context.Context, obj model.Obj) error {
}

func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
    var tempFile *os.File
    var err error
    if f, ok := stream.GetReadCloser().(*os.File); ok {
        tempFile = f
    } else {
        tempFile, err = os.CreateTemp(conf.Conf.TempDir, "file-*")
        if err != nil {
            return err
        }
        defer func() {
            _ = tempFile.Close()
            _ = os.Remove(tempFile.Name())
        }()
        _, err = io.Copy(tempFile, stream)
        if err != nil {
            return err
        }
        _, err = tempFile.Seek(0, io.SeekStart)
        if err != nil {
            return err
        }
    streamSize := stream.GetSize()

    tempFile, err := utils.CreateTempFile(stream.GetReadCloser(), stream.GetSize())
    if err != nil {
        return err
    }
    var Default int64 = 4 * 1024 * 1024
    defaultByteData := make([]byte, Default)
    count := int(math.Ceil(float64(stream.GetSize()) / float64(Default)))
    var SliceSize int64 = 256 * 1024
    defer func() {
        _ = tempFile.Close()
        _ = os.Remove(tempFile.Name())
    }()

    count := int(math.Ceil(float64(streamSize) / float64(DefaultSliceSize)))
    //cal md5 for first 256k data
    const SliceSize int64 = 256 * 1024
    // cal md5
    h1 := md5.New()
    h2 := md5.New()
    block_list := make([]string, 0)
    content_md5 := ""
    slice_md5 := ""
    left := stream.GetSize()
    blockList := make([]string, 0)
    contentMd5 := ""
    sliceMd5 := ""
    left := streamSize
    for i := 0; i < count; i++ {
        byteSize := Default
        var byteData []byte
        if left < Default {
        byteSize := DefaultSliceSize
        if left < DefaultSliceSize {
            byteSize = left
            byteData = make([]byte, byteSize)
        } else {
            byteData = defaultByteData
        }
        left -= byteSize
        _, err = io.ReadFull(tempFile, byteData)
        _, err = io.Copy(io.MultiWriter(h1, h2), io.LimitReader(tempFile, byteSize))
        if err != nil {
            return err
        }
        h1.Write(byteData)
        h2.Write(byteData)
        block_list = append(block_list, fmt.Sprintf("\"%s\"", hex.EncodeToString(h2.Sum(nil))))
        blockList = append(blockList, fmt.Sprintf("\"%s\"", hex.EncodeToString(h2.Sum(nil))))
        h2.Reset()
    }
    content_md5 = hex.EncodeToString(h1.Sum(nil))
    contentMd5 = hex.EncodeToString(h1.Sum(nil))
    _, err = tempFile.Seek(0, io.SeekStart)
    if err != nil {
        return err
    }
    if stream.GetSize() <= SliceSize {
        slice_md5 = content_md5
    if streamSize <= SliceSize {
        sliceMd5 = contentMd5
    } else {
        sliceData := make([]byte, SliceSize)
        _, err = io.ReadFull(tempFile, sliceData)
@@ -185,21 +160,19 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
            return err
        }
        h2.Write(sliceData)
        slice_md5 = hex.EncodeToString(h2.Sum(nil))
        _, err = tempFile.Seek(0, io.SeekStart)
        if err != nil {
            return err
        }
        sliceMd5 = hex.EncodeToString(h2.Sum(nil))
    }
    path := encodeURIComponent(stdpath.Join(dstDir.GetPath(), stream.GetName()))
    block_list_str := fmt.Sprintf("[%s]", strings.Join(block_list, ","))
    rawPath := stdpath.Join(dstDir.GetPath(), stream.GetName())
    path := encodeURIComponent(rawPath)
    block_list_str := fmt.Sprintf("[%s]", strings.Join(blockList, ","))
    data := fmt.Sprintf("path=%s&size=%d&isdir=0&autoinit=1&block_list=%s&content-md5=%s&slice-md5=%s",
        path, stream.GetSize(),
        path, streamSize,
        block_list_str,
        content_md5, slice_md5)
        contentMd5, sliceMd5)
    params := map[string]string{
        "method": "precreate",
    }
    log.Debugf("[baidu_netdisk] precreate data: %s", data)
    var precreateResp PrecreateResp
    _, err = d.post("/xpan/file", params, data, &precreateResp)
    if err != nil {
@@ -207,6 +180,7 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
    }
    log.Debugf("%+v", precreateResp)
    if precreateResp.ReturnType == 2 {
        //rapid upload, since got md5 match from baidu server
        return nil
    }
    params = map[string]string{
@@ -216,35 +190,49 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
        "path": path,
        "uploadid": precreateResp.Uploadid,
    }
    left = stream.GetSize()
    for _, partseq := range precreateResp.BlockList {
        byteSize := Default
        var byteData []byte
        if left < Default {
            byteSize = left
            byteData = make([]byte, byteSize)
        } else {
            byteData = defaultByteData
        }
        left -= byteSize
        _, err = io.ReadFull(tempFile, byteData)
        if err != nil {
            return err
        }
        u := "https://d.pcs.baidu.com/rest/2.0/pcs/superfile2"

    var offset int64 = 0
    for i, partseq := range precreateResp.BlockList {
        params["partseq"] = strconv.Itoa(partseq)
        res, err := base.RestyClient.R().SetQueryParams(params).SetFileReader("file", stream.GetName(), bytes.NewReader(byteData)).Post(u)
        byteSize := int64(math.Min(float64(streamSize-offset), float64(DefaultSliceSize)))
        err := retry.Do(func() error {
            return d.uploadSlice(ctx, &params, stream.GetName(), tempFile, offset, byteSize)
        },
            retry.Context(ctx),
            retry.Attempts(3))
        if err != nil {
            return err
        }
        log.Debugln(res.String())
        offset += byteSize

        if len(precreateResp.BlockList) > 0 {
            up(i * 100 / len(precreateResp.BlockList))
        }
    }
    _, err = d.create(path, stream.GetSize(), 0, precreateResp.Uploadid, block_list_str)
    _, err = d.create(rawPath, streamSize, 0, precreateResp.Uploadid, block_list_str)
    return err
}
func (d *BaiduNetdisk) uploadSlice(ctx context.Context, params *map[string]string, fileName string, file *os.File, offset int64, byteSize int64) error {
    _, err := file.Seek(offset, io.SeekStart)
    if err != nil {
        return err
    }

func (d *BaiduNetdisk) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
    return nil, errs.NotSupport
    res, err := base.RestyClient.R().
        SetContext(ctx).
        SetQueryParams(*params).
        SetFileReader("file", fileName, io.LimitReader(file, byteSize)).
        Post(BaiduFileAPI)
    if err != nil {
        return err
    }
    log.Debugln(res.RawResponse.Status + res.String())
    errCode := utils.Json.Get(res.Body(), "error_code").ToInt()
    errNo := utils.Json.Get(res.Body(), "errno").ToInt()
    if errCode != 0 || errNo != 0 {
        return errs.NewErr(errs.StreamIncomplete, "error in uploading to baidu, will retry. response=%s", res.String())
    }
    return nil
}

var _ driver.Driver = (*BaiduNetdisk)(nil)
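
Editor's note: the precreate step in Put above sends URL-encoded form data rather than JSON; a minimal sketch of how those fields fit together for a hypothetical two-slice file (placeholder path and hashes; assumes the fmt/strings imports and the package's encodeURIComponent helper — not part of the driver):

// Illustrative only: shape of the precreate form assembled in Put above.
// block_list holds the MD5 of every 4MB slice, content-md5 the MD5 of the whole
// file, and slice-md5 the MD5 of just the first 256KB.
func precreateDataSketch() string {
    blockList := []string{`"<md5 of slice 1>"`, `"<md5 of slice 2>"`} // one MD5 per 4MB slice
    return fmt.Sprintf("path=%s&size=%d&isdir=0&autoinit=1&block_list=%s&content-md5=%s&slice-md5=%s",
        encodeURIComponent("/apps/alist/example.bin"), // hypothetical remote path
        int64(6*1024*1024),                            // hypothetical 6MB file -> 2 slices
        fmt.Sprintf("[%s]", strings.Join(blockList, ",")),
        "<md5 of the whole file>",
        "<md5 of the first 256KB>")
}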
@@ -13,6 +13,8 @@ type Addition struct {
    DownloadAPI string `json:"download_api" type:"select" options:"official,crack" default:"official"`
    ClientID string `json:"client_id" required:"true" default:"iYCeC9g08h5vuP9UqvPHKKSVrKFXGa1v"`
    ClientSecret string `json:"client_secret" required:"true" default:"jXiFMOPVPCWlO2M5CwWQzffpNPaGTRBG"`
    CustomCrackUA string `json:"custom_crack_ua" required:"true" default:"netdisk"`
    AccessToken string
}

var config = driver.Config{
@@ -20,10 +22,8 @@ var config = driver.Config{
    DefaultRoot: "/",
}

func New() driver.Driver {
    return &BaiduNetdisk{}
}

func init() {
    op.RegisterDriver(config, New)
    op.RegisterDriver(func() driver.Driver {
        return &BaiduNetdisk{}
    })
}
Some files were not shown because too many files have changed in this diff.