Compare commits


195 Commits

Author SHA1 Message Date
ae6984714d fix: remove default polyfill (#6130 close #6100)
* refactor(setting): replace `polyfill.io`

* fix: remove default polyfill

---------

Co-authored-by: Andy Hsu <i@nn.ci>
2024-03-02 15:36:28 +08:00
d0f88bd1cb feat: s3 server support (#6088 close #5186)
Currently tested: List, Get, Remove
2024-03-02 15:35:10 +08:00
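As a rough illustration of what the new S3 server exposes, a generic S3 client can be pointed at AList's endpoint. A minimal sketch using aws-sdk-go (already a dependency in this changeset); the endpoint URL, port, bucket name and credentials below are placeholders, not values confirmed by the commit:

```go
package main

import (
	"fmt"
	"log"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/credentials"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/s3"
)

func main() {
	// Hypothetical endpoint and credentials; substitute whatever the AList
	// S3 server is actually configured with.
	sess, err := session.NewSession(&aws.Config{
		Endpoint:         aws.String("http://127.0.0.1:5246"), // placeholder
		Region:           aws.String("us-east-1"),
		Credentials:      credentials.NewStaticCredentials("ACCESS_KEY", "SECRET_KEY", ""),
		S3ForcePathStyle: aws.Bool(true), // path-style is typical for self-hosted S3 endpoints
	})
	if err != nil {
		log.Fatal(err)
	}
	client := s3.New(sess)

	// List is one of the operations the commit reports as tested.
	out, err := client.ListObjectsV2(&s3.ListObjectsV2Input{Bucket: aws.String("mybucket")})
	if err != nil {
		log.Fatal(err)
	}
	for _, obj := range out.Contents {
		fmt.Println(aws.StringValue(obj.Key), aws.Int64Value(obj.Size))
	}
}
```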
f8b1f87a5f fix: support for Microsoft WebDAV (#6133 close #6104)
* Add support for Microsoft WebDAV

* add import
2024-03-02 14:59:55 +08:00
71e4e1ab6e fix(chaoxing): json cannot unmarshal content.uploadDate (close #6119 in #6124) 2024-03-01 13:37:09 +08:00
7e6522c81e ci: build ffmpeg image with dev version 2024-02-24 18:10:45 +08:00
94a80bccfe fix(feiji): unable to get link (close #6082) 2024-02-24 18:04:08 +08:00
e66abb3f58 fix(deps): update module github.com/aws/aws-sdk-go to v1.50.24 (#5873)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-02-23 20:56:14 +08:00
742335f80e fix: don't push docker on pr due to security 2024-02-23 15:42:52 +08:00
f1979a8bbc feat(search): search with meilisearch (#6060)
* feat(search): search with meilisearch.

* feat(search): meilisearch supports auto update.

* chores: remove utils.Log.

* fix(search): the null pointer caused by deleting non-existing file/folder indexes.

---------

Co-authored-by: Andy Hsu <i@nn.ci>
2024-02-23 15:37:40 +08:00
1f835502ba feat: support customize dsn for mysql and pg (#6031)
* support for Unix socket connections to MySQL

* feat: customize dsn for mysql and pg

---------

Co-authored-by: Andy Hsu <i@nn.ci>
2024-02-23 15:28:48 +08:00
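A custom DSN is what makes connections like a MySQL Unix socket possible. A minimal GORM sketch, assuming the standard go-sql-driver/mysql DSN syntax; the socket path, user and database name are placeholders:

```go
package main

import (
	"log"

	"gorm.io/driver/mysql"
	"gorm.io/gorm"
)

func main() {
	// Placeholder DSN: connect over a Unix socket instead of TCP.
	// Format follows go-sql-driver/mysql: user:pass@unix(/path/to/socket)/dbname?params
	dsn := "alist:password@unix(/var/run/mysqld/mysqld.sock)/alist?charset=utf8mb4&parseTime=True&loc=Local"

	db, err := gorm.Open(mysql.Open(dsn), &gorm.Config{})
	if err != nil {
		log.Fatalf("failed to connect database: %v", err)
	}
	_ = db // use db as usual
}
```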
424ab2d0c0 ci: remove docker latest tag on dev 2024-02-21 15:50:05 +08:00
858ba19670 ci: also push docker to hub for pr 2024-02-21 14:58:45 +08:00
Mmx
0c7e47a76c feat: add docker image with pre-installed ffmpeg (#6054)
* build: add dockerfile for ffmpeg version

* ci: add docker image with ffmpeg release

* fix: do not push on docker build test
2024-02-21 14:04:22 +08:00
53926d5cd0 fix(search): duplicate folder on autoupdate (#6063 close #6062)
* fix(search): the problem of not returning in time when index does not support auto update.

* fix(search): the problem of duplicate indexing of folders.
2024-02-20 19:12:07 +08:00
47f4b05517 feat(sftp): allow ignore symlink error (close #6026) 2024-02-15 18:54:19 +08:00
6d85f1b0c0 fix(123): User-Agent and rate limit (#6012)
* Fix tags

* Add an API rate limiter to prevent the cloud backend from treating AList as an attacker and banning the AList client

---------

Co-authored-by: 风信子 <fengxinzi@xaidc.com>
2024-02-09 14:45:44 +08:00
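The second bullet above describes adding a request rate limiter so the 123 backend does not mistake AList for an attacker and ban the client. A minimal sketch of that idea using golang.org/x/time/rate (which appears in this changeset's dependency bumps); the limits chosen here are illustrative, not the driver's actual values:

```go
package main

import (
	"context"
	"fmt"
	"net/http"
	"time"

	"golang.org/x/time/rate"
)

// limitedClient wraps an http.Client and blocks until the limiter allows the next call.
type limitedClient struct {
	client  *http.Client
	limiter *rate.Limiter
}

func (c *limitedClient) Do(req *http.Request) (*http.Response, error) {
	// Wait blocks until a token is available or the request context is cancelled.
	if err := c.limiter.Wait(req.Context()); err != nil {
		return nil, err
	}
	return c.client.Do(req)
}

func main() {
	// Illustrative limit: at most one request every 500ms, burst of 1.
	lc := &limitedClient{
		client:  &http.Client{Timeout: 10 * time.Second},
		limiter: rate.NewLimiter(rate.Every(500*time.Millisecond), 1),
	}
	req, _ := http.NewRequestWithContext(context.Background(), http.MethodGet, "https://example.com", nil)
	resp, err := lc.Do(req)
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
```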
e49fda3e2a fix: WebDAV's creation date should use RFC3339 format (#6015 close #5878) 2024-02-08 19:22:29 +08:00
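For reference, RFC 3339 is the timestamp format the WebDAV creationdate property expects (per RFC 4918); in Go it is produced directly by the time package:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// RFC 3339, e.g. 2024-02-08T19:22:29+08:00
	fmt.Println(time.Now().Format(time.RFC3339))
}
```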
da5e35578a fix: embed all files of dist 2024-02-03 19:44:50 +08:00
812f58ae6d fix(mopan): client version is too low (#5987)
* fix(mopan): download error `client version is too low`

* feat(mopan): support sms login

* refactor(quqi): upload use s3
2024-02-02 21:04:43 +08:00
9bd3c87bcc fix(ldap): 'exiting by peer' exception occurred during the TLS connection (#5977) 2024-02-01 10:43:08 +08:00
c82866975e fix: error on repeated reading static (#5957)
* Update static.go

* rm initial value of static

---------

Co-authored-by: Andy Hsu <i@nn.ci>
2024-01-30 21:21:53 +08:00
aef952ae68 feat(dropbox): add root_namespace_id to access teams folder (#5929)
* feat(dropbox): add root_namespace_id to access teams folder

* fix(dropbox): get_current_account API request

* feat(dropbox): extract root_namespace_id properly

* style: format code
2024-01-24 17:03:50 +08:00
9222510d8d feat(quqi): add download link with cdn (#5938)
* feat(quqi): add download link by cdn

* fix(quqi): cookie error when login with phone number
2024-01-24 16:47:49 +08:00
d88b54d98a fix(quqi): empty file link for non-VIP users (#5926)
* fix(quqi): error returned when uploading a file that existed

* fix empty download link for non-VIP users

* fix cannot parse request result

---------

Co-authored-by: Andy Hsu <i@nn.ci>
2024-01-21 15:28:52 +08:00
85a28d9822 fix(quqi): error on uploading an existing file (#5920) 2024-01-20 21:22:50 +08:00
4f7761fe2c fix: set progress to 100 when it's NaN (close #5906) 2024-01-20 13:06:46 +08:00
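The NaN guard is straightforward in Go; a sketch of the kind of check such a fix implies (not the project's actual code):

```go
package main

import (
	"fmt"
	"math"
)

// normalizeProgress clamps a task's progress, treating NaN as complete.
func normalizeProgress(p float64) float64 {
	if math.IsNaN(p) {
		return 100 // NaN usually comes from 0/0 bytes, i.e. nothing left to transfer
	}
	if p > 100 {
		return 100
	}
	return p
}

func main() {
	fmt.Println(normalizeProgress(math.NaN())) // 100
	fmt.Println(normalizeProgress(42.5))       // 42.5
}
```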
a8c900d09e fix(quqi): file extension duplication when renaming and some missing form parameters (#5910)
* feat: add `quqi` driver

* change signature of request function

* specific header for every storage

* todo: real upload

* fix upload method

* fix incorrect parameters for some request function calls

* refine some form parameters to avoid potential problems

* fix file extension duplication in rename function

* improve the error message in login function

---------

Co-authored-by: Andy Hsu <i@nn.ci>
2024-01-19 13:57:31 +08:00
8bccb69e8d fix(google_photo): add support for streaming video, range requests (#5905)
* Update util.go

Return mediaMetadata

* Update driver.go

Using width and height
2024-01-19 13:02:05 +08:00
0f29a811bf fix: s3 upload exceeded total allowed configured MaxUploadParts (close #5909) 2024-01-19 12:05:10 +08:00
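The S3 limit in question comes from multipart uploads: a fixed part size caps the object at partSize x MaxUploadParts, so very large files need a larger part size. A hedged sketch using aws-sdk-go's s3manager, with illustrative numbers rather than the values AList uses:

```go
package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/service/s3/s3manager"
)

// choosePartSize grows the part size until fileSize fits within MaxUploadParts (10000 by default).
func choosePartSize(fileSize int64) int64 {
	partSize := int64(s3manager.DefaultUploadPartSize) // 5 MiB
	for partSize*int64(s3manager.MaxUploadParts) < fileSize {
		partSize *= 2
	}
	return partSize
}

func main() {
	const gib = int64(1) << 30
	fmt.Println(choosePartSize(1 * gib))   // the default 5 MiB parts are enough
	fmt.Println(choosePartSize(100 * gib)) // part size must grow to stay under 10000 parts
	// The chosen size would then be set on the uploader, roughly:
	// uploader := s3manager.NewUploader(sess, func(u *s3manager.Uploader) { u.PartSize = choosePartSize(size) })
}
```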
442c2f77ea feat: add quqi driver (#5899 close #5251)
* feat: add `quqi` driver

* change signature of request function

* specific header for every storage

* todo: real upload

* fix upload method

* fix incorrect parameters for some request function calls

---------

Co-authored-by: Andy Hsu <i@nn.ci>
2024-01-19 10:59:56 +08:00
ce06f394f1 fix: missing salt of guest user (close #5737) 2024-01-17 14:15:34 +08:00
e3e790f461 feat(115): add QR code source selection (#5891)
* feat(115): add QR code source selection

closed #5386

* feat(115_share): add QR code source selection
2024-01-16 15:59:44 +08:00
f0e8c0e886 fix(chaoxing): JSON parsing error in content field (#5877)
* fix(chaoxing): JSON parsing error in `content` field

* fix(chaoxing): optimize `UnmarshalJSON` implementation

* fix(chaoxing): use `objectID` when  is empty
2024-01-14 12:53:31 +08:00
86b35ae5cf fix(deps): update module golang.org/x/oauth2 to v0.16.0 (#5865)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-12 11:54:35 +08:00
4930f85b90 fix(deps): update module golang.org/x/crypto to v0.18.0 (#5863)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-11 20:56:19 +08:00
85fe65951d fix(deps): update golang.org/x/exp digest to 0dcbfd6 (#5862)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-11 20:39:16 +08:00
1381e8fb27 fix(deps): update module github.com/aws/aws-sdk-go to v1.49.18 (#5848)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-11 20:24:20 +08:00
292bbe94ee fix(feijipan): incorrect address of download link (close #5859) 2024-01-11 10:16:14 +08:00
bb6747de4e docs: add feijipan to Readme 2024-01-11 10:15:16 +08:00
555ef0eb1a feat: add feijipan driver (close #5856) 2024-01-10 16:58:10 +08:00
bff56ffd0f ci: add android target to release build (#5844)
* build: build android

Signed-off-by: lateautumn233 <lateautumn233@foxmail.com>

* ci: add `android` target to release build

Signed-off-by: lateautumn233 <lateautumn233@foxmail.com>

---------

Signed-off-by: lateautumn233 <lateautumn233@foxmail.com>
2024-01-09 19:00:11 +08:00
34b73b94f7 feat(local): allow specifying the recycle bin path (close #5832) 2024-01-09 18:51:21 +08:00
434892f135 fix(deps): update module github.com/aliyun/aliyun-oss-go-sdk to v3 (#5800)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-09 17:14:31 +08:00
Mmx
e6e2d03ba1 perf: make docker release 10 times faster (#5803)
* build: improve multistage docker build

* build: add dockerfile for ci

* build: add BuildDockerMultiplatform function in build.sh for ci

* ci: change build method

* build: add missing mod download command to the Dockerfile

* build: revert changes made ffmpeg installed

* build: use musl build for docker release

* ci: apply to dev version

* fix: don't login on pr

* fix: don't build_docker_with_aria2 on pr

---------

Co-authored-by: Andy Hsu <i@nn.ci>
2024-01-05 15:52:30 +08:00
28bb3f6310 fix(deps): update module golang.org/x/image to v0.15.0 (#5825)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-05 15:35:41 +08:00
fb729c1846 fix(deps): update module github.com/aws/aws-sdk-go to v1.49.15 (#5816)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-05 15:34:16 +08:00
4448e08f5b fix(net): Buf use Mutex (#5823)
Co-authored-by: Andy Hsu <i@nn.ci>
2024-01-05 12:20:08 +08:00
8020d42b10 fix: panic due to send on closed channel (close #5729) 2024-01-05 11:41:53 +08:00
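Sending on a closed channel always panics in Go; the usual mitigation is to make the close idempotent and signal shutdown with a separate done channel instead of closing the data channel from the sender side. A generic sketch of that pattern (not the project's actual code):

```go
package main

import (
	"fmt"
	"sync"
)

// safeQueue refuses new sends after Close instead of panicking.
type safeQueue struct {
	ch        chan int
	done      chan struct{}
	closeOnce sync.Once
}

func newSafeQueue(n int) *safeQueue {
	return &safeQueue{ch: make(chan int, n), done: make(chan struct{})}
}

// Push enqueues v; after Close it returns false. It never panics because the
// data channel itself is never closed by a sender.
func (q *safeQueue) Push(v int) bool {
	select {
	case <-q.done:
		return false // already closed
	default:
	}
	select {
	case <-q.done:
		return false
	case q.ch <- v:
		return true
	}
}

// Close is safe to call multiple times and from multiple goroutines.
func (q *safeQueue) Close() {
	q.closeOnce.Do(func() { close(q.done) })
}

func main() {
	q := newSafeQueue(4)
	fmt.Println(q.Push(1)) // true
	q.Close()
	q.Close()              // idempotent
	fmt.Println(q.Push(2)) // false, no panic
}
```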
9d5fb7f595 feat: add ILanzou driver (#5810 close #5715)
* wip: basic request and login

* feat: impl list

* feat: impl link

* feat: impl mkdir, move, rename, delete

* feat: impl upload

* docs: add iLanzou to readme
2024-01-04 22:03:15 +08:00
126cfe9f93 fix(vtencent): only show 50 files (close #5805) 2024-01-04 21:54:39 +08:00
fd96a7ccf4 fix(deps): update golang.org/x/exp digest to be819d1 [skip ci] (#5807)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-04 21:02:22 +08:00
03b9b9a119 chore(deps): update docker/setup-qemu-action action to v3 (#5798)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-03 14:51:54 +08:00
03dbdfc0dd chore(deps): update docker/setup-buildx-action action to v3 (#5797)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-03 14:46:37 +08:00
2683621ed7 chore(deps): update docker/metadata-action action to v5 [skip ci] (#5795)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-03 14:46:09 +08:00
be537aa49b chore(deps): update docker/login-action action to v3 [skip ci] (#5794)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-03 14:45:53 +08:00
6f742a68cf chore(deps): update docker/build-push-action action to v5 [skip ci] (#5793)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-03 14:45:32 +08:00
97a4b8321d chore(deps): update actions/upload-artifact action to v4 (#5792)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-03 14:44:59 +08:00
8c432d3339 chore(deps): update actions/checkout action to v4 (#5788)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-02 17:43:22 +08:00
ff25e51f80 chore(deps): update actions/setup-go action to v5 (#5789)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-02 17:43:08 +08:00
88831b5d5a fix(deps): update module golang.org/x/oauth2 to v0.15.0 [skip ci] (#5785)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-02 15:44:45 +08:00
b97c9173af fix(deps): update module golang.org/x/image to v0.14.0 [skip ci] (#5784)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-02 15:42:38 +08:00
207c7e05fe fix(deps): update module github.com/spf13/cobra to v1.8.0 [skip ci] (#5783)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-02 15:42:17 +08:00
7db27e6da8 fix(deps): update module golang.org/x/time to v0.5.0 [skip ci] (#5786)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-02 15:41:52 +08:00
b5cc90cb5a fix(115): support null UserAgent (#5787) 2024-01-02 15:41:32 +08:00
8a427ddc49 fix(deps): update module github.com/go-webauthn/webauthn to v0.10.0 [skip ci] (#5782)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-02 14:54:38 +08:00
c36644a172 fix(deps): update module github.com/go-resty/resty/v2 to v2.11.0 (#5781)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-02 14:27:08 +08:00
45b1ff4a24 fix(deps): update module github.com/charmbracelet/bubbles to v0.17.1 [skip ci] (#5775)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-02 14:24:56 +08:00
a4a9675616 fix(deps): update module github.com/charmbracelet/bubbletea to v0.25.0 [skip ci] (#5776)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-02 14:12:54 +08:00
8531b23382 fix(deps): update module github.com/deckarep/golang-set/v2 to v2.6.0 [skip ci] (#5778)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-02 14:12:33 +08:00
2c15349ce4 fix(deps): update module github.com/gin-contrib/cors to v1.5.0 [skip ci] (#5779)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-02 14:12:13 +08:00
5afd65b65c fix(deps): update module github.com/aliyun/aliyun-oss-go-sdk to v2.2.10+incompatible (#5447)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-01 22:19:25 +08:00
e2434029f9 fix(deps): update module github.com/maruel/natural to v1.1.1 [skip ci] (#5771)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-01 22:18:51 +08:00
bdf7abe717 fix(deps): update module github.com/aws/aws-sdk-go to v1.49.13 [skip ci] (#5774)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-01 22:18:13 +08:00
2c8d003c2e fix(deps): update module github.com/djherbis/times to v1.6.0 [skip ci] (#5422)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-01 22:17:44 +08:00
a006f57637 fix(deps): update module google.golang.org/appengine to v1.6.8 [skip ci] (#5772)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-01 21:42:43 +08:00
be5d94cd11 fix(deps): update module golang.org/x/crypto to v0.17.0 [security] (#5768)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-01 21:42:03 +08:00
977b3cf9ab fix(deps): update golang.org/x/exp digest to 02704c9 [skip ci] (#5769)
* fix: missing modified in validate regexp

* fix(deps): update golang.org/x/exp digest to 02704c9

---------

Co-authored-by: Andy Hsu <i@nn.ci>
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-01 19:57:04 +08:00
182aacd309 fix(deps): update module github.com/gorilla/websocket to v1.5.1 [skip ci] (#5770)
* fix: missing modified in validate regexp

* fix(deps): update module github.com/gorilla/websocket to v1.5.1

---------

Co-authored-by: Andy Hsu <i@nn.ci>
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-01-01 19:55:31 +08:00
57bac9e0d2 fix: some missed regexp lib replacements 2024-01-01 18:44:59 +08:00
478470f609 feat!: replace regex package (close #5755) 2023-12-31 15:03:25 +08:00
6b8f35e7fa feat(alipan): replace domain (#5751 close #5747) 2023-12-31 14:29:14 +08:00
697a0ed2d3 feat: add ldap login support (#5706)
* feat: add ldap login support

* fix: ldap permission config group
2023-12-31 13:46:13 +08:00
299bfb4d7b feat(115): support 302 redirect (#5733) 2023-12-25 11:28:57 +08:00
3eca38e599 feat: add support for client-side discoverable WebAuthn login (#5722)
* Add support for client-side discoverable in begin login

Use `(*webauthn.WebAuthn).BeginDiscoverableLogin()` to handle client-side discoverable login.

* Upgrade github.com/go-webauthn/webauthn to v0.10.0

Upgrade the [go-webauthn/webauthn](https://github.com/go-webauthn/webauthn) library to the latest version.

The convenient finish-login function for discoverable credentials (`FinishDiscoverableLogin`) was added in v0.9.0. [^1]

---

[^1]: https://github.com/go-webauthn/webauthn/releases/tag/v0.9.0

* Add support for client-side discoverable in validating login

Use `(*webauthn.WebAuthn).FinishDiscoverableLogin()` to handle client-side discoverable login.

> **NOTE**:
- The first param `rawID` in this callback does not need to be checked; it is handled by the third-party webauthn library later.
- The `userHandle` param is equal to the ID returned by the `(User).WebAuthnID()` function.
2023-12-24 15:21:17 +08:00
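A rough server-side sketch of the flow this commit describes, based on go-webauthn/webauthn v0.10.x as the message indicates; the session storage, route names and lookup helper are assumptions, not AList's actual handlers, and the method signatures should be checked against the library:

```go
package main

import (
	"net/http"

	"github.com/go-webauthn/webauthn/webauthn"
)

var (
	w        *webauthn.WebAuthn
	sessions = map[string]*webauthn.SessionData{} // naive in-memory session store (assumption)
)

// beginLogin starts a client-side discoverable (usernameless) login.
func beginLogin(rw http.ResponseWriter, r *http.Request) {
	assertion, session, err := w.BeginDiscoverableLogin()
	if err != nil {
		http.Error(rw, err.Error(), http.StatusInternalServerError)
		return
	}
	sessions["login"] = session // key is a placeholder; a real app scopes this per client
	_ = assertion               // marshal to JSON and send to the browser
}

// finishLogin validates the assertion; the user is located by userHandle, not by a username field.
func finishLogin(rw http.ResponseWriter, r *http.Request) {
	session := sessions["login"]
	handler := func(rawID, userHandle []byte) (webauthn.User, error) {
		// rawID does not need to be checked here; the library verifies it later.
		// userHandle equals whatever (User).WebAuthnID() returned at registration time.
		return lookupUserByWebAuthnID(userHandle) // hypothetical lookup helper
	}
	if _, err := w.FinishDiscoverableLogin(handler, *session, r); err != nil {
		http.Error(rw, err.Error(), http.StatusUnauthorized)
		return
	}
	rw.WriteHeader(http.StatusOK)
}

// lookupUserByWebAuthnID is a stand-in for a database query.
func lookupUserByWebAuthnID(id []byte) (webauthn.User, error) { panic("not implemented") }

func main() {
	var err error
	w, err = webauthn.New(&webauthn.Config{
		RPDisplayName: "AList (example)",
		RPID:          "localhost",
		RPOrigins:     []string{"http://localhost:8080"},
	})
	if err != nil {
		panic(err)
	}
	http.HandleFunc("/webauthn/login/begin", beginLogin)
	http.HandleFunc("/webauthn/login/finish", finishLogin)
	_ = http.ListenAndServe(":8080", nil)
}
```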
ab216ed170 fix(onedrive): rename object in root folder (close #5468) 2023-12-17 22:58:26 +08:00
e91c42c9dc fix(alist_v3): timeout on upload (close #5465) 2023-12-17 15:45:27 +08:00
54f7b21a73 fix(123): api sign error (#5689 close #5083)
* fix:123 driver connect error

* feat: calculate sign with pure go

---------

Co-authored-by: tangminghao <tangminghao@hxzn.com>
Co-authored-by: Andy Hsu <i@nn.ci>
2023-12-17 15:21:32 +08:00
de56f926cf feat(139): support new personal cloud api (#5690)
Co-authored-by: Andy Hsu <i@nn.ci>
2023-12-16 16:56:45 +08:00
6d4ab57a0e build: enable cgo for win/arm64 [skip ci] 2023-12-15 18:22:16 +08:00
734d4b0354 ci: add darwin/arm64 target to dev build 2023-12-15 17:07:02 +08:00
74b20dedc3 fix: retry multipart file reset (#5693 close #5628) 2023-12-14 21:31:36 +08:00
83c2269330 fix(qbit): seed time doesn't take effect (close #5663) 2023-12-11 15:20:29 +08:00
296be88b5f fix: incorrect key of oidc username (close #5670) 2023-12-10 13:17:56 +08:00
026e944cbb feat: add task info to resp of add task api (close #5579) 2023-12-03 14:44:20 +08:00
8bdfc7ac8e fix(offline_download): don't wait for transfer task (close #5595) 2023-12-03 14:20:01 +08:00
e4a6b758dc docs: remove jetbrains in special sponsor [skip ci] 2023-12-03 12:57:35 +08:00
66b7fe1e1b fix: task cannot be retried manually (close #5599) 2023-11-30 20:44:05 +08:00
f475eb4401 fix: incorrect go-version on auto-lang 2023-11-30 12:37:25 +08:00
b99e709bdb fix(teambition): international upload (close #5360) 2023-11-29 22:51:03 +08:00
f4dcf4599c fix: add error handling for webdav mkcol according to RFC 4918 (#5581)
* feat: add error handling for mkcol method in webdav.go

* feat: update rfc reference

* fix: issue with incorrect error handling
2023-11-27 18:53:52 +08:00
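RFC 4918 section 9.3.1 assigns MKCOL specific failure codes: 405 when the target already exists, 409 when an intermediate collection is missing, 415 for an unsupported request body, and 507 when storage is exhausted. A generic mapping sketch along those lines (not the project's actual handler):

```go
package main

import (
	"errors"
	"fmt"
	"net/http"
	"os"
)

// mkcolStatus maps a filesystem error from creating a collection to the
// status codes RFC 4918 section 9.3.1 prescribes for MKCOL.
func mkcolStatus(err error) int {
	switch {
	case err == nil:
		return http.StatusCreated // 201
	case errors.Is(err, os.ErrExist):
		return http.StatusMethodNotAllowed // 405: the resource already exists
	case errors.Is(err, os.ErrNotExist):
		return http.StatusConflict // 409: an intermediate collection is missing
	case errors.Is(err, os.ErrPermission):
		return http.StatusForbidden // 403
	default:
		return http.StatusInternalServerError
	}
}

func main() {
	fmt.Println(mkcolStatus(nil))              // 201
	fmt.Println(mkcolStatus(os.ErrExist))      // 405
	fmt.Println(mkcolStatus(os.ErrNotExist))   // 409
	fmt.Println(mkcolStatus(os.ErrPermission)) // 403
}
```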
54e75d7287 feat: enabled sign_all by default 2023-11-25 20:27:23 +08:00
d142fc3449 ci: upgrade golang version 2023-11-25 16:09:38 +08:00
f23567199b chore: go mod tidy 2023-11-25 15:12:25 +08:00
1420492d81 ci: go get after replacing go mod 2023-11-25 15:11:29 +08:00
b88067ea2f ci: fix docker build error: 'pread64' undeclared here 2023-11-25 14:42:33 +08:00
d5f381ef6f chore: upgrade golang version 2023-11-25 14:22:13 +08:00
68af284dad fix: task popped but not executed (close #5565) 2023-11-25 14:15:17 +08:00
d26887d211 fix: content-type conflicts with #5420 2023-11-24 19:22:19 +08:00
3f405de6a9 feat: customize allow origins, headers and methods 2023-11-24 19:18:34 +08:00
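Since gin-contrib/cors already appears in this changeset's dependency list, here is a sketch of what configurable origins, headers and methods typically look like with it; the specific values are placeholders, not AList's defaults:

```go
package main

import (
	"github.com/gin-contrib/cors"
	"github.com/gin-gonic/gin"
)

func main() {
	r := gin.Default()

	// Placeholder policy: in a configurable setup these slices would be read
	// from the application's settings rather than hard-coded.
	cfg := cors.DefaultConfig()
	cfg.AllowOrigins = []string{"https://example.com"}
	cfg.AllowHeaders = []string{"Authorization", "Content-Type", "Range"}
	cfg.AllowMethods = []string{"GET", "POST", "PUT", "DELETE", "OPTIONS"}

	r.Use(cors.New(cfg))
	r.GET("/ping", func(c *gin.Context) { c.String(200, "pong") })
	_ = r.Run(":8080")
}
```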
6100647310 fix: reflected XSS vulnerability in plist api 2023-11-24 16:46:48 +08:00
34746e951c feat(offline_download): add simple http tool (close #4002) 2023-11-24 16:26:05 +08:00
b6134dc515 feat: allow keep files in offline download (close #4678) 2023-11-24 15:02:36 +08:00
d455a232ef fix(vtencent): work around files reported with size 0 whose actual size is not 0
- allow using another proxy for vtencent and chaoxing
2023-11-23 22:35:07 +08:00
fe34d30d17 feat(crypt): add show hidden option (#5554) 2023-11-23 21:50:16 +08:00
0fbb986ba9 fix(aliyundrive_open): mitigation measures for 15-minute limit (#5560 close #5547)
* fix(aliyundrive_open): mitigation measures for AliOpen's 15-minute limit.

I conducted small-scale tests, which seem to have no significant negative impact. If the 15-minute issue still occurs, further measures will be needed. Methods like local proxy can be attempted.

* chore(aliyundrive_open): change cache of the link to 1 minute

---------

Co-authored-by: Andy Hsu <i@nn.ci>
2023-11-23 21:49:16 +08:00
1280070438 feat: add chaoxing and vtencent driver (#5526 close #3347)
* add chaoxing and vtencent

* add vtencent put file

* add sha1 to transfer files instantly

* simplified upload file code

* setting onlyproxy

* fix get files modifyDate bug
2023-11-23 21:40:16 +08:00
d7f66138eb docs: add sponsor VidHub [skip ci] 2023-11-22 15:09:39 +08:00
b2890f05ab feat: retry all failed task (close #5242) 2023-11-21 15:54:42 +08:00
7583c4d734 feat: customize workers and retry of task (close #5493 fix #5274) 2023-11-21 15:51:57 +08:00
11a30c5044 feat: refactor task module 2023-11-20 18:01:51 +08:00
de9647a5fa chore: remove useless code 2023-11-19 20:05:09 +08:00
8d5283604c ci: add short sha to artifact 2023-11-19 15:21:25 +08:00
867accafd1 fix(local): video file thumbnails not displaying on iOS Safari (#5420)
* perf(webdav): support for cookies on webdav drive

* fix(local): video file thumbnails not displaying on iOS Safari
2023-11-18 22:36:41 +08:00
6fc6751463 feat: support using external dist files (close #5531) 2023-11-18 19:56:22 +08:00
f904596cbc chore: remove refs to deprecated io/ioutil (#5519)
Signed-off-by: guoguangwu <guoguangwu@magic-shield.com>
2023-11-16 05:16:15 -06:00
3d51845f57 feat: invalidate old token after changing the password (close #5515) 2023-11-13 15:22:42 +08:00
a7421d8fc2 fix(deps): update module github.com/aws/aws-sdk-go to v1.46.7 (#5068)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-11-12 15:14:27 +08:00
55a14bc271 fix(mopan): 302 Redirect (#5505 close #5502)
* fix(mopan):302 Redirect

* fix(mopan): do not forget to close the body

---------

Co-authored-by: Andy Hsu <i@nn.ci>
2023-11-12 15:13:55 +08:00
91f51f17d0 feat(webdav): add tls_insecure_skip_verify field (close #5490) 2023-11-10 15:38:23 +08:00
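In Go such an option normally ends up on the HTTP transport; a minimal sketch of what a tls_insecure_skip_verify toggle maps to (the field name comes from the commit title, the rest is generic net/http and not the driver's actual code):

```go
package main

import (
	"crypto/tls"
	"fmt"
	"net/http"
)

// newClient builds an HTTP client; when insecureSkipVerify is true the server's
// TLS certificate is not validated (useful for self-signed WebDAV endpoints,
// at the cost of man-in-the-middle protection).
func newClient(insecureSkipVerify bool) *http.Client {
	return &http.Client{
		Transport: &http.Transport{
			TLSClientConfig: &tls.Config{InsecureSkipVerify: insecureSkipVerify},
		},
	}
}

func main() {
	client := newClient(true) // mirrors enabling tls_insecure_skip_verify
	resp, err := client.Get("https://self-signed.example.com/dav/")
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
```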
4355dae491 fix: incorrect content-type of apk files (close #5385) 2023-11-06 18:20:25 +08:00
da1c7a4c23 feat: add 115_share driver (#5481 close #5384)
This update introduces the ability to mount 115 share links.
Currently, only listing and downloading are supported. Note that a login and a share link are required for this feature to work.

Close #5384
2023-11-06 16:58:57 +08:00
769281bd40 feat: refactor offline download (#5408 close #4108)
* wip: refactor offline download (#5331)

* base tool

* working: aria2

* refactor: change type of percentage to float64

* wip: adapt aria2

* wip: use items in offline_download

* wip: use tool manager

* wip: adapt qBittorrent

* chore: fix typo

* Squashed commit of the following:

commit 4fc0a77565
Author: Andy Hsu <i@nn.ci>
Date:   Fri Oct 20 21:06:25 2023 +0800

    fix(baidu_netdisk): upload file > 4GB (close #5392)

commit aaffaee2b5
Author: gmugu <94156510@qq.com>
Date:   Thu Oct 19 19:17:53 2023 +0800

    perf(webdav): support request with cookies (#5391)

commit 8ef8023c20
Author: NewbieOrange <NewbieOrange@users.noreply.github.com>
Date:   Thu Oct 19 19:17:09 2023 +0800

    fix(aliyundrive_open): upload progress for normal upload (#5398)

commit cdfbe6dcf2
Author: foxxorcat <95907542+foxxorcat@users.noreply.github.com>
Date:   Wed Oct 18 16:27:07 2023 +0800

    fix: hash gcid empty file (#5394)

commit 94d028743a
Author: Andy Hsu <i@nn.ci>
Date:   Sat Oct 14 13:17:51 2023 +0800

    ci: remove `pr-welcome` label when close issue [skip ci]

commit 7f7335435c
Author: itsHenry <2671230065@qq.com>
Date:   Sat Oct 14 13:12:46 2023 +0800

    feat(cloudreve): support thumbnail (#5373 close #5348)

    * feat(cloudreve): support thumbnail

    * chore: remove unnecessary code

commit b9e192b29c
Author: foxxorcat <95907542+foxxorcat@users.noreply.github.com>
Date:   Thu Oct 12 20:57:12 2023 +0800

    fix(115): limit request rate (#5367 close #5275)

    * fix(115):limit request rate

    * chore(115): fix unit of `limit_rate`

    ---------

    Co-authored-by: Andy Hsu <i@nn.ci>

commit 69a98eaef6
Author: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Date:   Wed Oct 11 22:01:55 2023 +0800

    fix(deps): update module github.com/aliyun/aliyun-oss-go-sdk to v2.2.9+incompatible (#5141)

    Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>

commit 1ebc96a4e5
Author: Andy Hsu <i@nn.ci>
Date:   Tue Oct 10 18:32:00 2023 +0800

    fix(wopan): fatal error concurrent map writes (close #5352)

commit 66e2324cac
Author: Andy Hsu <i@nn.ci>
Date:   Tue Oct 10 18:23:11 2023 +0800

    chore(deps): upgrade dependencies

commit 7600dc28df
Author: Andy Hsu <i@nn.ci>
Date:   Tue Oct 10 18:13:58 2023 +0800

    fix(aliyundrive_open): change default api to raw server (close #5358)

commit 8ef89ad0a4
Author: foxxorcat <95907542+foxxorcat@users.noreply.github.com>
Date:   Tue Oct 10 18:08:27 2023 +0800

    fix(baidu_netdisk): hash and `error 2` (#5356)

    * fix(baidu):hash and error:2

    * fix:invalid memory address

commit 35d672217d
Author: jeffmingup <1960588251@qq.com>
Date:   Sun Oct 8 19:29:45 2023 +0800

    fix(onedrive_app): incorrect api on `_accessToken` (#5346)

commit 1a283bb272
Author: foxxorcat <95907542+foxxorcat@users.noreply.github.com>
Date:   Fri Oct 6 16:04:39 2023 +0800

    feat(google_drive): add `hash_info`, `ctime`, `thumbnail` (#5334)

commit a008f54f4d
Author: nkh0472 <67589323+nkh0472@users.noreply.github.com>
Date:   Thu Oct 5 13:10:51 2023 +0800

    docs: minor language improvements (#5329) [skip ci]

* fix: adapt update progress type

* Squashed commit of the following:

commit 65c5ec0c34
Author: itsHenry <2671230065@qq.com>
Date:   Sat Nov 4 13:35:09 2023 +0800

    feat(cloudreve): folder size count and switch (#5457 close #5395)

commit a6325967d0
Author: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Date:   Mon Oct 30 15:11:20 2023 +0800

    fix(deps): update module github.com/charmbracelet/lipgloss to v0.9.1 (#5234)

    Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>

commit 4dff49470a
Author: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Date:   Mon Oct 30 15:10:36 2023 +0800

    fix(deps): update golang.org/x/exp digest to 7918f67 (#5366)

    Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>

commit cc86d6f3d1
Author: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Date:   Sun Oct 29 14:45:55 2023 +0800

    fix(deps): update module golang.org/x/net to v0.17.0 [security] (#5370)

    Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>

commit c0f9c8ebaf
Author: Andy Hsu <i@nn.ci>
Date:   Thu Oct 26 19:21:09 2023 +0800

    feat: add ignore direct link params (close #5434)
2023-11-06 16:56:55 +08:00
3bbdd4fa89 fix(115): fix driver package import and variable names (#5482) 2023-11-06 16:53:57 +08:00
68f440abdb fix(weiyun): unmarshal overflow (#5459) 2023-11-05 22:41:14 +08:00
65c5ec0c34 feat(cloudreve): folder size count and switch (#5457 close #5395) 2023-11-04 13:35:09 +08:00
a6325967d0 fix(deps): update module github.com/charmbracelet/lipgloss to v0.9.1 (#5234)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-10-30 15:11:20 +08:00
4dff49470a fix(deps): update golang.org/x/exp digest to 7918f67 (#5366)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-10-30 15:10:36 +08:00
cc86d6f3d1 fix(deps): update module golang.org/x/net to v0.17.0 [security] (#5370)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-10-29 14:45:55 +08:00
c0f9c8ebaf feat: add ignore direct link params (close #5434) 2023-10-26 19:21:09 +08:00
4fc0a77565 fix(baidu_netdisk): upload file > 4GB (close #5392) 2023-10-20 21:06:25 +08:00
aaffaee2b5 perf(webdav): support request with cookies (#5391) 2023-10-19 19:17:53 +08:00
8ef8023c20 fix(aliyundrive_open): upload progress for normal upload (#5398) 2023-10-19 19:17:09 +08:00
cdfbe6dcf2 fix: hash gcid empty file (#5394) 2023-10-18 16:27:07 +08:00
94d028743a ci: remove pr-welcome label when close issue [skip ci] 2023-10-14 13:17:51 +08:00
7f7335435c feat(cloudreve): support thumbnail (#5373 close #5348)
* feat(cloudreve): support thumbnail

* chore: remove unnecessary code
2023-10-14 13:12:46 +08:00
b9e192b29c fix(115): limit request rate (#5367 close #5275)
* fix(115):limit request rate

* chore(115): fix unit of `limit_rate`

---------

Co-authored-by: Andy Hsu <i@nn.ci>
2023-10-12 20:57:12 +08:00
69a98eaef6 fix(deps): update module github.com/aliyun/aliyun-oss-go-sdk to v2.2.9+incompatible (#5141)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-10-11 22:01:55 +08:00
1ebc96a4e5 fix(wopan): fatal error concurrent map writes (close #5352) 2023-10-10 18:32:00 +08:00
66e2324cac chore(deps): upgrade dependencies 2023-10-10 18:23:11 +08:00
7600dc28df fix(aliyundrive_open): change default api to raw server (close #5358) 2023-10-10 18:13:58 +08:00
8ef89ad0a4 fix(baidu_netdisk): hash and error 2 (#5356)
* fix(baidu):hash and error:2

* fix:invalid memory address
2023-10-10 18:08:27 +08:00
35d672217d fix(onedrive_app): incorrect api on _accessToken (#5346) 2023-10-08 19:29:45 +08:00
1a283bb272 feat(google_drive): add hash_info, ctime, thumbnail (#5334) 2023-10-06 16:04:39 +08:00
a008f54f4d docs: minor language improvements (#5329) [skip ci] 2023-10-05 13:10:51 +08:00
3d7f79cba8 docs: change domain of contributors image [skip ci] 2023-10-03 17:34:24 +08:00
9ff83a7950 feat: add header to meta (ref #5317) 2023-10-02 16:43:29 +08:00
e719a1a456 feat(sso): custom username key for OIDC (close #5169) 2023-10-02 14:42:40 +08:00
40a6fcbdff ci: do not stale issue with working or pr-welcome label [skip ci] 2023-10-02 14:13:11 +08:00
0fd51646f6 feat(onedrive): custom host for download link (close #5310) 2023-10-02 14:07:47 +08:00
e8958019d9 fix(115): allow use proxy directly (close #5324) 2023-10-02 14:00:13 +08:00
e1ef690784 fix(terabox): encode parameters for filemanager api (#5308) 2023-10-01 16:58:29 +08:00
4024050dd0 chore: fix typo (#5316) 2023-10-01 16:58:00 +08:00
eb918658f0 fix(deps): update module github.com/ipfs/go-ipfs-api to v0.7.0 (#5247)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-09-30 22:58:19 +08:00
fb13dae136 feat(crypt): optional pre-generated thumbnails (#5284) 2023-09-27 13:57:10 +08:00
6b67a36d63 fix(terabox): auto refresh JsToken (close #5277) 2023-09-25 16:38:05 +08:00
a64dd4885e fix(139): fixed time zone (close #5263) 2023-09-22 16:54:16 +08:00
0f03a747d8 ci: cancel previous workflow run 2023-09-22 16:53:07 +08:00
30977cdc6d feat: sso compatibility mode (#5260) 2023-09-22 16:45:51 +08:00
106cf720c1 fix(baidu_netdisk): retry logic in request (close #5262) 2023-09-22 16:27:44 +08:00
882112ed1c feat: add hash_info field to /fs/get (close #5259) 2023-09-22 15:20:04 +08:00
2a6ab77295 fix(115): data race in Link (#5253) 2023-09-21 13:39:07 +08:00
f0981a0c8d chore(virtual): implement the driver interface with result 2023-09-20 09:02:56 +08:00
57eea4db17 fix(deps): update module github.com/go-resty/resty/v2 to v2.8.0 (#5244)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-09-20 08:51:34 +08:00
234852ca61 fix(deps): update module github.com/pkg/sftp to v1.13.6 (#5041)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-09-19 20:02:42 +08:00
809105b67e fix(deps): update module github.com/blevesearch/bleve/v2 to v2.3.10 (#5232)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-09-17 15:57:29 +08:00
02e8c31506 fix(deps): update golang.org/x/exp digest to 9212866 (#5205)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-09-16 23:21:42 +08:00
19b39a5c04 fix(onedrive): overwrite upload for big files (close #5217 in #5218)
See https://learn.microsoft.com/zh-cn/onedrive/developer/rest-api/api/driveitem_createuploadsession
2023-09-14 13:38:07 +08:00
28e2731594 fix: clear cache recursively on deleting the folder (close #5209) 2023-09-13 16:06:17 +08:00
b1a279cbcc feat(139): implement MoveResult interface (close #5130) 2023-09-13 15:56:13 +08:00
352a6a741a feat(webdav): support copy directly without task (close #5206) 2023-09-13 15:45:57 +08:00
109015567a fix(deps): update module golang.org/x/oauth2 to v0.12.0 (#5058)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-09-12 12:52:48 +08:00
9e0fa77ca2 feat: add 123 link driver (close #4924) 2023-09-10 16:50:10 +08:00
335b11c698 chore: implement the driver interface with obj return [skip ci] 2023-09-08 15:25:49 +08:00
8e433355e6 fix(terabox): missing JsToken field on request (close #5189) 2023-09-08 15:18:56 +08:00
3504f017b9 fix(upload): memory leak on form upload as task (close #5185) 2023-09-07 15:51:52 +08:00
cd2f8077fa chore: enable all pprof handle on debug 2023-09-07 14:56:50 +08:00
d5b68a91d2 fix(webdav): optimize HEAD request (close #5182) 2023-09-06 16:32:51 +08:00
623c7dcea5 fix(189pc): get real link after redirect 2023-09-06 16:02:28 +08:00
ecbd6d86cd fix(lanzou): sub file in share folder need pwd (#5184) 2023-09-06 14:48:12 +08:00
7200344ace feat: adapt hash feature for some drivers (#5180)
* feat(pikpak,thunder): adapt gcid hash

* chore(weiyun): add note

* feat(baidu_netdisk): adapt rapid upload

* feat(baidu_photo): adapt hash

* feat(189pc): adapt rapid upload

* feat(mopan): adapt ctime

* feat(139): adapt hash and ctime

---------

Co-authored-by: Andy Hsu <i@nn.ci>
2023-09-06 14:46:35 +08:00
b313ac4daa fix(crypt): fix 139cloud hack (#5178)
(cherry picked from commit 18bf64af47e58cc69cdd2e598de9c19538a7bf78)
2023-09-06 14:12:01 +08:00
f2f312b43a fix: http response body not closed on status >= 400 (close #5163) 2023-09-05 15:46:16 +08:00
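The underlying Go rule is that a non-nil *http.Response body must be closed regardless of the status code, otherwise the connection and its buffers leak. A generic illustration of the pattern such a fix enforces:

```go
package main

import (
	"fmt"
	"io"
	"net/http"
)

func fetch(url string) ([]byte, error) {
	resp, err := http.Get(url)
	if err != nil {
		return nil, err // only here is there no body to close
	}
	// Close the body even when we are about to bail out on a 4xx/5xx status;
	// forgetting this on the error path is exactly the kind of leak being fixed.
	defer resp.Body.Close()

	if resp.StatusCode >= 400 {
		return nil, fmt.Errorf("request failed: %s", resp.Status)
	}
	return io.ReadAll(resp.Body)
}

func main() {
	if _, err := fetch("https://example.com/missing"); err != nil {
		fmt.Println(err)
	}
}
```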
6f6d20e1ba fix: force_https not take effect on noRoute (close #5167) 2023-09-05 13:05:46 +08:00
3231c3d930 perf(db): release database before exit 2023-09-05 13:04:27 +08:00
b604e21c69 feat(webdav): support http chunked request (close #5161 in #5162)
However, we do not recommend omitting the Content-Length header when putting files
2023-09-05 13:03:29 +08:00
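The note above refers to HTTP PUTs whose body length is unknown up front: Go's client, like most, then sends Transfer-Encoding: chunked and no Content-Length, and the server has to accept the stream without knowing the final size. A small sketch of both variants against a hypothetical WebDAV URL:

```go
package main

import (
	"bytes"
	"io"
	"net/http"
	"strings"
)

func put(url string, body io.Reader) (*http.Response, error) {
	req, err := http.NewRequest(http.MethodPut, url, body)
	if err != nil {
		return nil, err
	}
	return http.DefaultClient.Do(req)
}

func main() {
	const url = "https://dav.example.com/remote.php/file.txt" // placeholder endpoint

	// bytes.Reader has a known size, so the client sends a Content-Length header.
	if resp, err := put(url, bytes.NewReader([]byte("hello"))); err == nil {
		resp.Body.Close()
	}

	// A reader whose length net/http cannot see makes the client fall back to
	// Transfer-Encoding: chunked, the case the commit adds server support for.
	unknownLen := io.MultiReader(strings.NewReader("hel"), strings.NewReader("lo"))
	if resp, err := put(url, unknownLen); err == nil {
		resp.Body.Close()
	}
}
```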
231 changed files with 9586 additions and 2429 deletions

.github/stale.yml (vendored, 2 changes)

@ -6,6 +6,8 @@ daysUntilClose: 20
exemptLabels:
- accepted
- security
- working
- pr-welcome
# Label to use when marking an issue as stale
staleLabel: stale
# Comment to post when marking an issue as stale. Set to `false` to disable


@ -11,27 +11,31 @@ on:
- 'cmd/lang.go'
workflow_dispatch:
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
auto_lang:
strategy:
matrix:
platform: [ ubuntu-latest ]
go-version: [ '1.20' ]
go-version: [ '1.21' ]
name: auto generate lang.json
runs-on: ${{ matrix.platform }}
steps:
- name: Setup go
uses: actions/setup-go@v4
uses: actions/setup-go@v5
with:
go-version: ${{ matrix.go-version }}
- name: Checkout alist
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
path: alist
- name: Checkout alist-web
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
repository: 'alist-org/alist-web'
ref: main


@ -6,22 +6,29 @@ on:
pull_request:
branches: [ 'main' ]
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
build:
strategy:
matrix:
platform: [ubuntu-latest]
go-version: [ '1.20' ]
go-version: [ '1.21' ]
name: Build
runs-on: ${{ matrix.platform }}
steps:
- name: Setup Go
uses: actions/setup-go@v4
uses: actions/setup-go@v5
with:
go-version: ${{ matrix.go-version }}
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
- uses: benjlevesque/short-sha@v2.2
id: short-sha
- name: Install dependencies
run: |
@ -35,7 +42,7 @@ jobs:
bash build.sh dev
- name: Upload artifact
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: alist
name: alist_${{ env.SHA }}
path: dist


@ -3,48 +3,88 @@ name: build_docker
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
build_docker:
name: Build docker
name: Build Docker
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Docker meta
id: meta
uses: docker/metadata-action@v4
uses: docker/metadata-action@v5
with:
images: xhofe/alist
- name: Replace release with dev
run: |
sed -i 's/release/dev/g' Dockerfile
- name: Docker meta with ffmpeg
id: meta-ffmpeg
uses: docker/metadata-action@v5
with:
images: xhofe/alist
flavor: |
suffix=-ffmpeg,onlatest=true
- uses: actions/setup-go@v4
with:
go-version: 'stable'
- name: Build go binary
run: bash build.sh dev docker-multiplatform
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Login to DockerHub
uses: docker/login-action@v2
if: github.event_name == 'push'
uses: docker/login-action@v3
with:
username: xhofe
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
id: docker_build
uses: docker/build-push-action@v4
uses: docker/build-push-action@v5
with:
context: .
push: true
file: Dockerfile.ci
push: ${{ github.event_name == 'push' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
platforms: linux/amd64,linux/arm64
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x
- name: Replace dockerfile tag
run: |
sed -i -e "s/latest/main/g" Dockerfile.ffmpeg
- name: Build and push with ffmpeg
id: docker_build_ffmpeg
uses: docker/build-push-action@v5
with:
file: Dockerfile.ffmpeg
push: ${{ github.event_name == 'push' }}
tags: ${{ steps.meta-ffmpeg.outputs.tags }}
labels: ${{ steps.meta-ffmpeg.outputs.labels }}
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x
build_docker_with_aria2:
needs: build_docker
name: Build docker with aria2
runs-on: ubuntu-latest
if: github.event_name == 'push'
steps:
- name: Checkout repo
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
repository: alist-org/with_aria2
ref: main
@ -62,4 +102,4 @@ jobs:
with:
github_token: ${{ secrets.MY_TOKEN }}
branch: main
repository: alist-org/with_aria2
repository: alist-org/with_aria2


@ -11,7 +11,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
fetch-depth: 0
- run: npx changelogithub # or changelogithub@0.12 if ensure the stable result


@ -14,4 +14,4 @@ jobs:
actions: 'remove-labels'
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
labels: 'working'
labels: 'working,pr-welcome'


@ -9,7 +9,7 @@ jobs:
strategy:
matrix:
platform: [ ubuntu-latest ]
go-version: [ '1.20' ]
go-version: [ '1.21' ]
name: Release
runs-on: ${{ matrix.platform }}
steps:
@ -21,12 +21,12 @@ jobs:
prerelease: true
- name: Setup Go
uses: actions/setup-go@v4
uses: actions/setup-go@v5
with:
go-version: ${{ matrix.go-version }}
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
fetch-depth: 0
@ -53,7 +53,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
repository: alist-org/desktop-release
ref: main

.github/workflows/release_android.yml (new file, vendored, 34 changes)

@ -0,0 +1,34 @@
name: release_android
on:
release:
types: [ published ]
jobs:
release_android:
strategy:
matrix:
platform: [ ubuntu-latest ]
go-version: [ '1.21' ]
name: Release
runs-on: ${{ matrix.platform }}
steps:
- name: Setup Go
uses: actions/setup-go@v5
with:
go-version: ${{ matrix.go-version }}
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Build
run: |
bash build.sh release android
- name: Upload assets
uses: softprops/action-gh-release@v1
with:
files: build/compress/*


@ -11,43 +11,70 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
- uses: actions/setup-go@v4
with:
go-version: 'stable'
- name: Build go binary
run: bash build.sh release docker-multiplatform
- name: Docker meta
id: meta
uses: docker/metadata-action@v4
uses: docker/metadata-action@v5
with:
images: xhofe/alist
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Login to DockerHub
uses: docker/login-action@v2
uses: docker/login-action@v3
with:
username: xhofe
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
id: docker_build
uses: docker/build-push-action@v4
uses: docker/build-push-action@v5
with:
context: .
file: Dockerfile.ci
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x
- name: Docker meta with ffmpeg
id: meta-ffmpeg
uses: docker/metadata-action@v5
with:
images: xhofe/alist
flavor: |
latest=true
suffix=-ffmpeg,onlatest=true
- name: Build and push with ffmpeg
id: docker_build_ffmpeg
uses: docker/build-push-action@v5
with:
file: Dockerfile.ffmpeg
push: true
tags: ${{ steps.meta-ffmpeg.outputs.tags }}
labels: ${{ steps.meta-ffmpeg.outputs.labels }}
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x
release_docker_with_aria2:
needs: release_docker
name: Release docker with aria2
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
repository: alist-org/with_aria2
ref: main


@ -9,18 +9,18 @@ jobs:
strategy:
matrix:
platform: [ ubuntu-latest ]
go-version: [ '1.20' ]
go-version: [ '1.21' ]
name: Release
runs-on: ${{ matrix.platform }}
steps:
- name: Setup Go
uses: actions/setup-go@v4
uses: actions/setup-go@v5
with:
go-version: ${{ matrix.go-version }}
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
fetch-depth: 0


@ -9,18 +9,18 @@ jobs:
strategy:
matrix:
platform: [ ubuntu-latest ]
go-version: [ '1.20' ]
go-version: [ '1.21' ]
name: Release
runs-on: ${{ matrix.platform }}
steps:
- name: Setup Go
uses: actions/setup-go@v4
uses: actions/setup-go@v5
with:
go-version: ${{ matrix.go-version }}
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
fetch-depth: 0


@ -1,18 +1,23 @@
FROM alpine:3.18 as builder
FROM alpine:edge as builder
LABEL stage=go-builder
WORKDIR /app/
RUN apk add --no-cache bash curl gcc git go musl-dev
COPY go.mod go.sum ./
RUN go mod download
COPY ./ ./
RUN apk add --no-cache bash curl gcc git go musl-dev; \
bash build.sh release docker
RUN bash build.sh release docker
FROM alpine:3.18
FROM alpine:edge
LABEL MAINTAINER="i@nn.ci"
VOLUME /opt/alist/data/
WORKDIR /opt/alist/
COPY --from=builder /app/bin/alist ./
COPY entrypoint.sh /entrypoint.sh
RUN apk add --no-cache bash ca-certificates su-exec tzdata; \
chmod +x /entrypoint.sh
RUN apk update && \
apk upgrade --no-cache && \
apk add --no-cache bash ca-certificates su-exec tzdata; \
chmod +x /entrypoint.sh && \
rm -rf /var/cache/apk/*
ENV PUID=0 PGID=0 UMASK=022
EXPOSE 5244 5245
CMD [ "/entrypoint.sh" ]
CMD [ "/entrypoint.sh" ]

Dockerfile.ci (new file, 16 changes)

@ -0,0 +1,16 @@
FROM alpine:edge
ARG TARGETPLATFORM
LABEL MAINTAINER="i@nn.ci"
VOLUME /opt/alist/data/
WORKDIR /opt/alist/
COPY /${TARGETPLATFORM}/alist ./
COPY entrypoint.sh /entrypoint.sh
RUN apk update && \
apk upgrade --no-cache && \
apk add --no-cache bash ca-certificates su-exec tzdata; \
chmod +x /entrypoint.sh && \
rm -rf /var/cache/apk/* && \
/entrypoint.sh version
ENV PUID=0 PGID=0 UMASK=022
EXPOSE 5244 5245
CMD [ "/entrypoint.sh" ]

Dockerfile.ffmpeg (new file, 4 changes)

@ -0,0 +1,4 @@
FROM xhofe/alist:latest
RUN apk update && \
apk add --no-cache ffmpeg \
rm -rf /var/cache/apk/*


@ -43,9 +43,9 @@ English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing]
## Features
- [x] Multiple storage
- [x] Multiple storages
- [x] Local storage
- [x] [Aliyundrive](https://www.aliyundrive.com/)
- [x] [Aliyundrive](https://www.alipan.com/)
- [x] OneDrive / Sharepoint ([global](https://www.office.com/), [cn](https://portal.partner.microsoftonline.cn),de,us)
- [x] [189cloud](https://cloud.189.cn) (Personal, Family)
- [x] [GoogleDrive](https://drive.google.com/)
@ -66,7 +66,8 @@ English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing]
- [x] [Quark](https://pan.quark.cn)
- [x] [Thunder](https://pan.xunlei.com)
- [x] [Lanzou](https://www.lanzou.com/)
- [x] [Aliyundrive share](https://www.aliyundrive.com/)
- [x] [ILanzou](https://www.ilanzou.com/)
- [x] [Aliyundrive share](https://www.alipan.com/)
- [x] [Google photo](https://photos.google.com/)
- [x] [Mega.nz](https://mega.nz)
- [x] [Baidu photo](https://photo.baidu.com/)
@ -74,6 +75,7 @@ English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing]
- [x] [115](https://115.com/)
- [X] Cloudreve
- [x] [Dropbox](https://www.dropbox.com/)
- [x] [FeijiPan](https://www.feijipan.com/)
- [x] Easy to deploy and out-of-the-box
- [x] File preview (PDF, markdown, code, plain text, ...)
- [x] Image preview in gallery mode
@ -86,7 +88,7 @@ English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing]
- [x] Protected routes (password protection and authentication)
- [x] WebDav (see https://alist.nn.ci/guide/webdav.html for details)
- [x] [Docker Deploy](https://hub.docker.com/r/xhofe/alist)
- [x] Cloudflare workers proxy
- [x] Cloudflare Workers proxy
- [x] File/Folder package download
- [x] Web upload(Can allow visitors to upload), delete, mkdir, rename, move and copy
- [x] Offline download
@ -103,7 +105,7 @@ English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing]
## Discussion
Please go to our [discussion forum](https://github.com/Xhofe/alist/discussions) for general questions, **issues are for bug reports and feature request only.**
Please go to our [discussion forum](https://github.com/Xhofe/alist/discussions) for general questions, **issues are for bug reports and feature requests only.**
## Sponsor
@ -112,22 +114,22 @@ https://alist.nn.ci/guide/sponsor.html
### Special sponsors
- [亚洲云 - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商](https://www.asiayun.com/aff/QQCOOQKZ) (sponsored Chinese API server)
- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.pw/)
- [JetBrains: Essential tools for software developers and teams](https://www.jetbrains.com/)
- [VidHub](https://okaapps.com/product/1659622164?ref=alist) - An elegant cloud video player within the Apple ecosystem. Support for iPhone, iPad, Mac, and Apple TV.
- [亚洲云](https://www.asiayun.com/aff/QQCOOQKZ) - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商 (sponsored Chinese API server)
- [找资源](https://zhaoziyuan.pw/) - 阿里云盘资源搜索引擎
## Contributors
Thanks goes to these wonderful people:
[![Contributors](http://contributors.nn.ci/api?repo=alist-org/alist&repo=alist-org/alist-web&repo=alist-org/docs)](https://github.com/alist-org/alist/graphs/contributors)
[![Contributors](http://contrib.nn.ci/api?repo=alist-org/alist&repo=alist-org/alist-web&repo=alist-org/docs)](https://github.com/alist-org/alist/graphs/contributors)
## License
The `AList` is open-source software licensed under the AGPL-3.0 license.
## Disclaimer
- This program is a free and open source project. It is designed to share files on the network disk, which is convenient for downloading and learning golang. Please abide by relevant laws and regulations when using it, and do not abuse it;
- This program is a free and open source project. It is designed to share files on the network disk, which is convenient for downloading and learning Golang. Please abide by relevant laws and regulations when using it, and do not abuse it;
- This program is implemented by calling the official sdk/interface, without destroying the official interface behavior;
- This program only does 302 redirect/traffic forwarding, and does not intercept, store, or tamper with any user data;
- Before using this program, you should understand and bear the corresponding risks, including but not limited to account ban, download speed limit, etc., which is none of this program's business;


@ -45,7 +45,7 @@
- [x] 多种存储
- [x] 本地存储
- [x] [阿里云盘](https://www.aliyundrive.com/)
- [x] [阿里云盘](https://www.alipan.com/)
- [x] OneDrive / Sharepoint[国际版](https://www.office.com/), [世纪互联](https://portal.partner.microsoftonline.cn),de,us
- [x] [天翼云盘](https://cloud.189.cn) (个人云, 家庭云)
- [x] [GoogleDrive](https://drive.google.com/)
@ -65,7 +65,8 @@
- [x] [夸克网盘](https://pan.quark.cn)
- [x] [迅雷网盘](https://pan.xunlei.com)
- [x] [蓝奏云](https://www.lanzou.com/)
- [x] [阿里云盘分享](https://www.aliyundrive.com/)
- [x] [蓝奏云优享版](https://www.ilanzou.com/)
- [x] [阿里云盘分享](https://www.alipan.com/)
- [x] [谷歌相册](https://photos.google.com/)
- [x] [Mega.nz](https://mega.nz)
- [x] [一刻相册](https://photo.baidu.com/)
@ -73,6 +74,7 @@
- [x] [115](https://115.com/)
- [X] Cloudreve
- [x] [Dropbox](https://www.dropbox.com/)
- [x] [飞机盘](https://www.feijipan.com/)
- [x] 部署方便,开箱即用
- [x] 文件预览PDF、markdown、代码、纯文本……
- [x] 画廊模式下的图像预览
@ -110,15 +112,15 @@ AList 是一个开源软件,如果你碰巧喜欢这个项目,并希望我
### 特别赞助
- [亚洲云 - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商](https://www.asiayun.com/aff/QQCOOQKZ) (国内API服务器赞助)
- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.pw/)
- [JetBrains: Essential tools for software developers and teams](https://www.jetbrains.com/)
- [VidHub](https://zh.okaapps.com/product/1659622164?ref=alist) - 苹果生态下优雅的网盘视频播放器iPhoneiPadMacApple TV全平台支持。
- [亚洲云](https://www.asiayun.com/aff/QQCOOQKZ) - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商 (国内API服务器赞助)
- [找资源](https://zhaoziyuan.pw/) - 阿里云盘资源搜索引擎
## 贡献者
Thanks goes to these wonderful people:
[![Contributors](http://contributors.nn.ci/api?repo=alist-org/alist&repo=alist-org/alist-web&repo=alist-org/docs)](https://github.com/alist-org/alist/graphs/contributors)
[![Contributors](http://contrib.nn.ci/api?repo=alist-org/alist&repo=alist-org/alist-web&repo=alist-org/docs)](https://github.com/alist-org/alist/graphs/contributors)
## 许可


@ -45,7 +45,7 @@
- [x] マルチストレージ
- [x] ローカルストレージ
- [x] [Aliyundrive](https://www.aliyundrive.com/)
- [x] [Aliyundrive](https://www.alipan.com/)
- [x] OneDrive / Sharepoint ([グローバル](https://www.office.com/), [cn](https://portal.partner.microsoftonline.cn),de,us)
- [x] [189cloud](https://cloud.189.cn) (Personal, Family)
- [x] [GoogleDrive](https://drive.google.com/)
@ -66,7 +66,8 @@
- [x] [Quark](https://pan.quark.cn)
- [x] [Thunder](https://pan.xunlei.com)
- [x] [Lanzou](https://www.lanzou.com/)
- [x] [Aliyundrive share](https://www.aliyundrive.com/)
- [x] [ILanzou](https://www.ilanzou.com/)
- [x] [Aliyundrive share](https://www.alipan.com/)
- [x] [Google photo](https://photos.google.com/)
- [x] [Mega.nz](https://mega.nz)
- [x] [Baidu photo](https://photo.baidu.com/)
@ -74,6 +75,7 @@
- [x] [115](https://115.com/)
- [X] Cloudreve
- [x] [Dropbox](https://www.dropbox.com/)
- [x] [FeijiPan](https://www.feijipan.com/)
- [x] デプロイが簡単で、すぐに使える
- [x] ファイルプレビュー (PDF, マークダウン, コード, プレーンテキスト, ...)
- [x] ギャラリーモードでの画像プレビュー
@ -112,15 +114,15 @@ https://alist.nn.ci/guide/sponsor.html
### スペシャルスポンサー
- [亚洲云 - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商](https://www.asiayun.com/aff/QQCOOQKZ) (sponsored Chinese API server)
- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.pw/)
- [JetBrains: Essential tools for software developers and teams](https://www.jetbrains.com/)
- [VidHub](https://okaapps.com/product/1659622164?ref=alist) - An elegant cloud video player within the Apple ecosystem. Support for iPhone, iPad, Mac, and Apple TV.
- [亚洲云](https://www.asiayun.com/aff/QQCOOQKZ) - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商 (sponsored Chinese API server)
- [找资源](https://zhaoziyuan.pw/) - 阿里云盘资源搜索引擎
## コントリビューター
これらの素晴らしい人々に感謝します:
[![Contributors](http://contributors.nn.ci/api?repo=alist-org/alist&repo=alist-org/alist-web&repo=alist-org/docs)](https://github.com/alist-org/alist/graphs/contributors)
[![Contributors](http://contrib.nn.ci/api?repo=alist-org/alist&repo=alist-org/alist-web&repo=alist-org/docs)](https://github.com/alist-org/alist/graphs/contributors)
## ライセンス


@ -49,6 +49,7 @@ BuildWinArm64() {
export GOARCH=arm64
export CC=$(pwd)/wrapper/zcc-arm64
export CXX=$(pwd)/wrapper/zcxx-arm64
export CGO_ENABLED=1
go build -o "$1" -ldflags="$ldflags" -tags=jsoniter .
}
@ -75,7 +76,7 @@ BuildDev() {
export CGO_ENABLED=1
go build -o ./dist/$appName-$os_arch -ldflags="$muslflags" -tags=jsoniter .
done
xgo -targets=windows/amd64,darwin/amd64 -out "$appName" -ldflags="$ldflags" -tags=jsoniter .
xgo -targets=windows/amd64,darwin/amd64,darwin/arm64 -out "$appName" -ldflags="$ldflags" -tags=jsoniter .
mv alist-* dist
cd dist
cp ./alist-windows-amd64.exe ./alist-windows-amd64-upx.exe
@ -84,10 +85,61 @@ BuildDev() {
cat md5.txt
}
PrepareBuildDocker() {
echo "replace github.com/mattn/go-sqlite3 => github.com/leso-kn/go-sqlite3 v0.0.0-20230710125852-03158dc838ed" >>go.mod
go get gorm.io/driver/sqlite@v1.4.4
go mod download
}
BuildDocker() {
PrepareBuildDocker
go build -o ./bin/alist -ldflags="$ldflags" -tags=jsoniter .
}
BuildDockerMultiplatform() {
PrepareBuildDocker
BASE="https://musl.cc/"
FILES=(x86_64-linux-musl-cross aarch64-linux-musl-cross i486-linux-musl-cross s390x-linux-musl-cross armv6-linux-musleabihf-cross armv7l-linux-musleabihf-cross)
for i in "${FILES[@]}"; do
url="${BASE}${i}.tgz"
curl -L -o "${i}.tgz" "${url}"
sudo tar xf "${i}.tgz" --strip-components 1 -C /usr/local
rm -f "${i}.tgz"
done
docker_lflags="--extldflags '-static -fpic' $ldflags"
export CGO_ENABLED=1
OS_ARCHES=(linux-amd64 linux-arm64 linux-386 linux-s390x)
CGO_ARGS=(x86_64-linux-musl-gcc aarch64-linux-musl-gcc i486-linux-musl-gcc s390x-linux-musl-gcc)
for i in "${!OS_ARCHES[@]}"; do
os_arch=${OS_ARCHES[$i]}
cgo_cc=${CGO_ARGS[$i]}
os=${os_arch%%-*}
arch=${os_arch##*-}
export GOOS=$os
export GOARCH=$arch
export CC=${cgo_cc}
echo "building for $os_arch"
go build -o ./$os/$arch/alist -ldflags="$docker_lflags" -tags=jsoniter .
done
DOCKER_ARM_ARCHES=(linux-arm/v6 linux-arm/v7)
CGO_ARGS=(armv6-linux-musleabihf-gcc armv7l-linux-musleabihf-gcc)
GO_ARM=(6 7)
export GOOS=linux
export GOARCH=arm
for i in "${!DOCKER_ARM_ARCHES[@]}"; do
docker_arch=${DOCKER_ARM_ARCHES[$i]}
cgo_cc=${CGO_ARGS[$i]}
export GOARM=${GO_ARM[$i]}
export CC=${cgo_cc}
echo "building for $docker_arch"
go build -o ./${docker_arch%%-*}/${docker_arch##*-}/alist -ldflags="$docker_lflags" -tags=jsoniter .
done
}
BuildRelease() {
rm -rf .git/
mkdir -p "build"
@ -159,6 +211,27 @@ BuildReleaseLinuxMuslArm() {
done
}
BuildReleaseAndroid() {
rm -rf .git/
mkdir -p "build"
wget https://dl.google.com/android/repository/android-ndk-r26b-linux.zip
unzip android-ndk-r26b-linux.zip
rm android-ndk-r26b-linux.zip
OS_ARCHES=(amd64 arm64 386 arm)
CGO_ARGS=(x86_64-linux-android24-clang aarch64-linux-android24-clang i686-linux-android24-clang armv7a-linux-androideabi24-clang)
for i in "${!OS_ARCHES[@]}"; do
os_arch=${OS_ARCHES[$i]}
cgo_cc=$(realpath android-ndk-r26b/toolchains/llvm/prebuilt/linux-x86_64/bin/${CGO_ARGS[$i]})
echo building for android-${os_arch}
export GOOS=android
export GOARCH=${os_arch##*-}
export CC=${cgo_cc}
export CGO_ENABLED=1
go build -o ./build/$appName-android-$os_arch -ldflags="$ldflags" -tags=jsoniter .
android-ndk-r26b/toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-strip ./build/$appName-android-$os_arch
done
}
MakeRelease() {
cd build
mkdir compress
@ -166,6 +239,11 @@ MakeRelease() {
cp "$i" alist
tar -czvf compress/"$i".tar.gz alist
rm -f alist
done
for i in $(find . -type f -name "$appName-android-*"); do
cp "$i" alist
tar -czvf compress/"$i".tar.gz alist
rm -f alist
done
for i in $(find . -type f -name "$appName-darwin-*"); do
cp "$i" alist
@ -187,6 +265,8 @@ if [ "$1" = "dev" ]; then
FetchWebDev
if [ "$2" = "docker" ]; then
BuildDocker
elif [ "$2" = "docker-multiplatform" ]; then
BuildDockerMultiplatform
else
BuildDev
fi
@ -194,12 +274,17 @@ elif [ "$1" = "release" ]; then
FetchWebRelease
if [ "$2" = "docker" ]; then
BuildDocker
elif [ "$2" = "docker-multiplatform" ]; then
BuildDockerMultiplatform
elif [ "$2" = "linux_musl_arm" ]; then
BuildReleaseLinuxMuslArm
MakeRelease "md5-linux-musl-arm.txt"
elif [ "$2" = "linux_musl" ]; then
BuildReleaseLinuxMusl
MakeRelease "md5-linux-musl.txt"
elif [ "$2" = "android" ]; then
BuildReleaseAndroid
MakeRelease "md5-android.txt"
else
BuildRelease
MakeRelease "md5.txt"


@ -19,6 +19,7 @@ var AdminCmd = &cobra.Command{
Short: "Show admin user's info and some operations about admin user's password",
Run: func(cmd *cobra.Command, args []string) {
Init()
defer Release()
admin, err := op.GetAdmin()
if err != nil {
utils.Log.Errorf("failed get admin user: %+v", err)
@ -57,6 +58,7 @@ var ShowTokenCmd = &cobra.Command{
Short: "Show admin token",
Run: func(cmd *cobra.Command, args []string) {
Init()
defer Release()
token := setting.GetStr(conf.Token)
utils.Log.Infof("Admin token: %s", token)
},
@ -64,6 +66,7 @@ var ShowTokenCmd = &cobra.Command{
func setAdminPassword(pwd string) {
Init()
defer Release()
admin, err := op.GetAdmin()
if err != nil {
utils.Log.Errorf("failed get admin user: %+v", err)


@ -15,6 +15,7 @@ var Cancel2FACmd = &cobra.Command{
Short: "Delete 2FA of admin user",
Run: func(cmd *cobra.Command, args []string) {
Init()
defer Release()
admin, err := op.GetAdmin()
if err != nil {
utils.Log.Errorf("failed to get admin user: %+v", err)


@ -7,6 +7,7 @@ import (
"github.com/alist-org/alist/v3/internal/bootstrap"
"github.com/alist-org/alist/v3/internal/bootstrap/data"
"github.com/alist-org/alist/v3/internal/db"
"github.com/alist-org/alist/v3/pkg/utils"
log "github.com/sirupsen/logrus"
)
@ -19,6 +20,10 @@ func Init() {
bootstrap.InitIndex()
}
func Release() {
db.Close()
}
var pid = -1
var pidFile string


@ -5,6 +5,8 @@ import (
"os"
"github.com/alist-org/alist/v3/cmd/flags"
_ "github.com/alist-org/alist/v3/drivers"
_ "github.com/alist-org/alist/v3/internal/offline_download"
"github.com/spf13/cobra"
)


@ -2,6 +2,7 @@ package cmd
import (
"context"
"errors"
"fmt"
"net"
"net/http"
@ -13,7 +14,6 @@ import (
"time"
"github.com/alist-org/alist/v3/cmd/flags"
_ "github.com/alist-org/alist/v3/drivers"
"github.com/alist-org/alist/v3/internal/bootstrap"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/pkg/utils"
@ -35,9 +35,9 @@ the address is defined in config file`,
utils.Log.Infof("delayed start for %d seconds", conf.Conf.DelayedStart)
time.Sleep(time.Duration(conf.Conf.DelayedStart) * time.Second)
}
bootstrap.InitAria2()
bootstrap.InitQbittorrent()
bootstrap.InitOfflineDownloadTools()
bootstrap.LoadStorages()
bootstrap.InitTaskManager()
if !flags.Debug && !flags.Dev {
gin.SetMode(gin.ReleaseMode)
}
@ -51,7 +51,7 @@ the address is defined in config file`,
httpSrv = &http.Server{Addr: httpBase, Handler: r}
go func() {
err := httpSrv.ListenAndServe()
if err != nil && err != http.ErrServerClosed {
if err != nil && !errors.Is(err, http.ErrServerClosed) {
utils.Log.Fatalf("failed to start http: %s", err.Error())
}
}()
@ -62,7 +62,7 @@ the address is defined in config file`,
httpsSrv = &http.Server{Addr: httpsBase, Handler: r}
go func() {
err := httpsSrv.ListenAndServeTLS(conf.Conf.Scheme.CertFile, conf.Conf.Scheme.KeyFile)
if err != nil && err != http.ErrServerClosed {
if err != nil && !errors.Is(err, http.ErrServerClosed) {
utils.Log.Fatalf("failed to start https: %s", err.Error())
}
}()
@ -86,7 +86,7 @@ the address is defined in config file`,
}
}
err = unixSrv.Serve(listener)
if err != nil && err != http.ErrServerClosed {
if err != nil && !errors.Is(err, http.ErrServerClosed) {
utils.Log.Fatalf("failed to start unix: %s", err.Error())
}
}()
@ -100,7 +100,7 @@ the address is defined in config file`,
signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM)
<-quit
utils.Log.Println("Shutdown server...")
Release()
ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second)
defer cancel()
var wg sync.WaitGroup


@ -31,6 +31,7 @@ var disableStorageCmd = &cobra.Command{
}
mountPath := args[0]
Init()
defer Release()
storage, err := db.GetStorageByMountPath(mountPath)
if err != nil {
utils.Log.Errorf("failed to query storage: %+v", err)
@ -89,6 +90,7 @@ var listStorageCmd = &cobra.Command{
Short: "List all storages",
Run: func(cmd *cobra.Command, args []string) {
Init()
defer Release()
storages, _, err := db.GetStorages(1, -1)
if err != nil {
utils.Log.Errorf("failed to query storages: %+v", err)


@ -2,19 +2,22 @@ package _115
import (
"context"
"strings"
driver115 "github.com/SheltonZhu/115driver/pkg/driver"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/http_range"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/pkg/errors"
"strings"
"golang.org/x/time/rate"
)
type Pan115 struct {
model.Storage
Addition
client *driver115.Pan115Client
client *driver115.Pan115Client
limiter *rate.Limiter
}
func (d *Pan115) Config() driver.Config {
@ -26,14 +29,27 @@ func (d *Pan115) GetAddition() driver.Additional {
}
func (d *Pan115) Init(ctx context.Context) error {
if d.LimitRate > 0 {
d.limiter = rate.NewLimiter(rate.Limit(d.LimitRate), 1)
}
return d.login()
}
func (d *Pan115) WaitLimit(ctx context.Context) error {
if d.limiter != nil {
return d.limiter.Wait(ctx)
}
return nil
}
func (d *Pan115) Drop(ctx context.Context) error {
return nil
}
func (d *Pan115) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
if err := d.WaitLimit(ctx); err != nil {
return nil, err
}
files, err := d.getFiles(dir.GetID())
if err != nil && !errors.Is(err, driver115.ErrNotExist) {
return nil, err
@ -44,11 +60,12 @@ func (d *Pan115) List(ctx context.Context, dir model.Obj, args model.ListArgs) (
}
func (d *Pan115) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
downloadInfo, err := d.client.
SetUserAgent(driver115.UA115Browser).
Download(file.(*FileObj).PickCode)
// recover for upload
d.client.SetUserAgent(driver115.UA115Desktop)
if err := d.WaitLimit(ctx); err != nil {
return nil, err
}
var userAgent = args.Header.Get("User-Agent")
downloadInfo, err := d.
DownloadWithUA(file.(*FileObj).PickCode, userAgent)
if err != nil {
return nil, err
}
@ -60,6 +77,9 @@ func (d *Pan115) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
}
func (d *Pan115) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
if err := d.WaitLimit(ctx); err != nil {
return err
}
if _, err := d.client.Mkdir(parentDir.GetID(), dirName); err != nil {
return err
}
@ -67,22 +87,38 @@ func (d *Pan115) MakeDir(ctx context.Context, parentDir model.Obj, dirName strin
}
func (d *Pan115) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
if err := d.WaitLimit(ctx); err != nil {
return err
}
return d.client.Move(dstDir.GetID(), srcObj.GetID())
}
func (d *Pan115) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
if err := d.WaitLimit(ctx); err != nil {
return err
}
return d.client.Rename(srcObj.GetID(), newName)
}
func (d *Pan115) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
if err := d.WaitLimit(ctx); err != nil {
return err
}
return d.client.Copy(dstDir.GetID(), srcObj.GetID())
}
func (d *Pan115) Remove(ctx context.Context, obj model.Obj) error {
if err := d.WaitLimit(ctx); err != nil {
return err
}
return d.client.Delete(obj.GetID())
}
func (d *Pan115) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
if err := d.WaitLimit(ctx); err != nil {
return err
}
var (
fastInfo *driver115.UploadInitResp
dirID = dstDir.GetID()
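
For context on the limiter introduced above: in golang.org/x/time/rate, rate.Limit(n) allows n events per second (so rate.Limit(d.LimitRate) treats limit_rate as requests per second), whereas rate.Every(d) allows one event per interval d, and Wait blocks until a token is available. A minimal sketch, not part of the diff:

// Sketch: semantics of the limiter used by WaitLimit above.
package main

import (
    "context"
    "fmt"
    "time"

    "golang.org/x/time/rate"
)

func main() {
    // rate.Limit(2) with burst 1: roughly one permit every 500ms under load.
    perSecond := rate.NewLimiter(rate.Limit(2), 1)
    // One request every 2 seconds would instead be:
    perInterval := rate.NewLimiter(rate.Every(2*time.Second), 1)

    for i := 0; i < 2; i++ {
        _ = perSecond.Wait(context.Background())
        _ = perInterval.Wait(context.Background())
        fmt.Println("tick", i, time.Now().Format("15:04:05.000"))
    }
}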


@ -6,17 +6,19 @@ import (
)
type Addition struct {
Cookie string `json:"cookie" type:"text" help:"one of QR code token and cookie required"`
QRCodeToken string `json:"qrcode_token" type:"text" help:"one of QR code token and cookie required"`
PageSize int64 `json:"page_size" type:"number" default:"56" help:"list api per page size of 115 driver"`
Cookie string `json:"cookie" type:"text" help:"one of QR code token and cookie required"`
QRCodeToken string `json:"qrcode_token" type:"text" help:"one of QR code token and cookie required"`
QRCodeSource string `json:"qrcode_source" type:"select" options:"web,android,ios,linux,mac,windows,tv" default:"linux" help:"select the QR code device, default linux"`
PageSize int64 `json:"page_size" type:"number" default:"56" help:"list api per page size of 115 driver"`
LimitRate float64 `json:"limit_rate" type:"number" default:"2" help:"limit all api request rate (1r/[limit_rate]s)"`
driver.RootID
}
var config = driver.Config{
Name: "115 Cloud",
DefaultRoot: "0",
OnlyProxy: true,
OnlyLocal: true,
Name: "115 Cloud",
DefaultRoot: "0",
//OnlyProxy: true,
//OnlyLocal: true,
NoOverwriteUpload: true,
}


@ -5,12 +5,8 @@ import (
"crypto/tls"
"encoding/json"
"fmt"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/http_range"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/aliyun/aliyun-oss-go-sdk/oss"
"github.com/orzogc/fake115uploader/cipher"
"io"
"net/http"
"net/url"
"path/filepath"
"strconv"
@ -18,29 +14,35 @@ import (
"sync"
"time"
"github.com/SheltonZhu/115driver/pkg/driver"
driver115 "github.com/SheltonZhu/115driver/pkg/driver"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/http_range"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/aliyun/aliyun-oss-go-sdk/oss"
driver115 "github.com/SheltonZhu/115driver/pkg/driver"
crypto "github.com/gaoyb7/115drive-webdav/115"
"github.com/orzogc/fake115uploader/cipher"
"github.com/pkg/errors"
)
var UserAgent = driver.UA115Desktop
var UserAgent = driver115.UA115Desktop
func (d *Pan115) login() error {
var err error
opts := []driver.Option{
driver.UA(UserAgent),
func(c *driver.Pan115Client) {
opts := []driver115.Option{
driver115.UA(UserAgent),
func(c *driver115.Pan115Client) {
c.Client.SetTLSClientConfig(&tls.Config{InsecureSkipVerify: conf.Conf.TlsInsecureSkipVerify})
},
}
d.client = driver.New(opts...)
cr := &driver.Credential{}
d.client = driver115.New(opts...)
cr := &driver115.Credential{}
if d.Addition.QRCodeToken != "" {
s := &driver.QRCodeSession{
s := &driver115.QRCodeSession{
UID: d.Addition.QRCodeToken,
}
if cr, err = d.client.QRCodeLogin(s); err != nil {
if cr, err = d.client.QRCodeLoginWithApp(s, driver115.LoginApp(d.QRCodeSource)); err != nil {
return errors.Wrap(err, "failed to login by qrcode")
}
d.Addition.Cookie = fmt.Sprintf("UID=%s;CID=%s;SEID=%s", cr.UID, cr.CID, cr.SEID)
@ -59,7 +61,7 @@ func (d *Pan115) login() error {
func (d *Pan115) getFiles(fileId string) ([]FileObj, error) {
res := make([]FileObj, 0)
if d.PageSize <= 0 {
d.PageSize = driver.FileListLimit
d.PageSize = driver115.FileListLimit
}
files, err := d.client.ListWithLimit(fileId, d.PageSize)
if err != nil {
@ -75,6 +77,61 @@ const (
appVer = "2.0.3.6"
)
func (c *Pan115) DownloadWithUA(pickCode, ua string) (*driver115.DownloadInfo, error) {
key := crypto.GenerateKey()
result := driver115.DownloadResp{}
params, err := utils.Json.Marshal(map[string]string{"pickcode": pickCode})
if err != nil {
return nil, err
}
data := crypto.Encode(params, key)
bodyReader := strings.NewReader(url.Values{"data": []string{data}}.Encode())
reqUrl := fmt.Sprintf("%s?t=%s", driver115.ApiDownloadGetUrl, driver115.Now().String())
req, _ := http.NewRequest(http.MethodPost, reqUrl, bodyReader)
req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
req.Header.Set("Cookie", c.Cookie)
req.Header.Set("User-Agent", ua)
resp, err := c.client.Client.GetClient().Do(req)
if err != nil {
return nil, err
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
return nil, err
}
if err := utils.Json.Unmarshal(body, &result); err != nil {
return nil, err
}
if err = result.Err(string(body)); err != nil {
return nil, err
}
bytes, err := crypto.Decode(string(result.EncodedData), key)
if err != nil {
return nil, err
}
downloadInfo := driver115.DownloadData{}
if err := utils.Json.Unmarshal(bytes, &downloadInfo); err != nil {
return nil, err
}
for _, info := range downloadInfo {
if info.FileSize < 0 {
return nil, driver115.ErrDownloadEmpty
}
info.Header = resp.Request.Header
return info, nil
}
return nil, driver115.ErrUnexpected
}
func (d *Pan115) rapidUpload(fileSize int64, fileName, dirID, preID, fileID string, stream model.FileStreamer) (*driver115.UploadInitResp, error) {
var (
ecdhCipher *cipher.EcdhCipher
@ -249,7 +306,7 @@ func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize i
go func(threadId int) {
defer func() {
if r := recover(); r != nil {
errCh <- fmt.Errorf("Recovered in %v", r)
errCh <- fmt.Errorf("recovered in %v", r)
}
}()
for chunk := range chunksCh {

drivers/115_share/driver.go (new file, 112 lines)

@ -0,0 +1,112 @@
package _115_share
import (
"context"
driver115 "github.com/SheltonZhu/115driver/pkg/driver"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"golang.org/x/time/rate"
)
type Pan115Share struct {
model.Storage
Addition
client *driver115.Pan115Client
limiter *rate.Limiter
}
func (d *Pan115Share) Config() driver.Config {
return config
}
func (d *Pan115Share) GetAddition() driver.Additional {
return &d.Addition
}
func (d *Pan115Share) Init(ctx context.Context) error {
if d.LimitRate > 0 {
d.limiter = rate.NewLimiter(rate.Limit(d.LimitRate), 1)
}
return d.login()
}
func (d *Pan115Share) WaitLimit(ctx context.Context) error {
if d.limiter != nil {
return d.limiter.Wait(ctx)
}
return nil
}
func (d *Pan115Share) Drop(ctx context.Context) error {
return nil
}
func (d *Pan115Share) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
if err := d.WaitLimit(ctx); err != nil {
return nil, err
}
files := make([]driver115.ShareFile, 0)
fileResp, err := d.client.GetShareSnap(d.ShareCode, d.ReceiveCode, dir.GetID(), driver115.QueryLimit(int(d.PageSize)))
if err != nil {
return nil, err
}
files = append(files, fileResp.Data.List...)
total := fileResp.Data.Count
count := len(fileResp.Data.List)
for total > count {
fileResp, err := d.client.GetShareSnap(
d.ShareCode, d.ReceiveCode, dir.GetID(),
driver115.QueryLimit(int(d.PageSize)), driver115.QueryOffset(count),
)
if err != nil {
return nil, err
}
files = append(files, fileResp.Data.List...)
count += len(fileResp.Data.List)
}
return utils.SliceConvert(files, transFunc)
}
func (d *Pan115Share) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
if err := d.WaitLimit(ctx); err != nil {
return nil, err
}
downloadInfo, err := d.client.DownloadByShareCode(d.ShareCode, d.ReceiveCode, file.GetID())
if err != nil {
return nil, err
}
return &model.Link{URL: downloadInfo.URL.URL}, nil
}
func (d *Pan115Share) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
return errs.NotSupport
}
func (d *Pan115Share) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
return errs.NotSupport
}
func (d *Pan115Share) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
return errs.NotSupport
}
func (d *Pan115Share) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
return errs.NotSupport
}
func (d *Pan115Share) Remove(ctx context.Context, obj model.Obj) error {
return errs.NotSupport
}
func (d *Pan115Share) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
return errs.NotSupport
}
var _ driver.Driver = (*Pan115Share)(nil)

drivers/115_share/meta.go (new file, 34 lines)

@ -0,0 +1,34 @@
package _115_share
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
Cookie string `json:"cookie" type:"text" help:"one of QR code token and cookie required"`
QRCodeToken string `json:"qrcode_token" type:"text" help:"one of QR code token and cookie required"`
QRCodeSource string `json:"qrcode_source" type:"select" options:"web,android,ios,linux,mac,windows,tv" default:"linux" help:"select the QR code device, default linux"`
PageSize int64 `json:"page_size" type:"number" default:"20" help:"list api per page size of 115 driver"`
LimitRate float64 `json:"limit_rate" type:"number" default:"2" help:"limit all api request rate (1r/[limit_rate]s)"`
ShareCode string `json:"share_code" type:"text" required:"true" help:"share code of 115 share link"`
ReceiveCode string `json:"receive_code" type:"text" required:"true" help:"receive code of 115 share link"`
driver.RootID
}
var config = driver.Config{
Name: "115 Share",
DefaultRoot: "",
// OnlyProxy: true,
// OnlyLocal: true,
CheckStatus: false,
Alert: "",
NoOverwriteUpload: true,
NoUpload: true,
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &Pan115Share{}
})
}

drivers/115_share/utils.go (new file, 111 lines)

@ -0,0 +1,111 @@
package _115_share
import (
"fmt"
"strconv"
"time"
driver115 "github.com/SheltonZhu/115driver/pkg/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/pkg/errors"
)
var _ model.Obj = (*FileObj)(nil)
type FileObj struct {
Size int64
Sha1 string
Utm time.Time
FileName string
isDir bool
FileID string
}
func (f *FileObj) CreateTime() time.Time {
return f.Utm
}
func (f *FileObj) GetHash() utils.HashInfo {
return utils.NewHashInfo(utils.SHA1, f.Sha1)
}
func (f *FileObj) GetSize() int64 {
return f.Size
}
func (f *FileObj) GetName() string {
return f.FileName
}
func (f *FileObj) ModTime() time.Time {
return f.Utm
}
func (f *FileObj) IsDir() bool {
return f.isDir
}
func (f *FileObj) GetID() string {
return f.FileID
}
func (f *FileObj) GetPath() string {
return ""
}
func transFunc(sf driver115.ShareFile) (model.Obj, error) {
timeInt, err := strconv.ParseInt(sf.UpdateTime, 10, 64)
if err != nil {
return nil, err
}
var (
utm = time.Unix(timeInt, 0)
isDir = (sf.IsFile == 0)
fileID = string(sf.FileID)
)
if isDir {
fileID = string(sf.CategoryID)
}
return &FileObj{
Size: int64(sf.Size),
Sha1: sf.Sha1,
Utm: utm,
FileName: string(sf.FileName),
isDir: isDir,
FileID: fileID,
}, nil
}
var UserAgent = driver115.UA115Browser
func (d *Pan115Share) login() error {
var err error
opts := []driver115.Option{
driver115.UA(UserAgent),
}
d.client = driver115.New(opts...)
if _, err := d.client.GetShareSnap(d.ShareCode, d.ReceiveCode, ""); err != nil {
return errors.Wrap(err, "failed to get share snap")
}
cr := &driver115.Credential{}
if d.QRCodeToken != "" {
s := &driver115.QRCodeSession{
UID: d.QRCodeToken,
}
if cr, err = d.client.QRCodeLoginWithApp(s, driver115.LoginApp(d.QRCodeSource)); err != nil {
return errors.Wrap(err, "failed to login by qrcode")
}
d.Cookie = fmt.Sprintf("UID=%s;CID=%s;SEID=%s", cr.UID, cr.CID, cr.SEID)
d.QRCodeToken = ""
} else if d.Cookie != "" {
if err = cr.FromCookie(d.Cookie); err != nil {
return errors.Wrap(err, "failed to login by cookies")
}
d.client.ImportCredential(cr)
} else {
return errors.New("missing cookie or qrcode account")
}
return d.client.LoginCheck()
}


@ -6,6 +6,13 @@ import (
"encoding/base64"
"encoding/hex"
"fmt"
"golang.org/x/time/rate"
"io"
"net/http"
"net/url"
"sync"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
@ -17,14 +24,12 @@ import (
"github.com/aws/aws-sdk-go/service/s3/s3manager"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
"io"
"net/http"
"net/url"
)
type Pan123 struct {
model.Storage
Addition
apiRateLimit sync.Map
}
func (d *Pan123) Config() driver.Config {
@ -232,6 +237,9 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
return err
}
uploader := s3manager.NewUploader(s)
if stream.GetSize() > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
uploader.PartSize = stream.GetSize() / (s3manager.MaxUploadParts - 1)
}
input := &s3manager.UploadInput{
Bucket: &resp.Data.Bucket,
Key: &resp.Data.Key,
@ -250,4 +258,11 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
return err
}
func (d *Pan123) APIRateLimit(api string) bool {
limiter, _ := d.apiRateLimit.LoadOrStore(api,
rate.NewLimiter(rate.Every(time.Millisecond*700), 1))
ins := limiter.(*rate.Limiter)
return ins.Allow()
}
var _ driver.Driver = (*Pan123)(nil)
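
The PartSize adjustment in Put above is needed because aws-sdk-go's s3manager caps a multipart upload at s3manager.MaxUploadParts (10000) parts of s3manager.DefaultUploadPartSize (5 MiB) each by default, roughly 48.8 GiB in total; larger streams must use bigger parts. A rough sketch of the same arithmetic, using a hypothetical 100 GiB stream:

// Sketch: why uploader.PartSize is raised for very large uploads.
package main

import (
    "fmt"

    "github.com/aws/aws-sdk-go/service/s3/s3manager"
)

func main() {
    size := int64(100) << 30 // hypothetical 100 GiB stream
    limit := int64(s3manager.MaxUploadParts) * s3manager.DefaultUploadPartSize
    fmt.Println(limit) // 52428800000 bytes, about 48.8 GiB
    if size > limit {
        partSize := size / (s3manager.MaxUploadParts - 1)
        fmt.Println(partSize) // about 10.7 MB per part, keeping the count under 10000
    }
}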


@ -107,7 +107,7 @@ func (d *Pan123) newUpload(ctx context.Context, upReq *UploadResp, file model.Fi
if err != nil {
return err
}
up(j * 100 / chunkCount)
up(float64(j) * 100 / float64(chunkCount))
}
}
// complete s3 upload
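
The float64 conversion above matters because with integer operands j * 100 / chunkCount truncates, so progress only advances in whole-percent steps; the float form reports fractional progress. A tiny illustration:

// Sketch: integer vs. float progress for chunk 1 of 3.
package main

import "fmt"

func main() {
    j, chunkCount := int64(1), int64(3)
    fmt.Println(j * 100 / chunkCount)                   // 33
    fmt.Println(float64(j) * 100 / float64(chunkCount)) // 33.333333333333336
}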


@ -3,12 +3,18 @@ package _123
import (
"errors"
"fmt"
"hash/crc32"
"math"
"math/rand"
"net/http"
"net/url"
"strconv"
"strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
resty "github.com/go-resty/resty/v2"
jsoniter "github.com/json-iterator/go"
)
@ -18,7 +24,7 @@ const (
Api = "https://www.123pan.com/api"
AApi = "https://www.123pan.com/a/api"
BApi = "https://www.123pan.com/b/api"
MainApi = Api
MainApi = BApi
SignIn = MainApi + "/user/sign_in"
Logout = MainApi + "/user/logout"
UserInfo = MainApi + "/user/info"
@ -37,6 +43,104 @@ const (
//AuthKeySalt = "8-8D$sL8gPjom7bk#cY"
)
func signPath(path string, os string, version string) (k string, v string) {
table := []byte{'a', 'd', 'e', 'f', 'g', 'h', 'l', 'm', 'y', 'i', 'j', 'n', 'o', 'p', 'k', 'q', 'r', 's', 't', 'u', 'b', 'c', 'v', 'w', 's', 'z'}
random := fmt.Sprintf("%.f", math.Round(1e7*rand.Float64()))
now := time.Now().In(time.FixedZone("CST", 8*3600))
timestamp := fmt.Sprint(now.Unix())
nowStr := []byte(now.Format("200601021504"))
for i := 0; i < len(nowStr); i++ {
nowStr[i] = table[nowStr[i]-48]
}
timeSign := fmt.Sprint(crc32.ChecksumIEEE(nowStr))
data := strings.Join([]string{timestamp, random, path, os, version, timeSign}, "|")
dataSign := fmt.Sprint(crc32.ChecksumIEEE([]byte(data)))
return timeSign, strings.Join([]string{timestamp, random, dataSign}, "-")
}
func GetApi(rawUrl string) string {
u, _ := url.Parse(rawUrl)
query := u.Query()
query.Add(signPath(u.Path, "web", "3"))
u.RawQuery = query.Encode()
return u.String()
}
//func GetApi(url string) string {
// vm := js.New()
// vm.Set("url", url[22:])
// r, err := vm.RunString(`
// (function(e){
// function A(t, e) {
// e = 1 < arguments.length && void 0 !== e ? e : 10;
// for (var n = function() {
// for (var t = [], e = 0; e < 256; e++) {
// for (var n = e, r = 0; r < 8; r++)
// n = 1 & n ? 3988292384 ^ n >>> 1 : n >>> 1;
// t[e] = n
// }
// return t
// }(), r = function(t) {
// t = t.replace(/\\r\\n/g, "\\n");
// for (var e = "", n = 0; n < t.length; n++) {
// var r = t.charCodeAt(n);
// r < 128 ? e += String.fromCharCode(r) : e = 127 < r && r < 2048 ? (e += String.fromCharCode(r >> 6 | 192)) + String.fromCharCode(63 & r | 128) : (e = (e += String.fromCharCode(r >> 12 | 224)) + String.fromCharCode(r >> 6 & 63 | 128)) + String.fromCharCode(63 & r | 128)
// }
// return e
// }(t), a = -1, i = 0; i < r.length; i++)
// a = a >>> 8 ^ n[255 & (a ^ r.charCodeAt(i))];
// return (a = (-1 ^ a) >>> 0).toString(e)
// }
//
// function v(t) {
// return (v = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function(t) {
// return typeof t
// }
// : function(t) {
// return t && "function" == typeof Symbol && t.constructor === Symbol && t !== Symbol.prototype ? "symbol" : typeof t
// }
// )(t)
// }
//
// for (p in a = Math.round(1e7 * Math.random()),
// o = Math.round(((new Date).getTime() + 60 * (new Date).getTimezoneOffset() * 1e3 + 288e5) / 1e3).toString(),
// m = ["a", "d", "e", "f", "g", "h", "l", "m", "y", "i", "j", "n", "o", "p", "k", "q", "r", "s", "t", "u", "b", "c", "v", "w", "s", "z"],
// u = function(t, e, n) {
// var r;
// n = 2 < arguments.length && void 0 !== n ? n : 8;
// return 0 === arguments.length ? null : (r = "object" === v(t) ? t : (10 === "".concat(t).length && (t = 1e3 * Number.parseInt(t)),
// new Date(t)),
// t += 6e4 * new Date(t).getTimezoneOffset(),
// {
// y: (r = new Date(t + 36e5 * n)).getFullYear(),
// m: r.getMonth() + 1 < 10 ? "0".concat(r.getMonth() + 1) : r.getMonth() + 1,
// d: r.getDate() < 10 ? "0".concat(r.getDate()) : r.getDate(),
// h: r.getHours() < 10 ? "0".concat(r.getHours()) : r.getHours(),
// f: r.getMinutes() < 10 ? "0".concat(r.getMinutes()) : r.getMinutes()
// })
// }(o),
// h = u.y,
// g = u.m,
// l = u.d,
// c = u.h,
// u = u.f,
// d = [h, g, l, c, u].join(""),
// f = [],
// d)
// f.push(m[Number(d[p])]);
// return h = A(f.join("")),
// g = A("".concat(o, "|").concat(a, "|").concat(e, "|").concat("web", "|").concat("3", "|").concat(h)),
// "".concat(h, "=").concat(o, "-").concat(a, "-").concat(g);
// })(url)
// `)
// if err != nil {
// fmt.Println(err)
// return url
// }
// v, _ := r.Export().(string)
// return url + "?" + v
//}
func (d *Pan123) login() error {
var body base.Json
if utils.IsEmailFormat(d.Username) {
@ -56,9 +160,9 @@ func (d *Pan123) login() error {
SetHeaders(map[string]string{
"origin": "https://www.123pan.com",
"referer": "https://www.123pan.com/",
"user-agent": "Dart/2.19(dart:io)",
"platform": "android",
"app-version": "36",
"user-agent": "Dart/2.19(dart:io)-alist",
"platform": "web",
"app-version": "3",
//"user-agent": base.UserAgent,
}).
SetBody(body).Post(SignIn)
@ -93,9 +197,9 @@ func (d *Pan123) request(url string, method string, callback base.ReqCallback, r
"origin": "https://www.123pan.com",
"referer": "https://www.123pan.com/",
"authorization": "Bearer " + d.AccessToken,
"user-agent": "Dart/2.19(dart:io)",
"platform": "android",
"app-version": "36",
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) alist-client",
"platform": "web",
"app-version": "3",
//"user-agent": base.UserAgent,
})
if callback != nil {
@ -109,7 +213,7 @@ func (d *Pan123) request(url string, method string, callback base.ReqCallback, r
// return nil, err
//}
//req.SetQueryParam("auth-key", *authKey)
res, err := req.Execute(method, url)
res, err := req.Execute(method, GetApi(url))
if err != nil {
return nil, err
}
@ -131,17 +235,27 @@ func (d *Pan123) request(url string, method string, callback base.ReqCallback, r
func (d *Pan123) getFiles(parentId string) ([]File, error) {
page := 1
res := make([]File, 0)
// 2024-02-06: rate-limit list requests to work around 123pan's concurrency restrictions
for {
if !d.APIRateLimit(FileList) {
time.Sleep(time.Millisecond * 200)
continue
}
var resp Files
query := map[string]string{
"driveId": "0",
"limit": "100",
"next": "0",
"orderBy": d.OrderBy,
"orderDirection": d.OrderDirection,
"parentFileId": parentId,
"trashed": "false",
"Page": strconv.Itoa(page),
"driveId": "0",
"limit": "100",
"next": "0",
"orderBy": d.OrderBy,
"orderDirection": d.OrderDirection,
"parentFileId": parentId,
"trashed": "false",
"SearchData": "",
"Page": strconv.Itoa(page),
"OnlyLookAbnormalFile": "0",
"event": "homeListFile",
"operateType": "4",
"inDirectSpace": "false",
}
_, err := d.request(FileList, http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(query)


@ -0,0 +1,77 @@
package _123Link
import (
"context"
stdpath "path"
"time"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
)
type Pan123Link struct {
model.Storage
Addition
root *Node
}
func (d *Pan123Link) Config() driver.Config {
return config
}
func (d *Pan123Link) GetAddition() driver.Additional {
return &d.Addition
}
func (d *Pan123Link) Init(ctx context.Context) error {
node, err := BuildTree(d.OriginURLs)
if err != nil {
return err
}
node.calSize()
d.root = node
return nil
}
func (d *Pan123Link) Drop(ctx context.Context) error {
return nil
}
func (d *Pan123Link) Get(ctx context.Context, path string) (model.Obj, error) {
node := GetNodeFromRootByPath(d.root, path)
return nodeToObj(node, path)
}
func (d *Pan123Link) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
node := GetNodeFromRootByPath(d.root, dir.GetPath())
if node == nil {
return nil, errs.ObjectNotFound
}
if node.isFile() {
return nil, errs.NotFolder
}
return utils.SliceConvert(node.Children, func(node *Node) (model.Obj, error) {
return nodeToObj(node, stdpath.Join(dir.GetPath(), node.Name))
})
}
func (d *Pan123Link) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
node := GetNodeFromRootByPath(d.root, file.GetPath())
if node == nil {
return nil, errs.ObjectNotFound
}
if node.isFile() {
signUrl, err := SignURL(node.Url, d.PrivateKey, d.UID, time.Duration(d.ValidDuration)*time.Minute)
if err != nil {
return nil, err
}
return &model.Link{
URL: signUrl,
}, nil
}
return nil, errs.NotFile
}
var _ driver.Driver = (*Pan123Link)(nil)

drivers/123_link/meta.go (new file, 23 lines)

@ -0,0 +1,23 @@
package _123Link
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
OriginURLs string `json:"origin_urls" type:"text" required:"true" default:"https://vip.123pan.com/29/folder/file.mp3" help:"structure:FolderName:\n [FileSize:][Modified:]Url"`
PrivateKey string `json:"private_key"`
UID uint64 `json:"uid" type:"number"`
ValidDuration int64 `json:"valid_duration" type:"number" default:"30" help:"minutes"`
}
var config = driver.Config{
Name: "123PanLink",
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &Pan123Link{}
})
}

drivers/123_link/parse.go (new file, 152 lines)

@ -0,0 +1,152 @@
package _123Link
import (
"fmt"
url2 "net/url"
stdpath "path"
"strconv"
"strings"
"time"
)
// build tree from text, text structure definition:
/**
* FolderName:
* [FileSize:][Modified:]Url
*/
/**
* For example:
* folder1:
* name1:url1
* url2
* folder2:
* url3
* url4
* url5
* folder3:
* url6
* url7
* url8
*/
// if there is no name, use the last segment of the url as the name
func BuildTree(text string) (*Node, error) {
lines := strings.Split(text, "\n")
var root = &Node{Level: -1, Name: "root"}
stack := []*Node{root}
for _, line := range lines {
// calculate indent
indent := 0
for i := 0; i < len(line); i++ {
if line[i] != ' ' {
break
}
indent++
}
// if the indent is not a multiple of 2, it is an error
if indent%2 != 0 {
return nil, fmt.Errorf("the indentation of line '%s' is not a multiple of 2", line)
}
// calculate level
level := indent / 2
line = strings.TrimSpace(line[indent:])
// if the line is empty, skip
if line == "" {
continue
}
// if level isn't greater than the level of the top of the stack
// it is not the child of the top of the stack
for level <= stack[len(stack)-1].Level {
// pop the top of the stack
stack = stack[:len(stack)-1]
}
// if the line is a folder
if isFolder(line) {
// create a new node
node := &Node{
Level: level,
Name: strings.TrimSuffix(line, ":"),
}
// add the node to the top of the stack
stack[len(stack)-1].Children = append(stack[len(stack)-1].Children, node)
// push the node to the stack
stack = append(stack, node)
} else {
// if the line is a file
// create a new node
node, err := parseFileLine(line)
if err != nil {
return nil, err
}
node.Level = level
// add the node to the top of the stack
stack[len(stack)-1].Children = append(stack[len(stack)-1].Children, node)
}
}
return root, nil
}
func isFolder(line string) bool {
return strings.HasSuffix(line, ":")
}
// line definition:
// [FileSize:][Modified:]Url
func parseFileLine(line string) (*Node, error) {
// if there is no url, it is an error
if !strings.Contains(line, "http://") && !strings.Contains(line, "https://") {
return nil, fmt.Errorf("invalid line: %s, because url is required for file", line)
}
index := strings.Index(line, "http://")
if index == -1 {
index = strings.Index(line, "https://")
}
url := line[index:]
info := line[:index]
node := &Node{
Url: url,
}
name := stdpath.Base(url)
unescape, err := url2.PathUnescape(name)
if err == nil {
name = unescape
}
node.Name = name
if index > 0 {
if !strings.HasSuffix(info, ":") {
return nil, fmt.Errorf("invalid line: %s, because file info must end with ':'", line)
}
info = info[:len(info)-1]
if info == "" {
return nil, fmt.Errorf("invalid line: %s, because file name can't be empty", line)
}
infoParts := strings.Split(info, ":")
size, err := strconv.ParseInt(infoParts[0], 10, 64)
if err != nil {
return nil, fmt.Errorf("invalid line: %s, because file size must be an integer", line)
}
node.Size = size
if len(infoParts) > 1 {
modified, err := strconv.ParseInt(infoParts[1], 10, 64)
if err != nil {
return nil, fmt.Errorf("invalid line: %s, because file modified must be an unix timestamp", line)
}
node.Modified = modified
} else {
node.Modified = time.Now().Unix()
}
}
return node, nil
}
func splitPath(path string) []string {
if path == "/" {
return []string{"root"}
}
parts := strings.Split(path, "/")
parts[0] = "root"
return parts
}
func GetNodeFromRootByPath(root *Node, path string) *Node {
return root.getByPath(splitPath(path))
}
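
A minimal sketch of how the origin_urls text is parsed, assuming the BuildTree, calSize and GetNodeFromRootByPath helpers above (the input is hypothetical): nesting is two spaces per level, a trailing ":" marks a folder, and a file line is [FileSize:][Modified:]Url.

// Sketch, e.g. in a _test.go file of the same package.
package _123Link

import "fmt"

func ExampleBuildTree() {
    text := "folder1:\n" +
        "  1024:https://vip.123pan.com/29/folder/file.mp3\n" +
        "  https://vip.123pan.com/29/folder/other.mp3\n"
    root, err := BuildTree(text)
    if err != nil {
        panic(err)
    }
    root.calSize() // a folder's size becomes the sum of its children
    node := GetNodeFromRootByPath(root, "/folder1/file.mp3")
    fmt.Println(node.Size, node.Url)
    // Output: 1024 https://vip.123pan.com/29/folder/file.mp3
}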

drivers/123_link/types.go (new file, 66 lines)

@ -0,0 +1,66 @@
package _123Link
import (
"time"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
)
// Node is a node in the folder tree
type Node struct {
Url string
Name string
Level int
Modified int64
Size int64
Children []*Node
}
func (node *Node) getByPath(paths []string) *Node {
if len(paths) == 0 || node == nil {
return nil
}
if node.Name != paths[0] {
return nil
}
if len(paths) == 1 {
return node
}
for _, child := range node.Children {
tmp := child.getByPath(paths[1:])
if tmp != nil {
return tmp
}
}
return nil
}
func (node *Node) isFile() bool {
return node.Url != ""
}
func (node *Node) calSize() int64 {
if node.isFile() {
return node.Size
}
var size int64 = 0
for _, child := range node.Children {
size += child.calSize()
}
node.Size = size
return size
}
func nodeToObj(node *Node, path string) (model.Obj, error) {
if node == nil {
return nil, errs.ObjectNotFound
}
return &model.Object{
Name: node.Name,
Size: node.Size,
Modified: time.Unix(node.Modified, 0),
IsFolder: !node.isFile(),
Path: path,
}, nil
}

drivers/123_link/util.go (new file, 30 lines)

@ -0,0 +1,30 @@
package _123Link
import (
"crypto/md5"
"fmt"
"math/rand"
"net/url"
"time"
)
func SignURL(originURL, privateKey string, uid uint64, validDuration time.Duration) (newURL string, err error) {
if privateKey == "" {
return originURL, nil
}
var (
ts = time.Now().Add(validDuration).Unix() // expiration timestamp
rInt = rand.Int() // random positive integer
objURL *url.URL
)
objURL, err = url.Parse(originURL)
if err != nil {
return "", err
}
authKey := fmt.Sprintf("%d-%d-%d-%x", ts, rInt, uid, md5.Sum([]byte(fmt.Sprintf("%s-%d-%d-%d-%s",
objURL.Path, ts, rInt, uid, privateKey))))
v := objURL.Query()
v.Add("auth_key", authKey)
objURL.RawQuery = v.Encode()
return objURL.String(), nil
}
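
A short sketch of the signer above (key and uid are hypothetical; valid_duration defaults to 30 minutes in meta.go): with an empty private_key the URL is returned unchanged, otherwise an auth_key query parameter of the form <expires>-<random>-<uid>-<md5 hex> is appended.

// Sketch, same package as SignURL above; all values are hypothetical.
func signExample() (string, error) {
    return SignURL(
        "https://vip.123pan.com/29/folder/file.mp3",
        "my-private-key", // hypothetical private_key
        1000,             // hypothetical uid
        30*time.Minute,   // the valid_duration default
    )
}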


@ -35,25 +35,40 @@ func (d *Yun139) Init(ctx context.Context) error {
if d.Authorization == "" {
return fmt.Errorf("authorization is empty")
}
decode, err := base64.StdEncoding.DecodeString(d.Authorization)
if err != nil {
return err
}
decodeStr := string(decode)
splits := strings.Split(decodeStr, ":")
if len(splits) < 2 {
return fmt.Errorf("authorization is invalid, splits < 2")
}
d.Account = splits[1]
_, err = d.post("/orchestration/personalCloud/user/v1.0/qryUserExternInfo", base.Json{
"qryUserExternInfoReq": base.Json{
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
switch d.Addition.Type {
case MetaPersonalNew:
if len(d.Addition.RootFolderID) == 0 {
d.RootFolderID = "/"
}
return nil
case MetaPersonal:
if len(d.Addition.RootFolderID) == 0 {
d.RootFolderID = "root"
}
fallthrough
case MetaFamily:
decode, err := base64.StdEncoding.DecodeString(d.Authorization)
if err != nil {
return err
}
decodeStr := string(decode)
splits := strings.Split(decodeStr, ":")
if len(splits) < 2 {
return fmt.Errorf("authorization is invalid, splits < 2")
}
d.Account = splits[1]
_, err = d.post("/orchestration/personalCloud/user/v1.0/qryUserExternInfo", base.Json{
"qryUserExternInfoReq": base.Json{
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
},
},
}, nil)
return err
}, nil)
return err
default:
return errs.NotImplement
}
}
func (d *Yun139) Drop(ctx context.Context) error {
@ -61,35 +76,65 @@ func (d *Yun139) Drop(ctx context.Context) error {
}
func (d *Yun139) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
if d.isFamily() {
return d.familyGetFiles(dir.GetID())
} else {
switch d.Addition.Type {
case MetaPersonalNew:
return d.personalGetFiles(dir.GetID())
case MetaPersonal:
return d.getFiles(dir.GetID())
case MetaFamily:
return d.familyGetFiles(dir.GetID())
default:
return nil, errs.NotImplement
}
}
func (d *Yun139) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
u, err := d.getLink(file.GetID())
var url string
var err error
switch d.Addition.Type {
case MetaPersonalNew:
url, err = d.personalGetLink(file.GetID())
case MetaPersonal:
fallthrough
case MetaFamily:
url, err = d.getLink(file.GetID())
default:
return nil, errs.NotImplement
}
if err != nil {
return nil, err
}
return &model.Link{URL: u}, nil
return &model.Link{URL: url}, nil
}
func (d *Yun139) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
data := base.Json{
"createCatalogExtReq": base.Json{
"parentCatalogID": parentDir.GetID(),
"newCatalogName": dirName,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
var err error
switch d.Addition.Type {
case MetaPersonalNew:
data := base.Json{
"parentFileId": parentDir.GetID(),
"name": dirName,
"description": "",
"type": "folder",
"fileRenameMode": "force_rename",
}
pathname := "/hcy/file/create"
_, err = d.personalPost(pathname, data, nil)
case MetaPersonal:
data := base.Json{
"createCatalogExtReq": base.Json{
"parentCatalogID": parentDir.GetID(),
"newCatalogName": dirName,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
},
},
}
pathname := "/orchestration/personalCloud/catalog/v1.0/createCatalogExt"
if d.isFamily() {
data = base.Json{
}
pathname := "/orchestration/personalCloud/catalog/v1.0/createCatalogExt"
_, err = d.post(pathname, data, nil)
case MetaFamily:
data := base.Json{
"cloudID": d.CloudID,
"commonAccountInfo": base.Json{
"account": d.Account,
@ -97,144 +142,198 @@ func (d *Yun139) MakeDir(ctx context.Context, parentDir model.Obj, dirName strin
},
"docLibName": dirName,
}
pathname = "/orchestration/familyCloud/cloudCatalog/v1.0/createCloudDoc"
pathname := "/orchestration/familyCloud/cloudCatalog/v1.0/createCloudDoc"
_, err = d.post(pathname, data, nil)
default:
err = errs.NotImplement
}
_, err := d.post(pathname, data, nil)
return err
}
func (d *Yun139) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
if d.isFamily() {
return errs.NotImplement
}
var contentInfoList []string
var catalogInfoList []string
if srcObj.IsDir() {
catalogInfoList = append(catalogInfoList, srcObj.GetID())
} else {
contentInfoList = append(contentInfoList, srcObj.GetID())
}
data := base.Json{
"createBatchOprTaskReq": base.Json{
"taskType": 3,
"actionType": "304",
"taskInfo": base.Json{
"contentInfoList": contentInfoList,
"catalogInfoList": catalogInfoList,
"newCatalogID": dstDir.GetID(),
func (d *Yun139) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
switch d.Addition.Type {
case MetaPersonalNew:
data := base.Json{
"fileIds": []string{srcObj.GetID()},
"toParentFileId": dstDir.GetID(),
}
pathname := "/hcy/file/batchMove"
_, err := d.personalPost(pathname, data, nil)
if err != nil {
return nil, err
}
return srcObj, nil
case MetaPersonal:
var contentInfoList []string
var catalogInfoList []string
if srcObj.IsDir() {
catalogInfoList = append(catalogInfoList, srcObj.GetID())
} else {
contentInfoList = append(contentInfoList, srcObj.GetID())
}
data := base.Json{
"createBatchOprTaskReq": base.Json{
"taskType": 3,
"actionType": "304",
"taskInfo": base.Json{
"contentInfoList": contentInfoList,
"catalogInfoList": catalogInfoList,
"newCatalogID": dstDir.GetID(),
},
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
},
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
},
}
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
_, err := d.post(pathname, data, nil)
if err != nil {
return nil, err
}
return srcObj, nil
default:
return nil, errs.NotImplement
}
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
_, err := d.post(pathname, data, nil)
return err
}
func (d *Yun139) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
if d.isFamily() {
return errs.NotImplement
}
var data base.Json
var pathname string
if srcObj.IsDir() {
data = base.Json{
"catalogID": srcObj.GetID(),
"catalogName": newName,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
var err error
switch d.Addition.Type {
case MetaPersonalNew:
data := base.Json{
"fileId": srcObj.GetID(),
"name": newName,
"description": "",
}
pathname = "/orchestration/personalCloud/catalog/v1.0/updateCatalogInfo"
} else {
data = base.Json{
"contentID": srcObj.GetID(),
"contentName": newName,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
pathname := "/hcy/file/update"
_, err = d.personalPost(pathname, data, nil)
case MetaPersonal:
var data base.Json
var pathname string
if srcObj.IsDir() {
data = base.Json{
"catalogID": srcObj.GetID(),
"catalogName": newName,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
}
pathname = "/orchestration/personalCloud/catalog/v1.0/updateCatalogInfo"
} else {
data = base.Json{
"contentID": srcObj.GetID(),
"contentName": newName,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
}
pathname = "/orchestration/personalCloud/content/v1.0/updateContentInfo"
}
pathname = "/orchestration/personalCloud/content/v1.0/updateContentInfo"
_, err = d.post(pathname, data, nil)
default:
err = errs.NotImplement
}
_, err := d.post(pathname, data, nil)
return err
}
func (d *Yun139) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
if d.isFamily() {
return errs.NotImplement
}
var contentInfoList []string
var catalogInfoList []string
if srcObj.IsDir() {
catalogInfoList = append(catalogInfoList, srcObj.GetID())
} else {
contentInfoList = append(contentInfoList, srcObj.GetID())
}
data := base.Json{
"createBatchOprTaskReq": base.Json{
"taskType": 3,
"actionType": 309,
"taskInfo": base.Json{
"contentInfoList": contentInfoList,
"catalogInfoList": catalogInfoList,
"newCatalogID": dstDir.GetID(),
var err error
switch d.Addition.Type {
case MetaPersonalNew:
data := base.Json{
"fileIds": []string{srcObj.GetID()},
"toParentFileId": dstDir.GetID(),
}
pathname := "/hcy/file/batchCopy"
_, err := d.personalPost(pathname, data, nil)
return err
case MetaPersonal:
var contentInfoList []string
var catalogInfoList []string
if srcObj.IsDir() {
catalogInfoList = append(catalogInfoList, srcObj.GetID())
} else {
contentInfoList = append(contentInfoList, srcObj.GetID())
}
data := base.Json{
"createBatchOprTaskReq": base.Json{
"taskType": 3,
"actionType": 309,
"taskInfo": base.Json{
"contentInfoList": contentInfoList,
"catalogInfoList": catalogInfoList,
"newCatalogID": dstDir.GetID(),
},
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
},
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
},
}
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
_, err = d.post(pathname, data, nil)
default:
err = errs.NotImplement
}
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
_, err := d.post(pathname, data, nil)
return err
}
func (d *Yun139) Remove(ctx context.Context, obj model.Obj) error {
var contentInfoList []string
var catalogInfoList []string
if obj.IsDir() {
catalogInfoList = append(catalogInfoList, obj.GetID())
} else {
contentInfoList = append(contentInfoList, obj.GetID())
}
data := base.Json{
"createBatchOprTaskReq": base.Json{
"taskType": 2,
"actionType": 201,
"taskInfo": base.Json{
"newCatalogID": "",
"contentInfoList": contentInfoList,
"catalogInfoList": catalogInfoList,
},
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
},
}
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
if d.isFamily() {
data = base.Json{
"catalogList": catalogInfoList,
"contentList": contentInfoList,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
"sourceCatalogType": 1002,
"taskType": 2,
switch d.Addition.Type {
case MetaPersonalNew:
data := base.Json{
"fileIds": []string{obj.GetID()},
}
pathname = "/orchestration/familyCloud/batchOprTask/v1.0/createBatchOprTask"
pathname := "/hcy/recyclebin/batchTrash"
_, err := d.personalPost(pathname, data, nil)
return err
case MetaPersonal:
fallthrough
case MetaFamily:
var contentInfoList []string
var catalogInfoList []string
if obj.IsDir() {
catalogInfoList = append(catalogInfoList, obj.GetID())
} else {
contentInfoList = append(contentInfoList, obj.GetID())
}
data := base.Json{
"createBatchOprTaskReq": base.Json{
"taskType": 2,
"actionType": 201,
"taskInfo": base.Json{
"newCatalogID": "",
"contentInfoList": contentInfoList,
"catalogInfoList": catalogInfoList,
},
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
},
}
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
if d.isFamily() {
data = base.Json{
"catalogList": catalogInfoList,
"contentList": contentInfoList,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
"sourceCatalogType": 1002,
"taskType": 2,
}
pathname = "/orchestration/familyCloud/batchOprTask/v1.0/createBatchOprTask"
}
_, err := d.post(pathname, data, nil)
return err
default:
return errs.NotImplement
}
_, err := d.post(pathname, data, nil)
return err
}
const (
@ -254,94 +353,208 @@ func getPartSize(size int64) int64 {
}
func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
data := base.Json{
"manualRename": 2,
"operation": 0,
"fileCount": 1,
"totalSize": 0, // 去除上传大小限制
"uploadContentList": []base.Json{{
"contentName": stream.GetName(),
"contentSize": 0, // 去除上传大小限制
// "digest": "5a3231986ce7a6b46e408612d385bafa"
}},
"parentCatalogID": dstDir.GetID(),
"newCatalogName": "",
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
}
pathname := "/orchestration/personalCloud/uploadAndDownload/v1.0/pcUploadFileRequest"
if d.isFamily() {
data = d.newJson(base.Json{
"fileCount": 1,
"manualRename": 2,
"operation": 0,
"path": "",
"seqNo": "",
"totalSize": 0,
"uploadContentList": []base.Json{{
"contentName": stream.GetName(),
"contentSize": 0,
// "digest": "5a3231986ce7a6b46e408612d385bafa"
switch d.Addition.Type {
case MetaPersonalNew:
var err error
fullHash := stream.GetHash().GetHash(utils.SHA256)
if len(fullHash) <= 0 {
tmpF, err := stream.CacheFullInTempFile()
if err != nil {
return err
}
fullHash, err = utils.HashFile(utils.SHA256, tmpF)
if err != nil {
return err
}
}
// return errs.NotImplement
data := base.Json{
"contentHash": fullHash,
"contentHashAlgorithm": "SHA256",
"contentType": "application/octet-stream",
"parallelUpload": false,
"partInfos": []base.Json{{
"parallelHashCtx": base.Json{
"partOffset": 0,
},
"partNumber": 1,
"partSize": stream.GetSize(),
}},
})
pathname = "/orchestration/familyCloud/content/v1.0/getFileUploadURL"
return errs.NotImplement
}
var resp UploadResp
_, err := d.post(pathname, data, &resp)
if err != nil {
return err
}
// Progress
p := driver.NewProgress(stream.GetSize(), up)
var partSize = getPartSize(stream.GetSize())
part := (stream.GetSize() + partSize - 1) / partSize
if part == 0 {
part = 1
}
for i := int64(0); i < part; i++ {
if utils.IsCanceled(ctx) {
return ctx.Err()
"size": stream.GetSize(),
"parentFileId": dstDir.GetID(),
"name": stream.GetName(),
"type": "file",
"fileRenameMode": "auto_rename",
}
start := i * partSize
byteSize := stream.GetSize() - start
if byteSize > partSize {
byteSize = partSize
}
limitReader := io.LimitReader(stream, byteSize)
// Update Progress
r := io.TeeReader(limitReader, p)
req, err := http.NewRequest("POST", resp.Data.UploadResult.RedirectionURL, r)
pathname := "/hcy/file/create"
var resp PersonalUploadResp
_, err = d.personalPost(pathname, data, &resp)
if err != nil {
return err
}
if resp.Data.Exist || resp.Data.RapidUpload {
return nil
}
// Progress
p := driver.NewProgress(stream.GetSize(), up)
// Update Progress
r := io.TeeReader(stream, p)
req, err := http.NewRequest("PUT", resp.Data.PartInfos[0].UploadUrl, r)
if err != nil {
return err
}
req = req.WithContext(ctx)
req.Header.Set("Content-Type", "text/plain;name="+unicode(stream.GetName()))
req.Header.Set("contentSize", strconv.FormatInt(stream.GetSize(), 10))
req.Header.Set("range", fmt.Sprintf("bytes=%d-%d", start, start+byteSize-1))
req.Header.Set("uploadtaskID", resp.Data.UploadResult.UploadTaskID)
req.Header.Set("rangeType", "0")
req.ContentLength = byteSize
req.Header.Set("Content-Type", "application/octet-stream")
req.Header.Set("Content-Length", fmt.Sprint(stream.GetSize()))
req.Header.Set("Origin", "https://yun.139.com")
req.Header.Set("Referer", "https://yun.139.com/")
req.ContentLength = stream.GetSize()
res, err := base.HttpClient.Do(req)
if err != nil {
return err
}
_ = res.Body.Close()
log.Debugf("%+v", res)
if res.StatusCode != http.StatusOK {
return fmt.Errorf("unexpected status code: %d", res.StatusCode)
}
}
return nil
data = base.Json{
"contentHash": fullHash,
"contentHashAlgorithm": "SHA256",
"fileId": resp.Data.FileId,
"uploadId": resp.Data.UploadId,
}
_, err = d.personalPost("/hcy/file/complete", data, nil)
if err != nil {
return err
}
return nil
case MetaPersonal:
fallthrough
case MetaFamily:
data := base.Json{
"manualRename": 2,
"operation": 0,
"fileCount": 1,
"totalSize": 0, // 去除上传大小限制
"uploadContentList": []base.Json{{
"contentName": stream.GetName(),
"contentSize": 0, // 去除上传大小限制
// "digest": "5a3231986ce7a6b46e408612d385bafa"
}},
"parentCatalogID": dstDir.GetID(),
"newCatalogName": "",
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
}
pathname := "/orchestration/personalCloud/uploadAndDownload/v1.0/pcUploadFileRequest"
if d.isFamily() {
// data = d.newJson(base.Json{
// "fileCount": 1,
// "manualRename": 2,
// "operation": 0,
// "path": "",
// "seqNo": "",
// "totalSize": 0,
// "uploadContentList": []base.Json{{
// "contentName": stream.GetName(),
// "contentSize": 0,
// // "digest": "5a3231986ce7a6b46e408612d385bafa"
// }},
// })
// pathname = "/orchestration/familyCloud/content/v1.0/getFileUploadURL"
return errs.NotImplement
}
var resp UploadResp
_, err := d.post(pathname, data, &resp)
if err != nil {
return err
}
// Progress
p := driver.NewProgress(stream.GetSize(), up)
var partSize = getPartSize(stream.GetSize())
part := (stream.GetSize() + partSize - 1) / partSize
if part == 0 {
part = 1
}
for i := int64(0); i < part; i++ {
if utils.IsCanceled(ctx) {
return ctx.Err()
}
start := i * partSize
byteSize := stream.GetSize() - start
if byteSize > partSize {
byteSize = partSize
}
limitReader := io.LimitReader(stream, byteSize)
// Update Progress
r := io.TeeReader(limitReader, p)
req, err := http.NewRequest("POST", resp.Data.UploadResult.RedirectionURL, r)
if err != nil {
return err
}
req = req.WithContext(ctx)
req.Header.Set("Content-Type", "text/plain;name="+unicode(stream.GetName()))
req.Header.Set("contentSize", strconv.FormatInt(stream.GetSize(), 10))
req.Header.Set("range", fmt.Sprintf("bytes=%d-%d", start, start+byteSize-1))
req.Header.Set("uploadtaskID", resp.Data.UploadResult.UploadTaskID)
req.Header.Set("rangeType", "0")
req.ContentLength = byteSize
res, err := base.HttpClient.Do(req)
if err != nil {
return err
}
_ = res.Body.Close()
log.Debugf("%+v", res)
if res.StatusCode != http.StatusOK {
return fmt.Errorf("unexpected status code: %d", res.StatusCode)
}
}
return nil
default:
return errs.NotImplement
}
}
func (d *Yun139) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
switch d.Addition.Type {
case MetaPersonalNew:
var resp base.Json
var uri string
data := base.Json{
"category": "video",
"fileId": args.Obj.GetID(),
}
switch args.Method {
case "video_preview":
uri = "/hcy/videoPreview/getPreviewInfo"
default:
return nil, errs.NotSupport
}
_, err := d.personalPost(uri, data, &resp)
if err != nil {
return nil, err
}
return resp["data"], nil
default:
return nil, errs.NotImplement
}
}
var _ driver.Driver = (*Yun139)(nil)


@ -9,7 +9,7 @@ type Addition struct {
//Account string `json:"account" required:"true"`
Authorization string `json:"authorization" type:"text" required:"true"`
driver.RootID
Type string `json:"type" type:"select" options:"personal,family" default:"personal"`
Type string `json:"type" type:"select" options:"personal,family,personal_new" default:"personal"`
CloudID string `json:"cloud_id"`
}


@ -1,5 +1,11 @@
package _139
const (
MetaPersonal string = "personal"
MetaFamily string = "family"
MetaPersonalNew string = "personal_new"
)
type BaseResp struct {
Success bool `json:"success"`
Code string `json:"code"`
@ -10,7 +16,7 @@ type Catalog struct {
CatalogID string `json:"catalogID"`
CatalogName string `json:"catalogName"`
//CatalogType int `json:"catalogType"`
//CreateTime string `json:"createTime"`
CreateTime string `json:"createTime"`
UpdateTime string `json:"updateTime"`
//IsShared bool `json:"isShared"`
//CatalogLevel int `json:"catalogLevel"`
@ -63,7 +69,7 @@ type Content struct {
//ParentCatalogID string `json:"parentCatalogId"`
//Channel string `json:"channel"`
//GeoLocFlag string `json:"geoLocFlag"`
//Digest string `json:"digest"`
Digest string `json:"digest"`
//Version string `json:"version"`
//FileEtag string `json:"fileEtag"`
//FileVersion string `json:"fileVersion"`
@ -141,7 +147,7 @@ type CloudContent struct {
//ContentSuffix string `json:"contentSuffix"`
ContentSize int64 `json:"contentSize"`
//ContentDesc string `json:"contentDesc"`
//CreateTime string `json:"createTime"`
CreateTime string `json:"createTime"`
//Shottime interface{} `json:"shottime"`
LastUpdateTime string `json:"lastUpdateTime"`
ThumbnailURL string `json:"thumbnailURL"`
@ -165,7 +171,7 @@ type CloudCatalog struct {
CatalogID string `json:"catalogID"`
CatalogName string `json:"catalogName"`
//CloudID string `json:"cloudID"`
//CreateTime string `json:"createTime"`
CreateTime string `json:"createTime"`
LastUpdateTime string `json:"lastUpdateTime"`
//Creator string `json:"creator"`
//CreatorNickname string `json:"creatorNickname"`
@ -185,3 +191,42 @@ type QueryContentListResp struct {
RecallContent interface{} `json:"recallContent"`
} `json:"data"`
}
type PersonalThumbnail struct {
Style string `json:"style"`
Url string `json:"url"`
}
type PersonalFileItem struct {
FileId string `json:"fileId"`
Name string `json:"name"`
Size int64 `json:"size"`
Type string `json:"type"`
CreatedAt string `json:"createdAt"`
UpdatedAt string `json:"updatedAt"`
Thumbnails []PersonalThumbnail `json:"thumbnailUrls"`
}
type PersonalListResp struct {
BaseResp
Data struct {
Items []PersonalFileItem `json:"items"`
NextPageCursor string `json:"nextPageCursor"`
}
}
type PersonalPartInfo struct {
PartNumber int `json:"partNumber"`
UploadUrl string `json:"uploadUrl"`
}
type PersonalUploadResp struct {
BaseResp
Data struct {
FileId string `json:"fileId"`
PartInfos []PersonalPartInfo `json:"partInfos"`
Exist bool `json:"exist"`
RapidUpload bool `json:"rapidUpload"`
UploadId string `json:"uploadId"`
}
}


@ -48,7 +48,7 @@ func calSign(body, ts, randStr string) string {
}
func getTime(t string) time.Time {
stamp, _ := time.ParseInLocation("20060102150405", t, time.Local)
stamp, _ := time.ParseInLocation("20060102150405", t, utils.CNLoc)
return stamp
}
@ -139,6 +139,7 @@ func (d *Yun139) getFiles(catalogID string) ([]model.Obj, error) {
Name: catalog.CatalogName,
Size: 0,
Modified: getTime(catalog.UpdateTime),
Ctime: getTime(catalog.CreateTime),
IsFolder: true,
}
files = append(files, &f)
@ -150,6 +151,7 @@ func (d *Yun139) getFiles(catalogID string) ([]model.Obj, error) {
Name: content.ContentName,
Size: content.ContentSize,
Modified: getTime(content.UpdateTime),
HashInfo: utils.NewHashInfo(utils.MD5, content.Digest),
},
Thumbnail: model.Thumbnail{Thumbnail: content.ThumbnailURL},
//Thumbnail: content.BigthumbnailURL,
@ -202,6 +204,7 @@ func (d *Yun139) familyGetFiles(catalogID string) ([]model.Obj, error) {
Size: 0,
IsFolder: true,
Modified: getTime(catalog.LastUpdateTime),
Ctime: getTime(catalog.CreateTime),
}
files = append(files, &f)
}
@ -212,6 +215,7 @@ func (d *Yun139) familyGetFiles(catalogID string) ([]model.Obj, error) {
Name: content.ContentName,
Size: content.ContentSize,
Modified: getTime(content.LastUpdateTime),
Ctime: getTime(content.CreateTime),
},
Thumbnail: model.Thumbnail{Thumbnail: content.ThumbnailURL},
//Thumbnail: content.BigthumbnailURL,
@ -248,3 +252,154 @@ func unicode(str string) string {
textUnquoted := textQuoted[1 : len(textQuoted)-1]
return textUnquoted
}
func (d *Yun139) personalRequest(pathname string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
url := "https://personal-kd-njs.yun.139.com" + pathname
req := base.RestyClient.R()
randStr := random.String(16)
ts := time.Now().Format("2006-01-02 15:04:05")
if callback != nil {
callback(req)
}
body, err := utils.Json.Marshal(req.Body)
if err != nil {
return nil, err
}
sign := calSign(string(body), ts, randStr)
svcType := "1"
if d.isFamily() {
svcType = "2"
}
req.SetHeaders(map[string]string{
"Accept": "application/json, text/plain, */*",
"Authorization": "Basic " + d.Authorization,
"Caller": "web",
"Cms-Device": "default",
"Mcloud-Channel": "1000101",
"Mcloud-Client": "10701",
"Mcloud-Route": "001",
"Mcloud-Sign": fmt.Sprintf("%s,%s,%s", ts, randStr, sign),
"Mcloud-Version": "7.13.0",
"Origin": "https://yun.139.com",
"Referer": "https://yun.139.com/w/",
"x-DeviceInfo": "||9|7.13.0|chrome|120.0.0.0|||windows 10||zh-CN|||",
"x-huawei-channelSrc": "10000034",
"x-inner-ntwk": "2",
"x-m4c-caller": "PC",
"x-m4c-src": "10002",
"x-SvcType": svcType,
"X-Yun-Api-Version": "v1",
"X-Yun-App-Channel": "10000034",
"X-Yun-Channel-Source": "10000034",
"X-Yun-Client-Info": "||9|7.13.0|chrome|120.0.0.0|||windows 10||zh-CN|||dW5kZWZpbmVk||",
"X-Yun-Module-Type": "100",
"X-Yun-Svc-Type": "1",
})
var e BaseResp
req.SetResult(&e)
res, err := req.Execute(method, url)
if err != nil {
return nil, err
}
log.Debugln(res.String())
if !e.Success {
return nil, errors.New(e.Message)
}
if resp != nil {
err = utils.Json.Unmarshal(res.Body(), resp)
if err != nil {
return nil, err
}
}
return res.Body(), nil
}
func (d *Yun139) personalPost(pathname string, data interface{}, resp interface{}) ([]byte, error) {
return d.personalRequest(pathname, http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, resp)
}
func getPersonalTime(t string) time.Time {
stamp, err := time.ParseInLocation("2006-01-02T15:04:05.999-07:00", t, utils.CNLoc)
if err != nil {
panic(err)
}
return stamp
}
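getPersonalTime expects the ISO-8601 timestamps with millisecond precision and a numeric offset that the personal API returns in createdAt/updatedAt. A minimal standalone sketch of the same layout, using time.Local in place of utils.CNLoc (an assumption; the location only matters when the value carries no explicit offset, which this layout always does):

package main

import (
    "fmt"
    "time"
)

func main() {
    const layout = "2006-01-02T15:04:05.999-07:00" // same layout as getPersonalTime
    t, err := time.ParseInLocation(layout, "2024-02-28T12:34:56.789+08:00", time.Local)
    if err != nil {
        panic(err)
    }
    fmt.Println(t.UTC()) // 2024-02-28 04:34:56.789 +0000 UTC
}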
func (d *Yun139) personalGetFiles(fileId string) ([]model.Obj, error) {
files := make([]model.Obj, 0)
nextPageCursor := ""
for {
data := base.Json{
"imageThumbnailStyleList": []string{"Small", "Large"},
"orderBy": "updated_at",
"orderDirection": "DESC",
"pageInfo": base.Json{
"pageCursor": nextPageCursor,
"pageSize": 100,
},
"parentFileId": fileId,
}
var resp PersonalListResp
_, err := d.personalPost("/hcy/file/list", data, &resp)
if err != nil {
return nil, err
}
nextPageCursor = resp.Data.NextPageCursor
for _, item := range resp.Data.Items {
var isFolder = (item.Type == "folder")
var f model.Obj
if isFolder {
f = &model.Object{
ID: item.FileId,
Name: item.Name,
Size: 0,
Modified: getPersonalTime(item.UpdatedAt),
Ctime: getPersonalTime(item.CreatedAt),
IsFolder: isFolder,
}
} else {
var Thumbnails = item.Thumbnails
var ThumbnailUrl string
if len(Thumbnails) > 0 {
ThumbnailUrl = Thumbnails[len(Thumbnails)-1].Url
}
f = &model.ObjThumb{
Object: model.Object{
ID: item.FileId,
Name: item.Name,
Size: item.Size,
Modified: getPersonalTime(item.UpdatedAt),
Ctime: getPersonalTime(item.CreatedAt),
IsFolder: isFolder,
},
Thumbnail: model.Thumbnail{Thumbnail: ThumbnailUrl},
}
}
files = append(files, f)
}
if len(nextPageCursor) == 0 {
break
}
}
return files, nil
}
func (d *Yun139) personalGetLink(fileId string) (string, error) {
data := base.Json{
"fileId": fileId,
}
res, err := d.personalPost("/hcy/file/getDownloadUrl",
data, nil)
if err != nil {
return "", err
}
var cdnUrl = jsoniter.Get(res, "data", "cdnUrl").ToString()
if cdnUrl != "" {
return cdnUrl, nil
} else {
return jsoniter.Get(res, "data", "url").ToString(), nil
}
}

View File

@ -380,7 +380,7 @@ func (d *Cloud189) newUpload(ctx context.Context, dstDir model.Obj, file model.F
if err != nil {
return err
}
up(int(i * 100 / count))
up(float64(i) * 100 / float64(count))
}
fileMd5 := hex.EncodeToString(md5Sum.Sum(nil))
sliceMd5 := fileMd5
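The change from up(int(i * 100 / count)) to up(float64(i) * 100 / float64(count)) keeps fractional progress: with integer math the percentage truncates to whole steps and stays at 0 until a full percent of parts has completed. A minimal standalone sketch of the difference (the print below is a stand-in, not driver.UpdateProgress):

package main

import "fmt"

func main() {
    i, count := 7, 200 // 7 of 200 parts uploaded

    oldPercent := i * 100 / count                   // integer math: 3
    newPercent := float64(i) * 100 / float64(count) // float math: 3.5

    fmt.Println(oldPercent, newPercent)
}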

View File

@ -27,10 +27,15 @@ type Cloud189PC struct {
tokenInfo *AppSessionResp
uploadThread int
storageConfig driver.Config
}
func (y *Cloud189PC) Config() driver.Config {
return config
if y.storageConfig.Name == "" {
y.storageConfig = config
}
return y.storageConfig
}
func (y *Cloud189PC) GetAddition() driver.Additional {
@ -38,6 +43,9 @@ func (y *Cloud189PC) GetAddition() driver.Additional {
}
func (y *Cloud189PC) Init(ctx context.Context) (err error) {
// Compatibility with the legacy upload API
y.storageConfig.NoOverwriteUpload = y.isFamily() && (y.Addition.RapidUpload || y.Addition.UploadMethod == "old")
// Handle personal-cloud and family-cloud parameters
if y.isFamily() && y.RootFolderID == "-11" {
y.RootFolderID = ""
@ -118,10 +126,11 @@ func (y *Cloud189PC) Link(ctx context.Context, file model.Obj, args model.LinkAr
// Follow the redirect to get the real download link
downloadUrl.URL = strings.Replace(strings.ReplaceAll(downloadUrl.URL, "&amp;", "&"), "http://", "https://", 1)
res, err := base.NoRedirectClient.R().SetContext(ctx).Get(downloadUrl.URL)
res, err := base.NoRedirectClient.R().SetContext(ctx).SetDoNotParseResponse(true).Get(downloadUrl.URL)
if err != nil {
return nil, err
}
defer res.RawBody().Close()
if res.StatusCode() == 302 {
downloadUrl.URL = res.Header().Get("location")
}
@ -302,6 +311,13 @@ func (y *Cloud189PC) Remove(ctx context.Context, obj model.Obj) error {
}
func (y *Cloud189PC) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
// The response is slow, so this is enabled on demand
if y.Addition.RapidUpload {
if newObj, err := y.RapidUpload(ctx, dstDir, stream); err == nil {
return newObj, nil
}
}
switch y.UploadMethod {
case "old":
return y.OldUpload(ctx, dstDir, stream, up)

View File

@ -16,6 +16,7 @@ type Addition struct {
FamilyID string `json:"family_id"`
UploadMethod string `json:"upload_method" type:"select" options:"stream,rapid,old" default:"stream"`
UploadThread string `json:"upload_thread" default:"3" help:"1<=thread<=32"`
RapidUpload bool `json:"rapid_upload"`
NoUseOcr bool `json:"no_use_ocr"`
}

View File

@ -513,7 +513,7 @@ func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file mo
if err != nil {
return err
}
up(int(threadG.Success()) * 100 / count)
up(float64(threadG.Success()) * 100 / float64(count))
return nil
})
}
@ -546,16 +546,30 @@ func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file mo
return resp.toFile(), nil
}
func (y *Cloud189PC) RapidUpload(ctx context.Context, dstDir model.Obj, stream model.FileStreamer) (model.Obj, error) {
fileMd5 := stream.GetHash().GetHash(utils.MD5)
if len(fileMd5) < utils.MD5.Width {
return nil, errors.New("invalid hash")
}
uploadInfo, err := y.OldUploadCreate(ctx, dstDir.GetID(), fileMd5, stream.GetName(), fmt.Sprint(stream.GetSize()))
if err != nil {
return nil, err
}
if uploadInfo.FileDataExists != 1 {
return nil, errors.New("rapid upload fail")
}
return y.OldUploadCommit(ctx, uploadInfo.FileCommitUrl, uploadInfo.UploadFileId)
}
// Fast upload
func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
// Requires the full-file MD5, so the stream must support io.Seek
tempFile, err := file.CacheFullInTempFile()
if err != nil {
return nil, err
}
defer func() {
_ = tempFile.Close()
}()
var sliceSize = partSize(file.GetSize())
count := int(math.Ceil(float64(file.GetSize()) / float64(sliceSize)))
@ -662,7 +676,7 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
return err
}
up(int(threadG.Success()) * 100 / len(uploadUrls))
up(float64(threadG.Success()) * 100 / float64(len(uploadUrls)))
uploadProgress.UploadParts[i] = ""
return nil
})
@ -739,68 +753,24 @@ func (y *Cloud189PC) GetMultiUploadUrls(ctx context.Context, uploadFileId string
// Legacy upload; the family cloud does not support overwriting
func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
// Requires the full-file MD5, so the stream must support io.Seek
tempFile, err := file.CacheFullInTempFile()
if err != nil {
return nil, err
}
defer func() {
_ = tempFile.Close()
}()
// Calculate the MD5
fileMd5 := md5.New()
if _, err := io.Copy(fileMd5, tempFile); err != nil {
fileMd5, err := utils.HashFile(utils.MD5, tempFile)
if err != nil {
return nil, err
}
if _, err = tempFile.Seek(0, io.SeekStart); err != nil {
return nil, err
}
fileMd5Hex := strings.ToUpper(hex.EncodeToString(fileMd5.Sum(nil)))
// Create an upload session
var uploadInfo CreateUploadFileResp
fullUrl := API_URL + "/createUploadFile.action"
if y.isFamily() {
fullUrl = API_URL + "/family/file/createFamilyFile.action"
}
_, err = y.post(fullUrl, func(req *resty.Request) {
req.SetContext(ctx)
if y.isFamily() {
req.SetQueryParams(map[string]string{
"familyId": y.FamilyID,
"fileMd5": fileMd5Hex,
"fileName": file.GetName(),
"fileSize": fmt.Sprint(file.GetSize()),
"parentId": dstDir.GetID(),
"resumePolicy": "1",
})
} else {
req.SetFormData(map[string]string{
"parentFolderId": dstDir.GetID(),
"fileName": file.GetName(),
"size": fmt.Sprint(file.GetSize()),
"md5": fileMd5Hex,
"opertype": "3",
"flag": "1",
"resumePolicy": "1",
"isLog": "0",
// "baseFileId": "",
// "lastWrite":"",
// "localPath": strings.ReplaceAll(param.LocalPath, "\\", "/"),
// "fileExt": "",
})
}
}, &uploadInfo)
uploadInfo, err := y.OldUploadCreate(ctx, dstDir.GetID(), fileMd5, file.GetName(), fmt.Sprint(file.GetSize()))
if err != nil {
return nil, err
}
// The file does not exist in the drive yet; start uploading
status := GetUploadFileStatusResp{CreateUploadFileResp: uploadInfo}
for status.Size < file.GetSize() && status.FileDataExists != 1 {
status := GetUploadFileStatusResp{CreateUploadFileResp: *uploadInfo}
for status.GetSize() < file.GetSize() && status.FileDataExists != 1 {
if utils.IsCanceled(ctx) {
return nil, ctx.Err()
}
@ -839,28 +809,70 @@ func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model
if err != nil {
return nil, err
}
if _, err := tempFile.Seek(status.GetSize(), io.SeekStart); err != nil {
return nil, err
}
up(int(status.Size / file.GetSize()))
up(float64(status.GetSize()) / float64(file.GetSize()) * 100)
}
// Commit
return y.OldUploadCommit(ctx, status.FileCommitUrl, status.UploadFileId)
}
// Create an upload session
func (y *Cloud189PC) OldUploadCreate(ctx context.Context, parentID string, fileMd5, fileName, fileSize string) (*CreateUploadFileResp, error) {
var uploadInfo CreateUploadFileResp
fullUrl := API_URL + "/createUploadFile.action"
if y.isFamily() {
fullUrl = API_URL + "/family/file/createFamilyFile.action"
}
_, err := y.post(fullUrl, func(req *resty.Request) {
req.SetContext(ctx)
if y.isFamily() {
req.SetQueryParams(map[string]string{
"familyId": y.FamilyID,
"parentId": parentID,
"fileMd5": fileMd5,
"fileName": fileName,
"fileSize": fileSize,
"resumePolicy": "1",
})
} else {
req.SetFormData(map[string]string{
"parentFolderId": parentID,
"fileName": fileName,
"size": fileSize,
"md5": fileMd5,
"opertype": "3",
"flag": "1",
"resumePolicy": "1",
"isLog": "0",
})
}
}, &uploadInfo)
if err != nil {
return nil, err
}
return &uploadInfo, nil
}
// Commit the uploaded file
func (y *Cloud189PC) OldUploadCommit(ctx context.Context, fileCommitUrl string, uploadFileID int64) (model.Obj, error) {
var resp OldCommitUploadFileResp
_, err = y.post(status.FileCommitUrl, func(req *resty.Request) {
_, err := y.post(fileCommitUrl, func(req *resty.Request) {
req.SetContext(ctx)
if y.isFamily() {
req.SetHeaders(map[string]string{
"ResumePolicy": "1",
"UploadFileId": fmt.Sprint(status.UploadFileId),
"UploadFileId": fmt.Sprint(uploadFileID),
"FamilyId": fmt.Sprint(y.FamilyID),
})
} else {
req.SetFormData(map[string]string{
"opertype": "3",
"resumePolicy": "1",
"uploadFileId": fmt.Sprint(status.UploadFileId),
"uploadFileId": fmt.Sprint(uploadFileID),
"isLog": "0",
})
}

View File

@ -8,6 +8,7 @@ import (
"path"
"strconv"
"strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/conf"
@ -174,13 +175,13 @@ func (d *AListV3) Remove(ctx context.Context, obj model.Obj) error {
}
func (d *AListV3) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
_, err := d.request("/fs/put", http.MethodPut, func(req *resty.Request) {
_, err := d.requestWithTimeout("/fs/put", http.MethodPut, func(req *resty.Request) {
req.SetHeader("File-Path", path.Join(dstDir.GetPath(), stream.GetName())).
SetHeader("Password", d.MetaPassword).
SetHeader("Content-Length", strconv.FormatInt(stream.GetSize(), 10)).
SetContentLength(true).
SetBody(io.ReadCloser(stream))
})
}, time.Hour*6)
return err
}

View File

@ -3,6 +3,7 @@ package alist_v3
import (
"fmt"
"net/http"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/op"
@ -56,3 +57,33 @@ func (d *AListV3) request(api, method string, callback base.ReqCallback, retry .
}
return res.Body(), nil
}
func (d *AListV3) requestWithTimeout(api, method string, callback base.ReqCallback, timeout time.Duration, retry ...bool) ([]byte, error) {
url := d.Address + "/api" + api
client := base.NewRestyClient().SetTimeout(timeout)
req := client.R()
req.SetHeader("Authorization", d.Token)
if callback != nil {
callback(req)
}
res, err := req.Execute(method, url)
if err != nil {
return nil, err
}
log.Debugf("[alist_v3] response body: %s", res.String())
if res.StatusCode() >= 400 {
return nil, fmt.Errorf("request failed, status: %s", res.Status())
}
code := utils.Json.Get(res.Body(), "code").ToInt()
if code != 200 {
if (code == 401 || code == 403) && !utils.IsBool(retry...) {
err = d.login()
if err != nil {
return nil, err
}
return d.requestWithTimeout(api, method, callback, timeout, true)
}
return nil, fmt.Errorf("request failed,code: %d, message: %s", code, utils.Json.Get(res.Body(), "message").ToString())
}
return res.Body(), nil
}
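requestWithTimeout exists because /fs/put streams the whole file body through a single request, so the shared client's DefaultTimeout could cut large uploads short; the Put change above now passes time.Hour*6. A minimal standalone sketch of the same pattern with a dedicated resty client (endpoint and header values are placeholders, not the alist API):

package main

import (
    "time"

    "github.com/go-resty/resty/v2"
)

func main() {
    // A dedicated client, so the long timeout does not leak into other requests.
    client := resty.New().
        SetRetryCount(3).
        SetTimeout(6 * time.Hour)

    _, _ = client.R().
        SetHeader("Authorization", "placeholder-token").
        Put("https://example.com/api/fs/put") // placeholder endpoint
}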

View File

@ -7,7 +7,6 @@ import (
"encoding/base64"
"encoding/hex"
"fmt"
"github.com/alist-org/alist/v3/internal/stream"
"io"
"math"
"math/big"
@ -15,6 +14,8 @@ import (
"os"
"time"
"github.com/alist-org/alist/v3/internal/stream"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/internal/driver"
@ -51,7 +52,7 @@ func (d *AliDrive) Init(ctx context.Context) error {
return err
}
// get driver id
res, err, _ := d.request("https://api.aliyundrive.com/v2/user/get", http.MethodPost, nil, nil)
res, err, _ := d.request("https://api.alipan.com/v2/user/get", http.MethodPost, nil, nil)
if err != nil {
return err
}
@ -105,7 +106,7 @@ func (d *AliDrive) Link(ctx context.Context, file model.Obj, args model.LinkArgs
"file_id": file.GetID(),
"expire_sec": 14400,
}
res, err, _ := d.request("https://api.aliyundrive.com/v2/file/get_download_url", http.MethodPost, func(req *resty.Request) {
res, err, _ := d.request("https://api.alipan.com/v2/file/get_download_url", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
if err != nil {
@ -113,14 +114,14 @@ func (d *AliDrive) Link(ctx context.Context, file model.Obj, args model.LinkArgs
}
return &model.Link{
Header: http.Header{
"Referer": []string{"https://www.aliyundrive.com/"},
"Referer": []string{"https://www.alipan.com/"},
},
URL: utils.Json.Get(res, "url").ToString(),
}, nil
}
func (d *AliDrive) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
_, err, _ := d.request("https://api.aliyundrive.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
_, err, _ := d.request("https://api.alipan.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"check_name_mode": "refuse",
"drive_id": d.DriveId,
@ -138,7 +139,7 @@ func (d *AliDrive) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
}
func (d *AliDrive) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
_, err, _ := d.request("https://api.aliyundrive.com/v3/file/update", http.MethodPost, func(req *resty.Request) {
_, err, _ := d.request("https://api.alipan.com/v3/file/update", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"check_name_mode": "refuse",
"drive_id": d.DriveId,
@ -155,7 +156,7 @@ func (d *AliDrive) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
}
func (d *AliDrive) Remove(ctx context.Context, obj model.Obj) error {
_, err, _ := d.request("https://api.aliyundrive.com/v2/recyclebin/trash", http.MethodPost, func(req *resty.Request) {
_, err, _ := d.request("https://api.alipan.com/v2/recyclebin/trash", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"drive_id": d.DriveId,
"file_id": obj.GetID(),
@ -215,7 +216,7 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, streamer model.Fil
}
var resp UploadResp
_, err, e := d.request("https://api.aliyundrive.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
_, err, e := d.request("https://api.alipan.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
req.SetBody(reqBody)
}, &resp)
@ -269,7 +270,7 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, streamer model.Fil
n, _ := io.NewSectionReader(localFile, o.Int64(), 8).Read(buf[:8])
reqBody["proof_code"] = base64.StdEncoding.EncodeToString(buf[:n])
_, err, e := d.request("https://api.aliyundrive.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
_, err, e := d.request("https://api.alipan.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
req.SetBody(reqBody)
}, &resp)
if err != nil && e.Code != "PreHashMatched" {
@ -304,11 +305,11 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, streamer model.Fil
}
res.Body.Close()
if count > 0 {
up(i * 100 / count)
up(float64(i) * 100 / float64(count))
}
}
var resp2 base.Json
_, err, e = d.request("https://api.aliyundrive.com/v2/file/complete", http.MethodPost, func(req *resty.Request) {
_, err, e = d.request("https://api.alipan.com/v2/file/complete", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"drive_id": d.DriveId,
"file_id": resp.FileId,
@ -333,10 +334,10 @@ func (d *AliDrive) Other(ctx context.Context, args model.OtherArgs) (interface{}
}
switch args.Method {
case "doc_preview":
url = "https://api.aliyundrive.com/v2/file/get_office_preview_url"
url = "https://api.alipan.com/v2/file/get_office_preview_url"
data["access_token"] = d.AccessToken
case "video_preview":
url = "https://api.aliyundrive.com/v2/file/get_video_preview_play_info"
url = "https://api.alipan.com/v2/file/get_video_preview_play_info"
data["category"] = "live_transcoding"
data["url_expire_sec"] = 14400
default:

View File

@ -26,7 +26,7 @@ func (d *AliDrive) createSession() error {
state.retry = 0
return fmt.Errorf("createSession failed after three retries")
}
_, err, _ := d.request("https://api.aliyundrive.com/users/v1/users/device/create_session", http.MethodPost, func(req *resty.Request) {
_, err, _ := d.request("https://api.alipan.com/users/v1/users/device/create_session", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"deviceName": "samsung",
"modelName": "SM-G9810",
@ -42,7 +42,7 @@ func (d *AliDrive) createSession() error {
}
// func (d *AliDrive) renewSession() error {
// _, err, _ := d.request("https://api.aliyundrive.com/users/v1/users/device/renew_session", http.MethodPost, nil, nil)
// _, err, _ := d.request("https://api.alipan.com/users/v1/users/device/renew_session", http.MethodPost, nil, nil)
// return err
// }
@ -58,7 +58,7 @@ func (d *AliDrive) sign() {
// do others that not defined in Driver interface
func (d *AliDrive) refreshToken() error {
url := "https://auth.aliyundrive.com/v2/account/token"
url := "https://auth.alipan.com/v2/account/token"
var resp base.TokenResp
var e RespErr
_, err := base.RestyClient.R().
@ -85,7 +85,7 @@ func (d *AliDrive) request(url, method string, callback base.ReqCallback, resp i
req := base.RestyClient.R()
state, ok := global.Load(d.UserID)
if !ok {
if url == "https://api.aliyundrive.com/v2/user/get" {
if url == "https://api.alipan.com/v2/user/get" {
state = &State{}
} else {
return nil, fmt.Errorf("can't load user state, user_id: %s", d.UserID), RespErr{}
@ -94,8 +94,8 @@ func (d *AliDrive) request(url, method string, callback base.ReqCallback, resp i
req.SetHeaders(map[string]string{
"Authorization": "Bearer\t" + d.AccessToken,
"content-type": "application/json",
"origin": "https://www.aliyundrive.com",
"Referer": "https://aliyundrive.com/",
"origin": "https://www.alipan.com",
"Referer": "https://alipan.com/",
"X-Signature": state.signature,
"x-request-id": uuid.NewString(),
"X-Canary": "client=Android,app=adrive,version=v4.1.0",
@ -158,7 +158,7 @@ func (d *AliDrive) getFiles(fileId string) ([]File, error) {
"video_thumbnail_process": "video/snapshot,t_0,f_jpg,ar_auto,w_300",
"url_expire_sec": 14400,
}
_, err, _ := d.request("https://api.aliyundrive.com/v2/file/list", http.MethodPost, func(req *resty.Request) {
_, err, _ := d.request("https://api.alipan.com/v2/file/list", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, &resp)
@ -172,7 +172,7 @@ func (d *AliDrive) getFiles(fileId string) ([]File, error) {
}
func (d *AliDrive) batch(srcId, dstId string, url string) error {
res, err, _ := d.request("https://api.aliyundrive.com/v3/batch", http.MethodPost, func(req *resty.Request) {
res, err, _ := d.request("https://api.alipan.com/v3/batch", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"requests": []base.Json{
{

View File

@ -93,7 +93,7 @@ func (d *AliyundriveOpen) link(ctx context.Context, file model.Obj) (*model.Link
}
url = utils.Json.Get(res, "streamsUrl", d.LIVPDownloadFormat).ToString()
}
exp := time.Hour
exp := time.Minute
return &model.Link{
URL: url,
Expiration: &exp,

View File

@ -11,7 +11,7 @@ type Addition struct {
RefreshToken string `json:"refresh_token" required:"true"`
OrderBy string `json:"order_by" type:"select" options:"name,size,updated_at,created_at"`
OrderDirection string `json:"order_direction" type:"select" options:"ASC,DESC"`
OauthTokenURL string `json:"oauth_token_url" default:"https://api.xhofe.top/alist/ali_open/token"`
OauthTokenURL string `json:"oauth_token_url" default:"https://api.nn.ci/alist/ali_open/token"`
ClientID string `json:"client_id" required:"false" help:"Keep it empty if you don't have one"`
ClientSecret string `json:"client_secret" required:"false" help:"Keep it empty if you don't have one"`
RemoveWay string `json:"remove_way" required:"true" type:"select" options:"trash,delete"`
@ -36,7 +36,7 @@ var config = driver.Config{
func init() {
op.RegisterDriver(func() driver.Driver {
return &AliyundriveOpen{
base: "https://openapi.aliyundrive.com",
base: "https://openapi.alipan.com",
}
})
}

View File

@ -5,7 +5,6 @@ import (
"context"
"encoding/base64"
"fmt"
"github.com/alist-org/alist/v3/pkg/http_range"
"io"
"math"
"net/http"
@ -16,6 +15,7 @@ import (
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/http_range"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/avast/retry-go"
"github.com/go-resty/resty/v2"
@ -258,6 +258,7 @@ func (d *AliyundriveOpen) upload(ctx context.Context, dstDir model.Obj, stream m
return nil, err
}
offset += partSize
up(float64(i*100) / float64(count))
}
} else {
log.Debugf("[aliyundrive_open] rapid upload success, file id: %s", createResp.FileId)

View File

@ -86,7 +86,7 @@ func (d *AliyundriveOpen) refreshToken() error {
if err != nil {
return err
}
log.Infof("[ali_open] toekn exchange: %s -> %s", d.RefreshToken, refresh)
log.Infof("[ali_open] token exchange: %s -> %s", d.RefreshToken, refresh)
d.RefreshToken, d.AccessToken = refresh, access
op.MustSaveDriverStorage(d)
return nil

View File

@ -105,7 +105,7 @@ func (d *AliyundriveShare) link(ctx context.Context, file model.Obj) (*model.Lin
"share_id": d.ShareId,
}
var resp ShareLinkResp
_, err := d.request("https://api.aliyundrive.com/v2/file/get_share_link_download_url", http.MethodPost, func(req *resty.Request) {
_, err := d.request("https://api.alipan.com/v2/file/get_share_link_download_url", http.MethodPost, func(req *resty.Request) {
req.SetHeader(CanaryHeaderKey, CanaryHeaderValue).SetBody(data).SetResult(&resp)
})
if err != nil {
@ -113,7 +113,7 @@ func (d *AliyundriveShare) link(ctx context.Context, file model.Obj) (*model.Lin
}
return &model.Link{
Header: http.Header{
"Referer": []string{"https://www.aliyundrive.com/"},
"Referer": []string{"https://www.alipan.com/"},
},
URL: resp.DownloadUrl,
}, nil
@ -128,9 +128,9 @@ func (d *AliyundriveShare) Other(ctx context.Context, args model.OtherArgs) (int
}
switch args.Method {
case "doc_preview":
url = "https://api.aliyundrive.com/v2/file/get_office_preview_url"
url = "https://api.alipan.com/v2/file/get_office_preview_url"
case "video_preview":
url = "https://api.aliyundrive.com/v2/file/get_video_preview_play_info"
url = "https://api.alipan.com/v2/file/get_video_preview_play_info"
data["category"] = "live_transcoding"
default:
return nil, errs.NotSupport

View File

@ -16,7 +16,7 @@ const (
)
func (d *AliyundriveShare) refreshToken() error {
url := "https://auth.aliyundrive.com/v2/account/token"
url := "https://auth.alipan.com/v2/account/token"
var resp base.TokenResp
var e ErrorResp
_, err := base.RestyClient.R().
@ -47,7 +47,7 @@ func (d *AliyundriveShare) getShareToken() error {
var resp ShareTokenResp
_, err := base.RestyClient.R().
SetResult(&resp).SetError(&e).SetBody(data).
Post("https://api.aliyundrive.com/v2/share_link/get_share_token")
Post("https://api.alipan.com/v2/share_link/get_share_token")
if err != nil {
return err
}
@ -116,7 +116,7 @@ func (d *AliyundriveShare) getFiles(fileId string) ([]File, error) {
SetHeader("x-share-token", d.ShareToken).
SetHeader(CanaryHeaderKey, CanaryHeaderValue).
SetResult(&resp).SetError(&e).SetBody(data).
Post("https://api.aliyundrive.com/adrive/v3/file/list")
Post("https://api.alipan.com/adrive/v3/file/list")
if err != nil {
return nil, err
}

View File

@ -2,7 +2,9 @@ package drivers
import (
_ "github.com/alist-org/alist/v3/drivers/115"
_ "github.com/alist-org/alist/v3/drivers/115_share"
_ "github.com/alist-org/alist/v3/drivers/123"
_ "github.com/alist-org/alist/v3/drivers/123_link"
_ "github.com/alist-org/alist/v3/drivers/123_share"
_ "github.com/alist-org/alist/v3/drivers/139"
_ "github.com/alist-org/alist/v3/drivers/189"
@ -16,12 +18,14 @@ import (
_ "github.com/alist-org/alist/v3/drivers/baidu_netdisk"
_ "github.com/alist-org/alist/v3/drivers/baidu_photo"
_ "github.com/alist-org/alist/v3/drivers/baidu_share"
_ "github.com/alist-org/alist/v3/drivers/chaoxing"
_ "github.com/alist-org/alist/v3/drivers/cloudreve"
_ "github.com/alist-org/alist/v3/drivers/crypt"
_ "github.com/alist-org/alist/v3/drivers/dropbox"
_ "github.com/alist-org/alist/v3/drivers/ftp"
_ "github.com/alist-org/alist/v3/drivers/google_drive"
_ "github.com/alist-org/alist/v3/drivers/google_photo"
_ "github.com/alist-org/alist/v3/drivers/ilanzou"
_ "github.com/alist-org/alist/v3/drivers/ipfs_api"
_ "github.com/alist-org/alist/v3/drivers/lanzou"
_ "github.com/alist-org/alist/v3/drivers/local"
@ -33,6 +37,7 @@ import (
_ "github.com/alist-org/alist/v3/drivers/pikpak"
_ "github.com/alist-org/alist/v3/drivers/pikpak_share"
_ "github.com/alist-org/alist/v3/drivers/quark_uc"
_ "github.com/alist-org/alist/v3/drivers/quqi"
_ "github.com/alist-org/alist/v3/drivers/s3"
_ "github.com/alist-org/alist/v3/drivers/seafile"
_ "github.com/alist-org/alist/v3/drivers/sftp"
@ -44,6 +49,7 @@ import (
_ "github.com/alist-org/alist/v3/drivers/url_tree"
_ "github.com/alist-org/alist/v3/drivers/uss"
_ "github.com/alist-org/alist/v3/drivers/virtual"
_ "github.com/alist-org/alist/v3/drivers/vtencent"
_ "github.com/alist-org/alist/v3/drivers/webdav"
_ "github.com/alist-org/alist/v3/drivers/weiyun"
_ "github.com/alist-org/alist/v3/drivers/wopan"

View File

@ -5,7 +5,6 @@ import (
"crypto/md5"
"encoding/hex"
"errors"
"fmt"
"io"
"math"
"net/url"
@ -28,10 +27,9 @@ type BaiduNetdisk struct {
Addition
uploadThread int
vipType int // membership type: 0 regular user (4G/4M), 1 VIP (10G/16M), 2 SVIP (20G/32M)
}
const DefaultSliceSize int64 = 4 * utils.MB
func (d *BaiduNetdisk) Config() driver.Config {
return config
}
@ -54,7 +52,11 @@ func (d *BaiduNetdisk) Init(ctx context.Context) error {
"method": "uinfo",
}, nil)
log.Debugf("[baidu] get uinfo: %s", string(res))
return err
if err != nil {
return err
}
d.vipType = utils.Json.Get(res, "vip_type").ToInt()
return nil
}
func (d *BaiduNetdisk) Drop(ctx context.Context) error {
@ -146,24 +148,50 @@ func (d *BaiduNetdisk) Remove(ctx context.Context, obj model.Obj) error {
return err
}
func (d *BaiduNetdisk) PutRapid(ctx context.Context, dstDir model.Obj, stream model.FileStreamer) (model.Obj, error) {
contentMd5 := stream.GetHash().GetHash(utils.MD5)
if len(contentMd5) < utils.MD5.Width {
return nil, errors.New("invalid hash")
}
streamSize := stream.GetSize()
path := stdpath.Join(dstDir.GetPath(), stream.GetName())
mtime := stream.ModTime().Unix()
ctime := stream.CreateTime().Unix()
blockList, _ := utils.Json.MarshalToString([]string{contentMd5})
var newFile File
_, err := d.create(path, streamSize, 0, "", blockList, &newFile, mtime, ctime)
if err != nil {
return nil, err
}
return fileToObj(newFile), nil
}
func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
// rapid upload
if newObj, err := d.PutRapid(ctx, dstDir, stream); err == nil {
return newObj, nil
}
tempFile, err := stream.CacheFullInTempFile()
if err != nil {
return nil, err
}
streamSize := stream.GetSize()
count := int(math.Max(math.Ceil(float64(streamSize)/float64(DefaultSliceSize)), 1))
lastBlockSize := streamSize % DefaultSliceSize
sliceSize := d.getSliceSize()
count := int(math.Max(math.Ceil(float64(streamSize)/float64(sliceSize)), 1))
lastBlockSize := streamSize % sliceSize
if streamSize > 0 && lastBlockSize == 0 {
lastBlockSize = DefaultSliceSize
lastBlockSize = sliceSize
}
//cal md5 for first 256k data
const SliceSize int64 = 256 * 1024
// cal md5
blockList := make([]string, 0, count)
byteSize := DefaultSliceSize
byteSize := sliceSize
fileMd5H := md5.New()
sliceMd5H := md5.New()
sliceMd5H2 := md5.New()
@ -186,9 +214,7 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
contentMd5 := hex.EncodeToString(fileMd5H.Sum(nil))
sliceMd5 := hex.EncodeToString(sliceMd5H2.Sum(nil))
blockListStr, _ := utils.Json.MarshalToString(blockList)
rawPath := stdpath.Join(dstDir.GetPath(), stream.GetName())
path := encodeURIComponent(rawPath)
path := stdpath.Join(dstDir.GetPath(), stream.GetName())
mtime := stream.ModTime().Unix()
ctime := stream.CreateTime().Unix()
@ -196,13 +222,23 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
// Try to fetch previously saved upload progress
precreateResp, ok := base.GetUploadProgress[*PrecreateResp](d, d.AccessToken, contentMd5)
if !ok {
data := fmt.Sprintf("path=%s&size=%d&isdir=0&autoinit=1&rtype=3&block_list=%s&content-md5=%s&slice-md5=%s&local_mtime=%d&local_ctime=%d",
path, streamSize, blockListStr, contentMd5, sliceMd5, mtime, ctime)
params := map[string]string{
"method": "precreate",
}
log.Debugf("[baidu_netdisk] precreate data: %s", data)
_, err = d.post("/xpan/file", params, data, &precreateResp)
form := map[string]string{
"path": path,
"size": strconv.FormatInt(streamSize, 10),
"isdir": "0",
"autoinit": "1",
"rtype": "3",
"block_list": blockListStr,
"content-md5": contentMd5,
"slice-md5": sliceMd5,
}
joinTime(form, ctime, mtime)
log.Debugf("[baidu_netdisk] precreate data: %s", form)
_, err = d.postForm("/xpan/file", params, form, &precreateResp)
if err != nil {
return nil, err
}
@ -225,7 +261,7 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
break
}
i, partseq, offset, byteSize := i, partseq, int64(partseq)*DefaultSliceSize, DefaultSliceSize
i, partseq, offset, byteSize := i, partseq, int64(partseq)*sliceSize, sliceSize
if partseq+1 == count {
byteSize = lastBlockSize
}
@ -242,7 +278,7 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
if err != nil {
return err
}
up(int(threadG.Success()) * 100 / len(precreateResp.BlockList))
up(float64(threadG.Success()) * 100 / float64(len(precreateResp.BlockList)))
precreateResp.BlockList[i] = -1
return nil
})
@ -258,12 +294,13 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
// Step 3: create the file
var newFile File
_, err = d.create(rawPath, streamSize, 0, precreateResp.Uploadid, blockListStr, &newFile, mtime, ctime)
_, err = d.create(path, streamSize, 0, precreateResp.Uploadid, blockListStr, &newFile, mtime, ctime)
if err != nil {
return nil, err
}
return fileToObj(newFile), nil
}
func (d *BaiduNetdisk) uploadSlice(ctx context.Context, params map[string]string, fileName string, file io.Reader) error {
res, err := base.RestyClient.R().
SetContext(ctx).

View File

@ -1,7 +1,6 @@
package baidu_netdisk
import (
"github.com/alist-org/alist/v3/pkg/utils"
"path"
"strconv"
"time"
@ -71,7 +70,9 @@ func fileToObj(f File) *model.ObjThumb {
Modified: time.Unix(f.LocalMtime, 0),
Ctime: time.Unix(f.LocalCtime, 0),
IsFolder: f.Isdir == 1,
HashInfo: utils.NewHashInfo(utils.MD5, f.Md5),
// The MD5 obtained directly from the API is wrong
// HashInfo: utils.NewHashInfo(utils.MD5, f.Md5),
},
Thumbnail: model.Thumbnail{Thumbnail: f.Thumbs.Url3},
}

View File

@ -1,11 +1,10 @@
package baidu_netdisk
import (
"errors"
"fmt"
"net/http"
"net/url"
"strconv"
"strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
@ -22,7 +21,7 @@ import (
func (d *BaiduNetdisk) refreshToken() error {
err := d._refreshToken()
if err != nil && err == errs.EmptyToken {
if err != nil && errors.Is(err, errs.EmptyToken) {
err = d._refreshToken()
}
return err
@ -74,21 +73,16 @@ func (d *BaiduNetdisk) request(furl string, method string, callback base.ReqCall
log.Info("refreshing baidu_netdisk token.")
err2 := d.refreshToken()
if err2 != nil {
return err2
return retry.Unrecoverable(err2)
}
}
err2 := fmt.Errorf("req: [%s] ,errno: %d, refer to https://pan.baidu.com/union/doc/", furl, errno)
if !utils.SliceContains([]int{2}, errno) {
err2 = retry.Unrecoverable(err2)
}
return err2
return fmt.Errorf("req: [%s] ,errno: %d, refer to https://pan.baidu.com/union/doc/", furl, errno)
}
result = res.Body()
return nil
},
retry.LastErrorOnly(true),
retry.Attempts(5),
retry.Attempts(3),
retry.Delay(time.Second),
retry.DelayType(retry.BackOffDelay))
return result, err
@ -100,10 +94,10 @@ func (d *BaiduNetdisk) get(pathname string, params map[string]string, resp inter
}, resp)
}
func (d *BaiduNetdisk) post(pathname string, params map[string]string, data interface{}, resp interface{}) ([]byte, error) {
func (d *BaiduNetdisk) postForm(pathname string, params map[string]string, form map[string]string, resp interface{}) ([]byte, error) {
return d.request("https://pan.baidu.com/rest/2.0"+pathname, http.MethodPost, func(req *resty.Request) {
req.SetQueryParams(params)
req.SetBody(data)
req.SetFormData(form)
}, resp)
}
@ -158,6 +152,9 @@ func (d *BaiduNetdisk) linkOfficial(file model.Obj, args model.LinkArgs) (*model
//if res.StatusCode() == 302 {
u = res.Header().Get("location")
//}
updateObjMd5(file, "pan.baidu.com", u)
return &model.Link{
URL: u,
Header: http.Header{
@ -180,6 +177,9 @@ func (d *BaiduNetdisk) linkCrack(file model.Obj, args model.LinkArgs) (*model.Li
if err != nil {
return nil, err
}
updateObjMd5(file, d.CustomCrackUA, resp.Info[0].Dlink)
return &model.Link{
URL: resp.Info[0].Dlink,
Header: http.Header{
@ -194,29 +194,73 @@ func (d *BaiduNetdisk) manage(opera string, filelist any) ([]byte, error) {
"opera": opera,
}
marshal, _ := utils.Json.MarshalToString(filelist)
data := fmt.Sprintf("async=0&filelist=%s&ondup=fail", marshal)
return d.post("/xpan/file", params, data, nil)
return d.postForm("/xpan/file", params, map[string]string{
"async": "0",
"filelist": marshal,
"ondup": "fail",
}, nil)
}
func (d *BaiduNetdisk) create(path string, size int64, isdir int, uploadid, block_list string, resp any, mtime, ctime int64) ([]byte, error) {
params := map[string]string{
"method": "create",
}
data := ""
if mtime == 0 || ctime == 0 {
data = fmt.Sprintf("path=%s&size=%d&isdir=%d&rtype=3", encodeURIComponent(path), size, isdir)
} else {
data = fmt.Sprintf("path=%s&size=%d&isdir=%d&rtype=3&local_mtime=%d&local_ctime=%d", encodeURIComponent(path), size, isdir, mtime, ctime)
form := map[string]string{
"path": path,
"size": strconv.FormatInt(size, 10),
"isdir": strconv.Itoa(isdir),
"rtype": "3",
}
if mtime != 0 && ctime != 0 {
joinTime(form, ctime, mtime)
}
if uploadid != "" {
data += fmt.Sprintf("&uploadid=%s&block_list=%s", uploadid, block_list)
form["uploadid"] = uploadid
}
return d.post("/xpan/file", params, data, resp)
if block_list != "" {
form["block_list"] = block_list
}
return d.postForm("/xpan/file", params, form, resp)
}
func encodeURIComponent(str string) string {
r := url.QueryEscape(str)
r = strings.ReplaceAll(r, "+", "%20")
return r
func joinTime(form map[string]string, ctime, mtime int64) {
form["local_mtime"] = strconv.FormatInt(mtime, 10)
form["local_ctime"] = strconv.FormatInt(ctime, 10)
}
func updateObjMd5(obj model.Obj, userAgent, u string) {
object := model.GetRawObject(obj)
if object != nil {
req, _ := http.NewRequest(http.MethodHead, u, nil)
req.Header.Add("User-Agent", userAgent)
resp, _ := base.HttpClient.Do(req)
if resp != nil {
contentMd5 := resp.Header.Get("Content-Md5")
object.HashInfo = utils.NewHashInfo(utils.MD5, contentMd5)
}
}
}
const (
DefaultSliceSize int64 = 4 * utils.MB
VipSliceSize = 16 * utils.MB
SVipSliceSize = 32 * utils.MB
)
func (d *BaiduNetdisk) getSliceSize() int64 {
switch d.vipType {
case 1:
return VipSliceSize
case 2:
return SVipSliceSize
default:
return DefaultSliceSize
}
}
// func encodeURIComponent(str string) string {
// r := url.QueryEscape(str)
// r = strings.ReplaceAll(r, "+", "%20")
// return r
// }
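With getSliceSize the chunk size now follows the membership tier stored in vipType (4 MB, 16 MB or 32 MB), and Put derives count and lastBlockSize from it. A minimal standalone arithmetic sketch, assuming utils.MB is 1024*1024:

package main

import (
    "fmt"
    "math"
)

const MB int64 = 1024 * 1024 // stand-in for utils.MB

func main() {
    streamSize := 100*MB + 1 // 100 MB plus one byte
    for _, sliceSize := range []int64{4 * MB, 16 * MB, 32 * MB} {
        count := int(math.Max(math.Ceil(float64(streamSize)/float64(sliceSize)), 1))
        lastBlockSize := streamSize % sliceSize
        if streamSize > 0 && lastBlockSize == 0 {
            lastBlockSize = sliceSize
        }
        fmt.Println(sliceSize/MB, "MB slices:", count, "parts, last part", lastBlockSize, "bytes")
    }
}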

View File

@ -227,14 +227,14 @@ func (d *BaiduPhoto) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
return nil, fmt.Errorf("file size cannot be zero")
}
// TODO:
// No rapid-upload method has been found yet
// Requires the full-file MD5, so the stream must support io.Seek
tempFile, err := stream.CacheFullInTempFile()
if err != nil {
return nil, err
}
defer func() {
_ = tempFile.Close()
}()
const DEFAULT int64 = 1 << 22
const SliceSize int64 = 1 << 18
@ -329,7 +329,7 @@ func (d *BaiduPhoto) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
if err != nil {
return err
}
up(int(threadG.Success()) * 100 / len(precreateResp.BlockList))
up(float64(threadG.Success()) * 100 / float64(len(precreateResp.BlockList)))
precreateResp.BlockList[i] = -1
return nil
})

View File

@ -2,9 +2,10 @@ package baiduphoto
import (
"fmt"
"github.com/alist-org/alist/v3/pkg/utils"
"time"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/alist-org/alist/v3/internal/model"
)
@ -52,34 +53,26 @@ type (
Ctime int64 `json:"ctime"` // creation time, in seconds
Mtime int64 `json:"mtime"` // modification time, in seconds
Thumburl []string `json:"thumburl"`
parseTime *time.Time
Md5 string `json:"md5"`
}
)
func (c *File) GetSize() int64 { return c.Size }
func (c *File) GetName() string { return getFileName(c.Path) }
func (c *File) ModTime() time.Time {
if c.parseTime == nil {
c.parseTime = toTime(c.Mtime)
}
return *c.parseTime
}
func (c *File) IsDir() bool { return false }
func (c *File) GetID() string { return "" }
func (c *File) GetPath() string { return "" }
func (c *File) GetSize() int64 { return c.Size }
func (c *File) GetName() string { return getFileName(c.Path) }
func (c *File) CreateTime() time.Time { return time.Unix(c.Ctime, 0) }
func (c *File) ModTime() time.Time { return time.Unix(c.Mtime, 0) }
func (c *File) IsDir() bool { return false }
func (c *File) GetID() string { return "" }
func (c *File) GetPath() string { return "" }
func (c *File) Thumb() string {
if len(c.Thumburl) > 0 {
return c.Thumburl[0]
}
return ""
}
func (c *File) CreateTime() time.Time {
return time.Unix(c.Ctime, 0)
}
func (c *File) GetHash() utils.HashInfo {
return utils.HashInfo{}
return utils.NewHashInfo(utils.MD5, c.Md5)
}
/*相册部分*/
@ -117,25 +110,17 @@ type (
}
)
func (a *Album) CreateTime() time.Time {
return time.Unix(a.CreationTime, 0)
}
func (a *Album) GetHash() utils.HashInfo {
return utils.HashInfo{}
}
func (a *Album) GetSize() int64 { return 0 }
func (a *Album) GetName() string { return a.Title }
func (a *Album) ModTime() time.Time {
if a.parseTime == nil {
a.parseTime = toTime(a.Mtime)
}
return *a.parseTime
}
func (a *Album) IsDir() bool { return true }
func (a *Album) GetID() string { return "" }
func (a *Album) GetPath() string { return "" }
func (a *Album) GetSize() int64 { return 0 }
func (a *Album) GetName() string { return a.Title }
func (a *Album) CreateTime() time.Time { return time.Unix(a.CreationTime, 0) }
func (a *Album) ModTime() time.Time { return time.Unix(a.Mtime, 0) }
func (a *Album) IsDir() bool { return true }
func (a *Album) GetID() string { return "" }
func (a *Album) GetPath() string { return "" }
type (
CopyFileResp struct {

View File

@ -33,6 +33,7 @@ func NewRestyClient() *resty.Client {
client := resty.New().
SetHeader("user-agent", UserAgent).
SetRetryCount(3).
SetRetryResetReaders(true).
SetTimeout(DefaultTimeout).
SetTLSClientConfig(&tls.Config{InsecureSkipVerify: conf.Conf.TlsInsecureSkipVerify})
return client

297
drivers/chaoxing/driver.go Normal file
View File

@ -0,0 +1,297 @@
package chaoxing
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"mime/multipart"
"net/http"
"net/url"
"strings"
"time"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/pkg/cron"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
"google.golang.org/appengine/log"
)
type ChaoXing struct {
model.Storage
Addition
cron *cron.Cron
config driver.Config
conf Conf
}
func (d *ChaoXing) Config() driver.Config {
return d.config
}
func (d *ChaoXing) GetAddition() driver.Additional {
return &d.Addition
}
func (d *ChaoXing) refreshCookie() error {
cookie, err := d.Login()
if err != nil {
d.Status = err.Error()
op.MustSaveDriverStorage(d)
return nil
}
d.Addition.Cookie = cookie
op.MustSaveDriverStorage(d)
return nil
}
func (d *ChaoXing) Init(ctx context.Context) error {
err := d.refreshCookie()
if err != nil {
log.Errorf(ctx, err.Error())
}
d.cron = cron.NewCron(time.Hour * 12)
d.cron.Do(func() {
err = d.refreshCookie()
if err != nil {
log.Errorf(ctx, err.Error())
}
})
return nil
}
func (d *ChaoXing) Drop(ctx context.Context) error {
d.cron.Stop()
return nil
}
func (d *ChaoXing) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
files, err := d.GetFiles(dir.GetID())
if err != nil {
return nil, err
}
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
return fileToObj(src), nil
})
}
func (d *ChaoXing) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
var resp DownResp
ua := d.conf.ua
fileId := strings.Split(file.GetID(), "$")[1]
_, err := d.requestDownload("/screen/note_note/files/status/"+fileId, http.MethodPost, func(req *resty.Request) {
req.SetHeader("User-Agent", ua)
}, &resp)
if err != nil {
return nil, err
}
u := resp.Download
return &model.Link{
URL: u,
Header: http.Header{
"Cookie": []string{d.Cookie},
"Referer": []string{d.conf.referer},
"User-Agent": []string{ua},
},
Concurrency: 2,
PartSize: 10 * utils.MB,
}, nil
}
func (d *ChaoXing) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
query := map[string]string{
"bbsid": d.Addition.Bbsid,
"name": dirName,
"pid": parentDir.GetID(),
}
var resp ListFileResp
_, err := d.request("/pc/resource/addResourceFolder", http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(query)
}, &resp)
if err != nil {
return err
}
if resp.Result != 1 {
msg := fmt.Sprintf("error:%s", resp.Msg)
return errors.New(msg)
}
return nil
}
func (d *ChaoXing) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
query := map[string]string{
"bbsid": d.Addition.Bbsid,
"folderIds": srcObj.GetID(),
"targetId": dstDir.GetID(),
}
if !srcObj.IsDir() {
query = map[string]string{
"bbsid": d.Addition.Bbsid,
"recIds": strings.Split(srcObj.GetID(), "$")[0],
"targetId": dstDir.GetID(),
}
}
var resp ListFileResp
_, err := d.request("/pc/resource/moveResource", http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(query)
}, &resp)
if err != nil {
return err
}
if !resp.Status {
msg := fmt.Sprintf("error:%s", resp.Msg)
return errors.New(msg)
}
return nil
}
func (d *ChaoXing) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
query := map[string]string{
"bbsid": d.Addition.Bbsid,
"folderId": srcObj.GetID(),
"name": newName,
}
path := "/pc/resource/updateResourceFolderName"
if !srcObj.IsDir() {
// path = "/pc/resource/updateResourceFileName"
// query = map[string]string{
// "bbsid": d.Addition.Bbsid,
// "recIds": strings.Split(srcObj.GetID(), "$")[0],
// "name": newName,
// }
return errors.New("此网盘不支持修改文件名")
}
var resp ListFileResp
_, err := d.request(path, http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(query)
}, &resp)
if err != nil {
return err
}
if resp.Result != 1 {
msg := fmt.Sprintf("error:%s", resp.Msg)
return errors.New(msg)
}
return nil
}
func (d *ChaoXing) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
// TODO copy obj, optional
return errs.NotImplement
}
func (d *ChaoXing) Remove(ctx context.Context, obj model.Obj) error {
query := map[string]string{
"bbsid": d.Addition.Bbsid,
"folderIds": obj.GetID(),
}
path := "/pc/resource/deleteResourceFolder"
var resp ListFileResp
if !obj.IsDir() {
path = "/pc/resource/deleteResourceFile"
query = map[string]string{
"bbsid": d.Addition.Bbsid,
"recIds": strings.Split(obj.GetID(), "$")[0],
}
}
_, err := d.request(path, http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(query)
}, &resp)
if err != nil {
return err
}
if resp.Result != 1 {
msg := fmt.Sprintf("error:%s", resp.Msg)
return errors.New(msg)
}
return nil
}
func (d *ChaoXing) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
var resp UploadDataRsp
_, err := d.request("https://noteyd.chaoxing.com/pc/files/getUploadConfig", http.MethodGet, func(req *resty.Request) {
}, &resp)
if err != nil {
return err
}
if resp.Result != 1 {
return errors.New("get upload data error")
}
body := &bytes.Buffer{}
writer := multipart.NewWriter(body)
filePart, err := writer.CreateFormFile("file", stream.GetName())
if err != nil {
return err
}
_, err = io.Copy(filePart, stream)
if err != nil {
return err
}
err = writer.WriteField("_token", resp.Msg.Token)
if err != nil {
return err
}
err = writer.WriteField("puid", fmt.Sprintf("%d", resp.Msg.Puid))
if err != nil {
fmt.Println("Error writing param2 to request body:", err)
return err
}
err = writer.Close()
if err != nil {
return err
}
req, err := http.NewRequest("POST", "https://pan-yz.chaoxing.com/upload", body)
if err != nil {
return err
}
req.Header.Set("Content-Type", writer.FormDataContentType())
req.Header.Set("Content-Length", fmt.Sprintf("%d", body.Len()))
resps, err := http.DefaultClient.Do(req)
if err != nil {
return err
}
defer resps.Body.Close()
bodys, err := io.ReadAll(resps.Body)
if err != nil {
return err
}
var fileRsp UploadFileDataRsp
err = json.Unmarshal(bodys, &fileRsp)
if err != nil {
return err
}
if fileRsp.Msg != "success" {
return errors.New(fileRsp.Msg)
}
uploadDoneParam := UploadDoneParam{Key: fileRsp.ObjectID, Cataid: "100000019", Param: fileRsp.Data}
params, err := json.Marshal(uploadDoneParam)
if err != nil {
return err
}
query := map[string]string{
"bbsid": d.Addition.Bbsid,
"pid": dstDir.GetID(),
"type": "yunpan",
"params": url.QueryEscape("[" + string(params) + "]"),
}
var respd ListFileResp
_, err = d.request("/pc/resource/addResource", http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(query)
}, &respd)
if err != nil {
return err
}
if respd.Result != 1 {
msg := fmt.Sprintf("error:%v", resp.Msg)
return errors.New(msg)
}
return nil
}
var _ driver.Driver = (*ChaoXing)(nil)

47
drivers/chaoxing/meta.go Normal file
View File

@ -0,0 +1,47 @@
package chaoxing
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
// This driver mounts the ChaoXing group drive; it must be accessed through the proxy.
// After logging in to ChaoXing, open your personal space, go to Groups, create a new group, and open it.
// The bbsid parameter is right in the group URL. The system limits a single file to 2 GB; there is no total-capacity limit.
type Addition struct {
// ChaoXing username and password
UserName string `json:"user_name" required:"true"`
Password string `json:"password" required:"true"`
// Taken from the URL of the group you created
Bbsid string `json:"bbsid" required:"true"`
driver.RootID
// Optional; the driver will log in and fetch it automatically
Cookie string `json:"cookie"`
}
type Conf struct {
ua string
referer string
api string
DowloadApi string
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &ChaoXing{
config: driver.Config{
Name: "ChaoXingGroupDrive",
OnlyProxy: true,
OnlyLocal: false,
DefaultRoot: "-1",
NoOverwriteUpload: true,
},
conf: Conf{
ua: "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) quark-cloud-drive/2.5.20 Chrome/100.0.4896.160 Electron/18.3.5.4-b478491100 Safari/537.36 Channel/pckk_other_ch",
referer: "https://chaoxing.com/",
api: "https://groupweb.chaoxing.com",
DowloadApi: "https://noteyd.chaoxing.com",
},
}
})
}

276
drivers/chaoxing/types.go Normal file
View File

@ -0,0 +1,276 @@
package chaoxing
import (
"bytes"
"fmt"
"strconv"
"time"
"github.com/alist-org/alist/v3/internal/model"
)
type Resp struct {
Result int `json:"result"`
}
type UserAuth struct {
GroupAuth struct {
AddData int `json:"addData"`
AddDataFolder int `json:"addDataFolder"`
AddLebel int `json:"addLebel"`
AddManager int `json:"addManager"`
AddMem int `json:"addMem"`
AddTopicFolder int `json:"addTopicFolder"`
AnonymousAddReply int `json:"anonymousAddReply"`
AnonymousAddTopic int `json:"anonymousAddTopic"`
BatchOperation int `json:"batchOperation"`
DelData int `json:"delData"`
DelDataFolder int `json:"delDataFolder"`
DelMem int `json:"delMem"`
DelTopicFolder int `json:"delTopicFolder"`
Dismiss int `json:"dismiss"`
ExamEnc string `json:"examEnc"`
GroupChat int `json:"groupChat"`
IsShowCircleChatButton int `json:"isShowCircleChatButton"`
IsShowCircleCloudButton int `json:"isShowCircleCloudButton"`
IsShowCompanyButton int `json:"isShowCompanyButton"`
Join int `json:"join"`
MemberShowRankSet int `json:"memberShowRankSet"`
ModifyDataFolder int `json:"modifyDataFolder"`
ModifyExpose int `json:"modifyExpose"`
ModifyName int `json:"modifyName"`
ModifyShowPic int `json:"modifyShowPic"`
ModifyTopicFolder int `json:"modifyTopicFolder"`
ModifyVisibleState int `json:"modifyVisibleState"`
OnlyMgrScoreSet int `json:"onlyMgrScoreSet"`
Quit int `json:"quit"`
SendNotice int `json:"sendNotice"`
ShowActivityManage int `json:"showActivityManage"`
ShowActivitySet int `json:"showActivitySet"`
ShowAttentionSet int `json:"showAttentionSet"`
ShowAutoClearStatus int `json:"showAutoClearStatus"`
ShowBarcode int `json:"showBarcode"`
ShowChatRoomSet int `json:"showChatRoomSet"`
ShowCircleActivitySet int `json:"showCircleActivitySet"`
ShowCircleSet int `json:"showCircleSet"`
ShowCmem int `json:"showCmem"`
ShowDataFolder int `json:"showDataFolder"`
ShowDelReason int `json:"showDelReason"`
ShowForward int `json:"showForward"`
ShowGroupChat int `json:"showGroupChat"`
ShowGroupChatSet int `json:"showGroupChatSet"`
ShowGroupSquareSet int `json:"showGroupSquareSet"`
ShowLockAddSet int `json:"showLockAddSet"`
ShowManager int `json:"showManager"`
ShowManagerIdentitySet int `json:"showManagerIdentitySet"`
ShowNeedDelReasonSet int `json:"showNeedDelReasonSet"`
ShowNotice int `json:"showNotice"`
ShowOnlyManagerReplySet int `json:"showOnlyManagerReplySet"`
ShowRank int `json:"showRank"`
ShowRank2 int `json:"showRank2"`
ShowRecycleBin int `json:"showRecycleBin"`
ShowReplyByClass int `json:"showReplyByClass"`
ShowReplyNeedCheck int `json:"showReplyNeedCheck"`
ShowSignbanSet int `json:"showSignbanSet"`
ShowSpeechSet int `json:"showSpeechSet"`
ShowTopicCheck int `json:"showTopicCheck"`
ShowTopicNeedCheck int `json:"showTopicNeedCheck"`
ShowTransferSet int `json:"showTransferSet"`
} `json:"groupAuth"`
OperationAuth struct {
Add int `json:"add"`
AddTopicToFolder int `json:"addTopicToFolder"`
ChoiceSet int `json:"choiceSet"`
DelTopicFromFolder int `json:"delTopicFromFolder"`
Delete int `json:"delete"`
Reply int `json:"reply"`
ScoreSet int `json:"scoreSet"`
TopSet int `json:"topSet"`
Update int `json:"update"`
} `json:"operationAuth"`
}
// The JSON content (the "content" field) of files uploaded from the mobile XueXiTong client differs from files uploaded via the web client
// Web JSON: "puid": 54321, "size": 12345
// Mobile JSON: "puid": "54321", "size": "12345"
type int_str int
// Parses JSON values that may be either a quoted numeric string or a plain number
func (ios *int_str) UnmarshalJSON(data []byte) error {
intValue, err := strconv.Atoi(string(bytes.Trim(data, "\"")))
if err != nil {
return err
}
*ios = int_str(intValue)
return nil
}
type File struct {
Cataid int `json:"cataid"`
Cfid int `json:"cfid"`
Content struct {
Cfid int `json:"cfid"`
Pid int `json:"pid"`
FolderName string `json:"folderName"`
ShareType int `json:"shareType"`
Preview string `json:"preview"`
Filetype string `json:"filetype"`
PreviewURL string `json:"previewUrl"`
IsImg bool `json:"isImg"`
ParentPath string `json:"parentPath"`
Icon string `json:"icon"`
Suffix string `json:"suffix"`
Duration int `json:"duration"`
Pantype string `json:"pantype"`
Puid int_str `json:"puid"`
Filepath string `json:"filepath"`
Crc string `json:"crc"`
Isfile bool `json:"isfile"`
Residstr string `json:"residstr"`
ObjectID string `json:"objectId"`
Extinfo string `json:"extinfo"`
Thumbnail string `json:"thumbnail"`
Creator int `json:"creator"`
ResTypeValue int `json:"resTypeValue"`
UploadDateFormat string `json:"uploadDateFormat"`
DisableOpt bool `json:"disableOpt"`
DownPath string `json:"downPath"`
Sort int `json:"sort"`
Topsort int `json:"topsort"`
Restype string `json:"restype"`
Size int_str `json:"size"`
UploadDate int64 `json:"uploadDate"`
FileSize string `json:"fileSize"`
Name string `json:"name"`
FileID string `json:"fileId"`
} `json:"content"`
CreatorID int `json:"creatorId"`
DesID string `json:"des_id"`
ID int `json:"id"`
Inserttime int64 `json:"inserttime"`
Key string `json:"key"`
Norder int `json:"norder"`
OwnerID int `json:"ownerId"`
OwnerType int `json:"ownerType"`
Path string `json:"path"`
Rid int `json:"rid"`
Status int `json:"status"`
Topsign int `json:"topsign"`
}
type ListFileResp struct {
Msg string `json:"msg"`
Result int `json:"result"`
Status bool `json:"status"`
UserAuth UserAuth `json:"userAuth"`
List []File `json:"list"`
}
type DownResp struct {
Msg string `json:"msg"`
Duration int `json:"duration"`
Download string `json:"download"`
FileStatus string `json:"fileStatus"`
URL string `json:"url"`
Status bool `json:"status"`
}
type UploadDataRsp struct {
Result int `json:"result"`
Msg struct {
Puid int `json:"puid"`
Token string `json:"token"`
} `json:"msg"`
}
type UploadFileDataRsp struct {
Result bool `json:"result"`
Msg string `json:"msg"`
Crc string `json:"crc"`
ObjectID string `json:"objectId"`
Resid int64 `json:"resid"`
Puid int `json:"puid"`
Data struct {
DisableOpt bool `json:"disableOpt"`
Resid int64 `json:"resid"`
Crc string `json:"crc"`
Puid int `json:"puid"`
Isfile bool `json:"isfile"`
Pantype string `json:"pantype"`
Size int `json:"size"`
Name string `json:"name"`
ObjectID string `json:"objectId"`
Restype string `json:"restype"`
UploadDate time.Time `json:"uploadDate"`
ModifyDate time.Time `json:"modifyDate"`
UploadDateFormat string `json:"uploadDateFormat"`
Residstr string `json:"residstr"`
Suffix string `json:"suffix"`
Preview string `json:"preview"`
Thumbnail string `json:"thumbnail"`
Creator int `json:"creator"`
Duration int `json:"duration"`
IsImg bool `json:"isImg"`
PreviewURL string `json:"previewUrl"`
Filetype string `json:"filetype"`
Filepath string `json:"filepath"`
Sort int `json:"sort"`
Topsort int `json:"topsort"`
ResTypeValue int `json:"resTypeValue"`
Extinfo string `json:"extinfo"`
} `json:"data"`
}
type UploadDoneParam struct {
Cataid string `json:"cataid"`
Key string `json:"key"`
Param struct {
DisableOpt bool `json:"disableOpt"`
Resid int64 `json:"resid"`
Crc string `json:"crc"`
Puid int `json:"puid"`
Isfile bool `json:"isfile"`
Pantype string `json:"pantype"`
Size int `json:"size"`
Name string `json:"name"`
ObjectID string `json:"objectId"`
Restype string `json:"restype"`
UploadDate time.Time `json:"uploadDate"`
ModifyDate time.Time `json:"modifyDate"`
UploadDateFormat string `json:"uploadDateFormat"`
Residstr string `json:"residstr"`
Suffix string `json:"suffix"`
Preview string `json:"preview"`
Thumbnail string `json:"thumbnail"`
Creator int `json:"creator"`
Duration int `json:"duration"`
IsImg bool `json:"isImg"`
PreviewURL string `json:"previewUrl"`
Filetype string `json:"filetype"`
Filepath string `json:"filepath"`
Sort int `json:"sort"`
Topsort int `json:"topsort"`
ResTypeValue int `json:"resTypeValue"`
Extinfo string `json:"extinfo"`
} `json:"param"`
}
func fileToObj(f File) *model.Object {
if len(f.Content.FolderName) > 0 {
return &model.Object{
ID: fmt.Sprintf("%d", f.ID),
Name: f.Content.FolderName,
Size: 0,
Modified: time.UnixMilli(f.Inserttime),
IsFolder: true,
}
}
parseTime := time.UnixMilli(f.Content.UploadDate)
return &model.Object{
ID: fmt.Sprintf("%d$%s", f.ID, f.Content.FileID),
Name: f.Content.Name,
Size: int64(f.Content.Size),
Modified: parseTime,
IsFolder: false,
}
}
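The int_str type above exists because, as the comment notes, the mobile client serializes puid and size as quoted strings while the web client uses bare numbers; trimming the quotes before strconv.Atoi accepts both forms. A minimal standalone sketch of the same technique:

package main

import (
    "bytes"
    "encoding/json"
    "fmt"
    "strconv"
)

// Accepts both 12345 and "12345" in JSON, as the driver's int_str does.
type intStr int

func (v *intStr) UnmarshalJSON(data []byte) error {
    n, err := strconv.Atoi(string(bytes.Trim(data, `"`)))
    if err != nil {
        return err
    }
    *v = intStr(n)
    return nil
}

func main() {
    var web, mobile struct {
        Size intStr `json:"size"`
    }
    _ = json.Unmarshal([]byte(`{"size": 12345}`), &web)
    _ = json.Unmarshal([]byte(`{"size": "12345"}`), &mobile)
    fmt.Println(web.Size, mobile.Size) // 12345 12345
}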

183
drivers/chaoxing/util.go Normal file
View File

@ -0,0 +1,183 @@
package chaoxing
import (
"bytes"
"crypto/aes"
"crypto/cipher"
"encoding/base64"
"errors"
"fmt"
"mime/multipart"
"net/http"
"strings"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/go-resty/resty/v2"
)
func (d *ChaoXing) requestDownload(pathname string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
u := d.conf.DowloadApi + pathname
req := base.RestyClient.R()
req.SetHeaders(map[string]string{
"Cookie": d.Cookie,
"Accept": "application/json, text/plain, */*",
"Referer": d.conf.referer,
})
if callback != nil {
callback(req)
}
if resp != nil {
req.SetResult(resp)
}
var e Resp
req.SetError(&e)
res, err := req.Execute(method, u)
if err != nil {
return nil, err
}
return res.Body(), nil
}
func (d *ChaoXing) request(pathname string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
u := d.conf.api + pathname
if strings.Contains(pathname, "getUploadConfig") {
u = pathname
}
req := base.RestyClient.R()
req.SetHeaders(map[string]string{
"Cookie": d.Cookie,
"Accept": "application/json, text/plain, */*",
"Referer": d.conf.referer,
})
if callback != nil {
callback(req)
}
if resp != nil {
req.SetResult(resp)
}
var e Resp
req.SetError(&e)
res, err := req.Execute(method, u)
if err != nil {
return nil, err
}
return res.Body(), nil
}
func (d *ChaoXing) GetFiles(parent string) ([]File, error) {
files := make([]File, 0)
query := map[string]string{
"bbsid": d.Addition.Bbsid,
"folderId": parent,
"recType": "1",
}
var resp ListFileResp
_, err := d.request("/pc/resource/getResourceList", http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(query)
}, &resp)
if err != nil {
return nil, err
}
if resp.Result != 1 {
msg := fmt.Sprintf("error code is:%d", resp.Result)
return nil, errors.New(msg)
}
if len(resp.List) > 0 {
files = append(files, resp.List...)
}
querys := map[string]string{
"bbsid": d.Addition.Bbsid,
"folderId": parent,
"recType": "2",
}
var resps ListFileResp
_, err = d.request("/pc/resource/getResourceList", http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(querys)
}, &resps)
if err != nil {
return nil, err
}
for _, file := range resps.List {
// Files uploaded from the ChaoXing mobile app have no fileID field, but ObjectID is identical to fileID and can be used instead
if file.Content.FileID == "" {
file.Content.FileID = file.Content.ObjectID
}
files = append(files, file)
}
return files, nil
}
func EncryptByAES(message, key string) (string, error) {
aesKey := []byte(key)
plainText := []byte(message)
block, err := aes.NewCipher(aesKey)
if err != nil {
return "", err
}
iv := aesKey[:aes.BlockSize]
mode := cipher.NewCBCEncrypter(block, iv)
padding := aes.BlockSize - len(plainText)%aes.BlockSize
paddedText := append(plainText, byte(padding))
for i := 0; i < padding-1; i++ {
paddedText = append(paddedText, byte(padding))
}
ciphertext := make([]byte, len(paddedText))
mode.CryptBlocks(ciphertext, paddedText)
encrypted := base64.StdEncoding.EncodeToString(ciphertext)
return encrypted, nil
}
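For orientation, a minimal sketch of the matching decryption helper, assuming the same parameters EncryptByAES uses above (AES-CBC, IV taken from the first block of the key, PKCS#7-style padding). It is illustrative only, not part of the driver, and relies solely on imports already present in this file:
func DecryptByAES(encrypted, key string) (string, error) {
    aesKey := []byte(key)
    cipherText, err := base64.StdEncoding.DecodeString(encrypted)
    if err != nil {
        return "", err
    }
    block, err := aes.NewCipher(aesKey)
    if err != nil {
        return "", err
    }
    if len(cipherText) == 0 || len(cipherText)%aes.BlockSize != 0 {
        return "", errors.New("ciphertext is not a multiple of the AES block size")
    }
    mode := cipher.NewCBCDecrypter(block, aesKey[:aes.BlockSize])
    plainText := make([]byte, len(cipherText))
    mode.CryptBlocks(plainText, cipherText)
    // strip the PKCS#7-style padding appended by EncryptByAES
    padding := int(plainText[len(plainText)-1])
    if padding <= 0 || padding > aes.BlockSize || padding > len(plainText) {
        return "", errors.New("invalid padding")
    }
    return string(plainText[:len(plainText)-padding]), nil
}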
func CookiesToString(cookies []*http.Cookie) string {
var cookieStr string
for _, cookie := range cookies {
cookieStr += cookie.Name + "=" + cookie.Value + "; "
}
if len(cookieStr) > 2 {
cookieStr = cookieStr[:len(cookieStr)-2]
}
return cookieStr
}
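A quick standalone illustration of the string CookiesToString builds from a cookie slice; the cookie names and values are placeholders, and the helper is re-declared locally so the snippet runs on its own:
package main

import (
    "fmt"
    "net/http"
)

// cookiesToString mirrors CookiesToString above: "name=value" pairs joined by "; ".
func cookiesToString(cookies []*http.Cookie) string {
    var cookieStr string
    for _, cookie := range cookies {
        cookieStr += cookie.Name + "=" + cookie.Value + "; "
    }
    if len(cookieStr) > 2 {
        cookieStr = cookieStr[:len(cookieStr)-2]
    }
    return cookieStr
}

func main() {
    cookies := []*http.Cookie{
        {Name: "JSESSIONID", Value: "placeholder-session"},
        {Name: "UID", Value: "placeholder-uid"},
    }
    fmt.Println(cookiesToString(cookies))
    // Output: JSESSIONID=placeholder-session; UID=placeholder-uid
}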
func (d *ChaoXing) Login() (string, error) {
transferKey := "u2oh6Vu^HWe4_AES"
body := &bytes.Buffer{}
writer := multipart.NewWriter(body)
uname, err := EncryptByAES(d.Addition.UserName, transferKey)
if err != nil {
return "", err
}
password, err := EncryptByAES(d.Addition.Password, transferKey)
if err != nil {
return "", err
}
err = writer.WriteField("uname", uname)
if err != nil {
return "", err
}
err = writer.WriteField("password", password)
if err != nil {
return "", err
}
err = writer.WriteField("t", "true")
if err != nil {
return "", err
}
err = writer.Close()
if err != nil {
return "", err
}
// Create the request
req, err := http.NewRequest("POST", "https://passport2.chaoxing.com/fanyalogin", body)
if err != nil {
return "", err
}
req.Header.Set("Content-Type", writer.FormDataContentType())
req.Header.Set("Content-Length", fmt.Sprintf("%d", body.Len()))
resp, err := http.DefaultClient.Do(req)
if err != nil {
return "", err
}
defer resp.Body.Close()
return CookiesToString(resp.Cookies()), nil
}


@ -49,7 +49,19 @@ func (d *Cloudreve) List(ctx context.Context, dir model.Obj, args model.ListArgs
}
return utils.SliceConvert(r.Objects, func(src Object) (model.Obj, error) {
return objectToObj(src), nil
thumb, err := d.GetThumb(src)
if err != nil {
return nil, err
}
if src.Type == "dir" && d.EnableThumbAndFolderSize {
var dprop DirectoryProp
err = d.request(http.MethodGet, "/object/property/"+src.Id+"?is_folder=true", nil, &dprop)
if err != nil {
return nil, err
}
src.Size = dprop.Size
}
return objectToObj(src, thumb), nil
})
}


@ -9,11 +9,12 @@ type Addition struct {
// Usually one of two
driver.RootPath
// define other
Address string `json:"address" required:"true"`
Username string `json:"username"`
Password string `json:"password"`
Cookie string `json:"cookie"`
CustomUA string `json:"custom_ua"`
Address string `json:"address" required:"true"`
Username string `json:"username"`
Password string `json:"password"`
Cookie string `json:"cookie"`
CustomUA string `json:"custom_ua"`
EnableThumbAndFolderSize bool `json:"enable_thumb_and_folder_size"`
}
var config = driver.Config{


@ -44,13 +44,20 @@ type Object struct {
SourceEnabled bool `json:"source_enabled"`
}
func objectToObj(f Object) *model.Object {
return &model.Object{
ID: f.Id,
Name: f.Name,
Size: int64(f.Size),
Modified: f.Date,
IsFolder: f.Type == "dir",
type DirectoryProp struct {
Size int `json:"size"`
}
func objectToObj(f Object, t model.Thumbnail) *model.ObjThumb {
return &model.ObjThumb{
Object: model.Object{
ID: f.Id,
Name: f.Name,
Size: int64(f.Size),
Modified: f.Date,
IsFolder: f.Type == "dir",
},
Thumbnail: t,
}
}


@ -149,3 +149,26 @@ func convertSrc(obj model.Obj) map[string]interface{} {
m["items"] = items
return m
}
func (d *Cloudreve) GetThumb(file Object) (model.Thumbnail, error) {
if !d.Addition.EnableThumbAndFolderSize {
return model.Thumbnail{}, nil
}
ua := d.CustomUA
if ua == "" {
ua = base.UserAgent
}
req := base.NoRedirectClient.R()
req.SetHeaders(map[string]string{
"Cookie": "cloudreve-session=" + d.Cookie,
"Accept": "image/webp,image/apng,image/svg+xml,image/*,*/*;q=0.8",
"User-Agent": ua,
})
resp, err := req.Execute(http.MethodGet, d.Address+"/api/v3/file/thumb/"+file.Id)
if err != nil {
return model.Thumbnail{}, err
}
return model.Thumbnail{
Thumbnail: resp.Header().Get("Location"),
}, nil
}
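GetThumb above depends on the thumbnail endpoint answering with a redirect whose Location header carries the signed image URL. Below is a rough standalone sketch of that no-redirect pattern using net/http; the Cloudreve address, file id and session cookie are placeholders, and the driver itself uses base.NoRedirectClient instead:
package main

import (
    "fmt"
    "net/http"
)

func main() {
    // Do not follow redirects, so the Location header stays readable.
    client := &http.Client{
        CheckRedirect: func(req *http.Request, via []*http.Request) error {
            return http.ErrUseLastResponse
        },
    }
    req, err := http.NewRequest(http.MethodGet, "https://cloudreve.example.com/api/v3/file/thumb/FILE_ID", nil)
    if err != nil {
        panic(err)
    }
    req.Header.Set("Cookie", "cloudreve-session=PLACEHOLDER")
    resp, err := client.Do(req)
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()
    fmt.Println(resp.Header.Get("Location")) // the thumbnail URL, if the server redirected
}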


@ -16,6 +16,7 @@ import (
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/pkg/http_range"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/alist-org/alist/v3/server/common"
rcCrypt "github.com/rclone/rclone/backend/crypt"
"github.com/rclone/rclone/fs/config/configmap"
"github.com/rclone/rclone/fs/config/obscure"
@ -123,6 +124,9 @@ func (d *Crypt) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([
//filter illegal files
continue
}
if !d.ShowHidden && strings.HasPrefix(name, ".") {
continue
}
objRes := model.Object{
Name: name,
Size: 0,
@ -144,6 +148,9 @@ func (d *Crypt) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([
//filter illegal files
continue
}
if !d.ShowHidden && strings.HasPrefix(name, ".") {
continue
}
objRes := model.Object{
Name: name,
Size: size,
@ -152,7 +159,10 @@ func (d *Crypt) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([
Ctime: obj.CreateTime(),
// discarding hash as it's encrypted
}
if !ok {
if d.Thumbnail && thumb == "" {
thumb = utils.EncodePath(common.GetApiUrl(nil) + stdpath.Join("/d", args.ReqPath, ".thumbnails", name+".webp"), true)
}
if !ok && !d.Thumbnail {
result = append(result, &objRes)
} else {
objWithThumb := model.ObjThumb{
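As a rough illustration of the path the Thumbnail option above derives for a pre-generated thumbnail; the API URL, requested path and file name are all made-up, and the driver additionally percent-encodes the result via utils.EncodePath:
package main

import (
    "fmt"
    "path"
)

func main() {
    apiURL := "https://alist.example.com" // placeholder for common.GetApiUrl(nil)
    reqPath := "/movies"                  // placeholder for args.ReqPath
    name := "a.mkv"                       // decrypted file name
    fmt.Println(apiURL + path.Join("/d", reqPath, ".thumbnails", name+".webp"))
    // Output: https://alist.example.com/d/movies/.thumbnails/a.mkv.webp
}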


@ -19,6 +19,10 @@ type Addition struct {
Salt string `json:"salt" confidential:"true" help:"If you don't know what is salt, treat it as a second password. Optional but recommended"`
EncryptedSuffix string `json:"encrypted_suffix" required:"true" default:".bin" help:"for advanced user only! encrypted files will have this suffix"`
FileNameEncoding string `json:"filename_encoding" type:"select" required:"true" options:"base64,base32,base32768" default:"base64" help:"for advanced user only!"`
Thumbnail bool `json:"thumbnail" required:"true" default:"false" help:"enable thumbnail which pre-generated under .thumbnails folder"`
ShowHidden bool `json:"show_hidden" default:"true" required:"false" help:"show hidden directories and files"`
}
var config = driver.Config{


@ -45,7 +45,25 @@ func (d *Dropbox) Init(ctx context.Context) error {
if result != query {
return fmt.Errorf("failed to check user: %s", string(res))
}
return nil
d.RootNamespaceId, err = d.GetRootNamespaceId(ctx)
return err
}
func (d *Dropbox) GetRootNamespaceId(ctx context.Context) (string, error) {
res, err := d.request("/2/users/get_current_account", http.MethodPost, func(req *resty.Request) {
req.SetBody(nil)
})
if err != nil {
return "", err
}
var currentAccountResp CurrentAccountResp
err = utils.Json.Unmarshal(res, &currentAccountResp)
if err != nil {
return "", err
}
rootNamespaceId := currentAccountResp.RootInfo.RootNamespaceId
return rootNamespaceId, nil
}
func (d *Dropbox) Drop(ctx context.Context) error {
@ -203,7 +221,7 @@ func (d *Dropbox) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
_ = res.Body.Close()
if count > 0 {
up((i + 1) * 100 / count)
up(float64(i+1) * 100 / float64(count))
}
offset += byteSize


@ -17,7 +17,8 @@ type Addition struct {
ClientID string `json:"client_id" required:"false" help:"Keep it empty if you don't have one"`
ClientSecret string `json:"client_secret" required:"false" help:"Keep it empty if you don't have one"`
AccessToken string
AccessToken string
RootNamespaceId string
}
var config = driver.Config{


@ -23,6 +23,13 @@ type RefreshTokenErrorResp struct {
ErrorDescription string `json:"error_description"`
}
type CurrentAccountResp struct {
RootInfo struct {
RootNamespaceId string `json:"root_namespace_id"`
HomeNamespaceId string `json:"home_namespace_id"`
} `json:"root_info"`
}
type File struct {
Tag string `json:".tag"`
Name string `json:"name"`


@ -46,12 +46,22 @@ func (d *Dropbox) refreshToken() error {
func (d *Dropbox) request(uri, method string, callback base.ReqCallback, retry ...bool) ([]byte, error) {
req := base.RestyClient.R()
req.SetHeader("Authorization", "Bearer "+d.AccessToken)
if method == http.MethodPost {
req.SetHeader("Content-Type", "application/json")
if d.RootNamespaceId != "" {
apiPathRootJson, err := utils.Json.MarshalToString(map[string]interface{}{
".tag": "root",
"root": d.RootNamespaceId,
})
if err != nil {
return nil, err
}
req.SetHeader("Dropbox-API-Path-Root", apiPathRootJson)
}
if callback != nil {
callback(req)
}
if method == http.MethodPost && req.Body != nil {
req.SetHeader("Content-Type", "application/json")
}
var e ErrorResp
req.SetError(&e)
res, err := req.Execute(method, d.base+uri)
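For reference, the Dropbox-API-Path-Root header assembled above ends up carrying a small JSON document like the one below; the namespace id is a placeholder:
package main

import (
    "encoding/json"
    "fmt"
)

func main() {
    apiPathRoot, _ := json.Marshal(map[string]interface{}{
        ".tag": "root",
        "root": "1234567890", // placeholder root_namespace_id
    })
    fmt.Printf("Dropbox-API-Path-Root: %s\n", apiPathRoot)
    // Output: Dropbox-API-Path-Root: {".tag":"root","root":"1234567890"}
}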


@ -5,6 +5,7 @@ import (
"time"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
log "github.com/sirupsen/logrus"
)
@ -23,12 +24,17 @@ type File struct {
Name string `json:"name"`
MimeType string `json:"mimeType"`
ModifiedTime time.Time `json:"modifiedTime"`
CreatedTime time.Time `json:"createdTime"`
Size string `json:"size"`
ThumbnailLink string `json:"thumbnailLink"`
ShortcutDetails struct {
TargetId string `json:"targetId"`
TargetMimeType string `json:"targetMimeType"`
} `json:"shortcutDetails"`
MD5Checksum string `json:"md5Checksum"`
SHA1Checksum string `json:"sha1Checksum"`
SHA256Checksum string `json:"sha256Checksum"`
}
func fileToObj(f File) *model.ObjThumb {
@ -39,10 +45,18 @@ func fileToObj(f File) *model.ObjThumb {
ID: f.Id,
Name: f.Name,
Size: size,
Ctime: f.CreatedTime,
Modified: f.ModifiedTime,
IsFolder: f.MimeType == "application/vnd.google-apps.folder",
HashInfo: utils.NewHashInfoByMap(map[*utils.HashType]string{
utils.MD5: f.MD5Checksum,
utils.SHA1: f.SHA1Checksum,
utils.SHA256: f.SHA256Checksum,
}),
},
Thumbnail: model.Thumbnail{
Thumbnail: f.ThumbnailLink,
},
Thumbnail: model.Thumbnail{},
}
if f.MimeType == "application/vnd.google-apps.shortcut" {
obj.ID = f.ShortcutDetails.TargetId
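A small usage sketch of the hash metadata attached above; both checksum values are placeholders, and GetHash is the same accessor used later in this diff by the PikPak Put change:
package main

import (
    "fmt"

    "github.com/alist-org/alist/v3/pkg/utils"
)

func main() {
    info := utils.NewHashInfoByMap(map[*utils.HashType]string{
        utils.MD5:  "0123456789abcdef0123456789abcdef",         // placeholder md5Checksum
        utils.SHA1: "0123456789abcdef0123456789abcdef01234567", // placeholder sha1Checksum
    })
    fmt.Println(info.GetHash(utils.MD5)) // reads a single algorithm back out of the object
}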


@ -5,14 +5,14 @@ import (
"crypto/x509"
"encoding/pem"
"fmt"
"github.com/alist-org/alist/v3/pkg/http_range"
"io/ioutil"
"net/http"
"os"
"regexp"
"strconv"
"time"
"github.com/alist-org/alist/v3/pkg/http_range"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
@ -43,7 +43,7 @@ func (d *GoogleDrive) refreshToken() error {
gdsaFileThis := d.RefreshToken
if gdsaFile.IsDir() {
if len(d.ServiceAccountFileList) <= 0 {
gdsaReadDir, gdsaDirErr := ioutil.ReadDir(d.RefreshToken)
gdsaReadDir, gdsaDirErr := os.ReadDir(d.RefreshToken)
if gdsaDirErr != nil {
log.Error("read dir fail")
return gdsaDirErr
@ -75,7 +75,7 @@ func (d *GoogleDrive) refreshToken() error {
}
}
gdsaFileThisContent, err := ioutil.ReadFile(gdsaFileThis)
gdsaFileThisContent, err := os.ReadFile(gdsaFileThis)
if err != nil {
return err
}
@ -195,7 +195,7 @@ func (d *GoogleDrive) getFiles(id string) ([]File, error) {
}
query := map[string]string{
"orderBy": orderBy,
"fields": "files(id,name,mimeType,size,modifiedTime,thumbnailLink,shortcutDetails),nextPageToken",
"fields": "files(id,name,mimeType,size,modifiedTime,createdTime,thumbnailLink,shortcutDetails,md5Checksum,sha1Checksum,sha256Checksum),nextPageToken",
"pageSize": "1000",
"q": fmt.Sprintf("'%s' in parents and trashed = false", id),
//"includeItemsFromAllDrives": "true",


@ -58,9 +58,33 @@ func (d *GooglePhoto) Link(ctx context.Context, file model.Obj, args model.LinkA
URL: f.BaseURL + "=d",
}, nil
} else if strings.Contains(f.MimeType, "video/") {
return &model.Link{
URL: f.BaseURL + "=dv",
}, nil
var width, height int
fmt.Sscanf(f.MediaMetadata.Width, "%d", &width)
fmt.Sscanf(f.MediaMetadata.Height, "%d", &height)
switch {
// 1080P
case width == 1920 && height == 1080:
return &model.Link{
URL: f.BaseURL + "=m37",
}, nil
// 720P
case width == 1280 && height == 720:
return &model.Link{
URL: f.BaseURL + "=m22",
}, nil
// 360P
case width == 640 && height == 360:
return &model.Link{
URL: f.BaseURL + "=m18",
}, nil
default:
return &model.Link{
URL: f.BaseURL + "=dv",
}, nil
}
}
return &model.Link{}, nil
}
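The resolution check above parses the string dimensions from mediaMetadata with Sscanf before picking a transcode suffix; a tiny standalone version of that step, with "1280"/"720" standing in for MediaMetadata.Width/Height:
package main

import "fmt"

func main() {
    var width, height int
    fmt.Sscanf("1280", "%d", &width)
    fmt.Sscanf("720", "%d", &height)
    fmt.Println(width, height) // 1280 720 -> matched by the 720P branch ("=m22") above
}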


@ -151,7 +151,7 @@ func (d *GooglePhoto) getMedia(id string) (MediaItem, error) {
var resp MediaItem
query := map[string]string{
"fields": "baseUrl,mimeType",
"fields": "mediaMetadata,baseUrl,mimeType",
}
_, err := d.request(fmt.Sprintf("https://photoslibrary.googleapis.com/v1/mediaItems/%s", id), http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(query)

drivers/ilanzou/driver.go Normal file

@ -0,0 +1,378 @@
package template
import (
"context"
"crypto/md5"
"encoding/base64"
"encoding/hex"
"fmt"
"io"
"net/http"
"net/url"
"strconv"
"strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/foxxorcat/mopan-sdk-go"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
)
type ILanZou struct {
model.Storage
Addition
userID string
account string
upClient *resty.Client
conf Conf
config driver.Config
}
func (d *ILanZou) Config() driver.Config {
return d.config
}
func (d *ILanZou) GetAddition() driver.Additional {
return &d.Addition
}
func (d *ILanZou) Init(ctx context.Context) error {
d.upClient = base.NewRestyClient().SetTimeout(time.Minute * 10)
if d.UUID == "" {
res, err := d.unproved("/getUuid", http.MethodGet, nil)
if err != nil {
return err
}
d.UUID = utils.Json.Get(res, "uuid").ToString()
}
res, err := d.proved("/user/account/map", http.MethodGet, nil)
if err != nil {
return err
}
d.userID = utils.Json.Get(res, "map", "userId").ToString()
d.account = utils.Json.Get(res, "map", "account").ToString()
log.Debugf("[ilanzou] init response: %s", res)
return nil
}
func (d *ILanZou) Drop(ctx context.Context) error {
return nil
}
func (d *ILanZou) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
offset := 1
limit := 60
var res []ListItem
for {
var resp ListResp
_, err := d.proved("/record/file/list", http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(map[string]string{
"type": "0",
"folderId": dir.GetID(),
"offset": strconv.Itoa(offset),
"limit": strconv.Itoa(limit),
}).SetResult(&resp)
})
if err != nil {
return nil, err
}
res = append(res, resp.List...)
if resp.TotalPage <= resp.Offset {
break
}
offset++
}
return utils.SliceConvert(res, func(f ListItem) (model.Obj, error) {
updTime, err := time.ParseInLocation("2006-01-02 15:04:05", f.UpdTime, time.Local)
if err != nil {
return nil, err
}
obj := model.Object{
ID: strconv.FormatInt(f.FileId, 10),
//Path: "",
Name: f.FileName,
Size: f.FileSize * 1024,
Modified: updTime,
Ctime: updTime,
IsFolder: false,
//HashInfo: utils.HashInfo{},
}
if f.FileType == 2 {
obj.IsFolder = true
obj.Size = 0
obj.ID = strconv.FormatInt(f.FolderId, 10)
obj.Name = f.FolderName
}
return &obj, nil
})
}
func (d *ILanZou) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
u, err := url.Parse(d.conf.base + "/" + d.conf.unproved + "/file/redirect")
if err != nil {
return nil, err
}
query := u.Query()
query.Set("uuid", d.UUID)
query.Set("devType", "6")
query.Set("devCode", d.UUID)
query.Set("devModel", "chrome")
query.Set("devVersion", d.conf.devVersion)
query.Set("appVersion", "")
ts, err := getTimestamp(d.conf.secret)
if err != nil {
return nil, err
}
query.Set("timestamp", ts)
query.Set("appToken", d.Token)
query.Set("enable", "1")
downloadId, err := mopan.AesEncrypt([]byte(fmt.Sprintf("%s|%s", file.GetID(), d.userID)), d.conf.secret)
if err != nil {
return nil, err
}
query.Set("downloadId", hex.EncodeToString(downloadId))
auth, err := mopan.AesEncrypt([]byte(fmt.Sprintf("%s|%d", file.GetID(), time.Now().UnixMilli())), d.conf.secret)
if err != nil {
return nil, err
}
query.Set("auth", hex.EncodeToString(auth))
u.RawQuery = query.Encode()
realURL := u.String()
// get the url after redirect
res, err := base.NoRedirectClient.R().Get(realURL)
if err != nil {
return nil, err
}
if res.StatusCode() == 302 {
realURL = res.Header().Get("location")
} else {
return nil, fmt.Errorf("redirect failed, status: %d", res.StatusCode())
}
link := model.Link{URL: realURL}
return &link, nil
}
func (d *ILanZou) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
res, err := d.proved("/file/folder/save", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"folderDesc": "",
"folderId": parentDir.GetID(),
"folderName": dirName,
})
})
if err != nil {
return nil, err
}
return &model.Object{
ID: utils.Json.Get(res, "list", "0", "id").ToString(),
//Path: "",
Name: dirName,
Size: 0,
Modified: time.Now(),
Ctime: time.Now(),
IsFolder: true,
//HashInfo: utils.HashInfo{},
}, nil
}
func (d *ILanZou) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
var fileIds, folderIds []string
if srcObj.IsDir() {
folderIds = []string{srcObj.GetID()}
} else {
fileIds = []string{srcObj.GetID()}
}
_, err := d.proved("/file/folder/move", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"folderIds": strings.Join(folderIds, ","),
"fileIds": strings.Join(fileIds, ","),
"targetId": dstDir.GetID(),
})
})
if err != nil {
return nil, err
}
return srcObj, nil
}
func (d *ILanZou) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
var err error
if srcObj.IsDir() {
_, err = d.proved("/file/folder/edit", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"folderDesc": "",
"folderId": srcObj.GetID(),
"folderName": newName,
})
})
} else {
_, err = d.proved("/file/edit", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"fileDesc": "",
"fileId": srcObj.GetID(),
"fileName": newName,
})
})
}
if err != nil {
return nil, err
}
return &model.Object{
ID: srcObj.GetID(),
//Path: "",
Name: newName,
Size: srcObj.GetSize(),
Modified: time.Now(),
Ctime: srcObj.CreateTime(),
IsFolder: srcObj.IsDir(),
}, nil
}
func (d *ILanZou) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
// TODO copy obj, optional
return nil, errs.NotImplement
}
func (d *ILanZou) Remove(ctx context.Context, obj model.Obj) error {
var fileIds, folderIds []string
if obj.IsDir() {
folderIds = []string{obj.GetID()}
} else {
fileIds = []string{obj.GetID()}
}
_, err := d.proved("/file/delete", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"folderIds": strings.Join(folderIds, ","),
"fileIds": strings.Join(fileIds, ","),
"status": 0,
})
})
return err
}
const DefaultPartSize = 1024 * 1024 * 8
func (d *ILanZou) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
h := md5.New()
// need to calculate md5 of the full content
tempFile, err := stream.CacheFullInTempFile()
if err != nil {
return nil, err
}
defer func() {
_ = tempFile.Close()
}()
if _, err = io.Copy(h, tempFile); err != nil {
return nil, err
}
_, err = tempFile.Seek(0, io.SeekStart)
if err != nil {
return nil, err
}
etag := hex.EncodeToString(h.Sum(nil))
// get upToken
res, err := d.proved("/7n/getUpToken", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"fileId": "",
"fileName": stream.GetName(),
"fileSize": stream.GetSize() / 1024,
"folderId": dstDir.GetID(),
"md5": etag,
"type": 1,
})
})
if err != nil {
return nil, err
}
upToken := utils.Json.Get(res, "upToken").ToString()
now := time.Now()
key := fmt.Sprintf("disk/%d/%d/%d/%s/%016d", now.Year(), now.Month(), now.Day(), d.account, now.UnixMilli())
var token string
if stream.GetSize() <= DefaultPartSize {
res, err := d.upClient.R().SetMultipartFormData(map[string]string{
"token": upToken,
"key": key,
"fname": stream.GetName(),
}).SetMultipartField("file", stream.GetName(), stream.GetMimetype(), tempFile).
Post("https://upload.qiniup.com/")
if err != nil {
return nil, err
}
token = utils.Json.Get(res.Body(), "token").ToString()
} else {
keyBase64 := base64.URLEncoding.EncodeToString([]byte(key))
res, err := d.upClient.R().SetHeader("Authorization", "UpToken "+upToken).Post(fmt.Sprintf("https://upload.qiniup.com/buckets/%s/objects/%s/uploads", d.conf.bucket, keyBase64))
if err != nil {
return nil, err
}
uploadId := utils.Json.Get(res.Body(), "uploadId").ToString()
parts := make([]Part, 0)
partNum := (stream.GetSize() + DefaultPartSize - 1) / DefaultPartSize
for i := 1; i <= int(partNum); i++ {
u := fmt.Sprintf("https://upload.qiniup.com/buckets/%s/objects/%s/uploads/%s/%d", d.conf.bucket, keyBase64, uploadId, i)
res, err = d.upClient.R().SetHeader("Authorization", "UpToken "+upToken).SetBody(io.LimitReader(tempFile, DefaultPartSize)).Put(u)
if err != nil {
return nil, err
}
etag := utils.Json.Get(res.Body(), "etag").ToString()
parts = append(parts, Part{
PartNumber: i,
ETag: etag,
})
}
res, err = d.upClient.R().SetHeader("Authorization", "UpToken "+upToken).SetBody(base.Json{
"fnmae": stream.GetName(),
"parts": parts,
}).Post(fmt.Sprintf("https://upload.qiniup.com/buckets/%s/objects/%s/uploads/%s", d.conf.bucket, keyBase64, uploadId))
if err != nil {
return nil, err
}
token = utils.Json.Get(res.Body(), "token").ToString()
}
// commit upload
var resp UploadResultResp
for i := 0; i < 10; i++ {
_, err = d.unproved("/7n/results", http.MethodPost, func(req *resty.Request) {
req.SetQueryParams(map[string]string{
"tokenList": token,
"tokenTime": time.Now().Format("Mon Jan 02 2006 15:04:05 GMT-0700 (MST)"),
}).SetResult(&resp)
})
if err != nil {
return nil, err
}
if len(resp.List) == 0 {
return nil, fmt.Errorf("upload failed, empty response")
}
if resp.List[0].Status == 1 {
break
}
time.Sleep(time.Second * 1)
}
file := resp.List[0]
if file.Status != 1 {
return nil, fmt.Errorf("upload failed, status: %d", resp.List[0].Status)
}
return &model.Object{
ID: strconv.FormatInt(file.FileId, 10),
//Path: ,
Name: file.FileName,
Size: stream.GetSize(),
Modified: stream.ModTime(),
Ctime: stream.CreateTime(),
IsFolder: false,
HashInfo: utils.NewHashInfo(utils.MD5, etag),
}, nil
}
//func (d *ILanZou) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
// return nil, errs.NotSupport
//}
var _ driver.Driver = (*ILanZou)(nil)
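A quick sanity check of the chunking arithmetic used in Put above; the 20 MiB size is made up, and DefaultPartSize is the same 8 MiB constant declared in this file:
package main

import "fmt"

const DefaultPartSize = 1024 * 1024 * 8 // same 8 MiB constant as in driver.go

func main() {
    size := int64(20 * 1024 * 1024) // hypothetical 20 MiB upload
    partNum := (size + DefaultPartSize - 1) / DefaultPartSize
    fmt.Println(partNum) // 3 parts: two full 8 MiB parts plus one 4 MiB tail
}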

drivers/ilanzou/meta.go Normal file

@ -0,0 +1,77 @@
package template
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
driver.RootID
Username string `json:"username" type:"string" required:"true"`
Password string `json:"password" type:"string" required:"true"`
Token string
UUID string
}
type Conf struct {
base string
secret []byte
bucket string
unproved string
proved string
devVersion string
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &ILanZou{
config: driver.Config{
Name: "ILanZou",
LocalSort: false,
OnlyLocal: false,
OnlyProxy: false,
NoCache: false,
NoUpload: false,
NeedMs: false,
DefaultRoot: "0",
CheckStatus: false,
Alert: "",
NoOverwriteUpload: false,
},
conf: Conf{
base: "https://api.ilanzou.com",
secret: []byte("lanZouY-disk-app"),
bucket: "wpanstore-lanzou",
unproved: "unproved",
proved: "proved",
devVersion: "120",
},
}
})
op.RegisterDriver(func() driver.Driver {
return &ILanZou{
config: driver.Config{
Name: "FeijiPan",
LocalSort: false,
OnlyLocal: false,
OnlyProxy: false,
NoCache: false,
NoUpload: false,
NeedMs: false,
DefaultRoot: "0",
CheckStatus: false,
Alert: "",
NoOverwriteUpload: false,
},
conf: Conf{
base: "https://api.feijipan.com",
secret: []byte("dingHao-disk-app"),
bucket: "wpanstore",
unproved: "ws",
proved: "app",
devVersion: "121",
},
}
})
}

drivers/ilanzou/types.go Normal file

@ -0,0 +1,57 @@
package template
type ListResp struct {
Msg string `json:"msg"`
Total int `json:"total"`
Code int `json:"code"`
Offset int `json:"offset"`
TotalPage int `json:"totalPage"`
Limit int `json:"limit"`
List []ListItem `json:"list"`
}
type ListItem struct {
IconId int `json:"iconId"`
IsAmt int `json:"isAmt"`
FolderDesc string `json:"folderDesc,omitempty"`
AddTime string `json:"addTime"`
FolderId int64 `json:"folderId"`
ParentId int64 `json:"parentId"`
ParentName string `json:"parentName"`
NoteType int `json:"noteType,omitempty"`
UpdTime string `json:"updTime"`
IsShare int `json:"isShare"`
FolderIcon string `json:"folderIcon,omitempty"`
FolderName string `json:"folderName,omitempty"`
FileType int `json:"fileType"`
Status int `json:"status"`
IsFileShare int `json:"isFileShare,omitempty"`
FileName string `json:"fileName,omitempty"`
FileStars float64 `json:"fileStars,omitempty"`
IsFileDownload int `json:"isFileDownload,omitempty"`
FileComments int `json:"fileComments,omitempty"`
FileSize int64 `json:"fileSize,omitempty"`
FileIcon string `json:"fileIcon,omitempty"`
FileDownloads int `json:"fileDownloads,omitempty"`
FileUrl interface{} `json:"fileUrl"`
FileLikes int `json:"fileLikes,omitempty"`
FileId int64 `json:"fileId,omitempty"`
}
type Part struct {
PartNumber int `json:"partNumber"`
ETag string `json:"etag"`
}
type UploadResultResp struct {
Msg string `json:"msg"`
Code int `json:"code"`
List []struct {
FileIconId int `json:"fileIconId"`
FileName string `json:"fileName"`
FileIcon string `json:"fileIcon"`
FileId int64 `json:"fileId"`
Status int `json:"status"`
Token string `json:"token"`
} `json:"list"`
}

drivers/ilanzou/util.go Normal file

@ -0,0 +1,97 @@
package template
import (
"encoding/hex"
"fmt"
"net/http"
"strconv"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/foxxorcat/mopan-sdk-go"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
)
func (d *ILanZou) login() error {
res, err := d.unproved("/login", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"loginName": d.Username,
"loginPwd": d.Password,
})
})
if err != nil {
return err
}
d.Token = utils.Json.Get(res, "data", "appToken").ToString()
if d.Token == "" {
return fmt.Errorf("failed to login: token is empty, resp: %s", res)
}
return nil
}
func getTimestamp(secret []byte) (string, error) {
ts := time.Now().UnixMilli()
tsStr := strconv.FormatInt(ts, 10)
res, err := mopan.AesEncrypt([]byte(tsStr), secret)
if err != nil {
return "", err
}
return hex.EncodeToString(res), nil
}
func (d *ILanZou) request(pathname, method string, callback base.ReqCallback, proved bool, retry ...bool) ([]byte, error) {
req := base.RestyClient.R()
ts, err := getTimestamp(d.conf.secret)
if err != nil {
return nil, err
}
req.SetQueryParams(map[string]string{
"uuid": d.UUID,
"devType": "6",
"devCode": d.UUID,
"devModel": "chrome",
"devVersion": "120",
"appVersion": "",
"timestamp": ts,
//"appToken": d.Token,
"extra": "2",
})
if proved {
req.SetQueryParam("appToken", d.Token)
}
if callback != nil {
callback(req)
}
res, err := req.Execute(method, d.conf.base+pathname)
if err != nil {
if res != nil {
log.Errorf("[iLanZou] request error: %s", res.String())
}
return nil, err
}
isRetry := len(retry) > 0 && retry[0]
body := res.Body()
code := utils.Json.Get(body, "code").ToInt()
msg := utils.Json.Get(body, "msg").ToString()
if code != 200 {
if !isRetry && proved && (utils.SliceContains([]int{-1, -2}, code) || d.Token == "") {
err = d.login()
if err != nil {
return nil, err
}
return d.request(pathname, method, callback, proved, true)
}
return nil, fmt.Errorf("%d: %s", code, msg)
}
return body, nil
}
func (d *ILanZou) unproved(pathname, method string, callback base.ReqCallback) ([]byte, error) {
return d.request("/"+d.conf.unproved+pathname, method, callback, false)
}
func (d *ILanZou) proved(pathname, method string, callback base.ReqCallback) ([]byte, error) {
return d.request("/"+d.conf.proved+pathname, method, callback, true)
}
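Putting the helpers above together with the two Conf blocks registered in meta.go, the composed endpoints look roughly like this; the request paths are taken from Init purely for illustration:
package main

import "fmt"

func main() {
    // base URL -> {unproved prefix, proved prefix}, copied from meta.go
    confs := map[string][2]string{
        "https://api.ilanzou.com":  {"unproved", "proved"},
        "https://api.feijipan.com": {"ws", "app"},
    }
    for base, p := range confs {
        fmt.Println(base + "/" + p[0] + "/getUuid")          // unproved request
        fmt.Println(base + "/" + p[1] + "/user/account/map") // proved request
    }
}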


@ -258,7 +258,7 @@ var sizeFindReg = regexp.MustCompile(`(?i)大小\W*([0-9.]+\s*[bkm]+)`)
var timeFindReg = regexp.MustCompile(`\d+\s*[秒天分小][钟时]?前|[昨前]天|\d{4}-\d{2}-\d{2}`)
// Find the IDs and names of sub-folders inside a shared folder
var findSubFolaerReg = regexp.MustCompile(`(?i)(?:folderlink|mbxfolder).+href="/(.+?)"(?:.+filename")?>(.+?)<`)
var findSubFolderReg = regexp.MustCompile(`(?i)(?:folderlink|mbxfolder).+href="/(.+?)"(?:.+filename")?>(.+?)<`)
// Get the download page link
var findDownPageParamReg = regexp.MustCompile(`<iframe.*?src="(.+?)"`)
@ -455,7 +455,7 @@ func (d *LanZou) getFolderByShareUrl(pwd string, sharePageData string) ([]FileOr
files := make([]FileOrFolderByShareUrl, 0)
// VIP accounts: fetch folders
floders := findSubFolaerReg.FindAllStringSubmatch(sharePageData, -1)
floders := findSubFolderReg.FindAllStringSubmatch(sharePageData, -1)
for _, floder := range floders {
if len(floder) == 3 {
files = append(files, FileOrFolderByShareUrl{
@ -476,10 +476,10 @@ func (d *LanZou) getFolderByShareUrl(pwd string, sharePageData string) ([]FileOr
if err != nil {
return nil, err
}
/*// Files inside the folder don't require the password either
// Files inside the folder require the password
for i := 0; i < len(resp.Text); i++ {
resp.Text[i].Pwd = pwd
}*/
}
if len(resp.Text) == 0 {
break
}


@ -257,10 +257,18 @@ func (d *Local) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
func (d *Local) Remove(ctx context.Context, obj model.Obj) error {
var err error
if obj.IsDir() {
err = os.RemoveAll(obj.GetPath())
if utils.SliceContains([]string{"", "delete permanently"}, d.RecycleBinPath) {
if obj.IsDir() {
err = os.RemoveAll(obj.GetPath())
} else {
err = os.Remove(obj.GetPath())
}
} else {
err = os.Remove(obj.GetPath())
dstPath := filepath.Join(d.RecycleBinPath, obj.GetName())
if utils.Exists(dstPath) {
dstPath = filepath.Join(d.RecycleBinPath, obj.GetName()+"_"+time.Now().Format("20060102150405"))
}
err = os.Rename(obj.GetPath(), dstPath)
}
if err != nil {
return err
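For reference, the collision fallback above produces destination names of this shape; the recycle-bin path and file name are hypothetical:
package main

import (
    "fmt"
    "path/filepath"
    "time"
)

func main() {
    recycleBinPath := "/tmp/recycle" // placeholder recycle_bin_path
    name := "report.pdf"
    // Same timestamp layout the driver appends when the target name already exists.
    dst := filepath.Join(recycleBinPath, name+"_"+time.Now().Format("20060102150405"))
    fmt.Println(dst) // e.g. /tmp/recycle/report.pdf_20240302153628
}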


@ -11,6 +11,7 @@ type Addition struct {
ThumbCacheFolder string `json:"thumb_cache_folder"`
ShowHidden bool `json:"show_hidden" default:"true" required:"false" help:"show hidden directories and files"`
MkdirPerm string `json:"mkdir_perm" default:"777"`
RecycleBinPath string `json:"recycle_bin_path" default:"delete permanently" help:"path to the recycle bin; leave empty or keep 'delete permanently' to delete files permanently"`
}
var config = driver.Config{


@ -188,6 +188,9 @@ func (d *MediaTrack) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
_ = tempFile.Close()
}()
uploader := s3manager.NewUploader(s)
if stream.GetSize() > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
uploader.PartSize = stream.GetSize() / (s3manager.MaxUploadParts - 1)
}
input := &s3manager.UploadInput{
Bucket: &resp.Data.Bucket,
Key: &resp.Data.Object,
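The part-size adjustment above only matters once a file exceeds what the SDK's default 10,000 parts x 5 MiB can cover (about 48.8 GiB); a rough standalone illustration with a made-up 60 GiB size:
package main

import (
    "fmt"

    "github.com/aws/aws-sdk-go/service/s3/s3manager"
)

func main() {
    size := int64(60) * 1024 * 1024 * 1024 // hypothetical 60 GiB upload
    partSize := int64(s3manager.DefaultUploadPartSize)
    if size > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
        partSize = size / (s3manager.MaxUploadParts - 1)
    }
    fmt.Println(partSize) // ~6.1 MiB per part, keeping the upload under 10,000 parts
}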


@ -4,11 +4,12 @@ import (
"context"
"errors"
"fmt"
"github.com/alist-org/alist/v3/pkg/http_range"
"github.com/rclone/rclone/lib/readers"
"io"
"time"
"github.com/alist-org/alist/v3/pkg/http_range"
"github.com/rclone/rclone/lib/readers"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
@ -169,7 +170,7 @@ func (d *Mega) Put(ctx context.Context, dstDir model.Obj, stream model.FileStrea
if err != nil {
return err
}
up(id * 100 / u.Chunks())
up(float64(id) * 100 / float64(u.Chunks()))
}
_, err = u.Finish()


@ -7,6 +7,7 @@ import (
"io"
"net/http"
"strconv"
"strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
@ -42,23 +43,31 @@ func (d *MoPan) Init(ctx context.Context) error {
if d.uploadThread < 1 || d.uploadThread > 32 {
d.uploadThread, d.UploadThread = 3, "3"
}
login := func() error {
data, err := d.client.Login(d.Phone, d.Password)
defer func() { d.SMSCode = "" }()
login := func() (err error) {
var loginData *mopan.LoginResp
if d.SMSCode != "" {
loginData, err = d.client.LoginBySmsStep2(d.Phone, d.SMSCode)
} else {
loginData, err = d.client.Login(d.Phone, d.Password)
}
if err != nil {
return err
}
d.client.SetAuthorization(data.Token)
d.client.SetAuthorization(loginData.Token)
info, err := d.client.GetUserInfo()
if err != nil {
return err
}
d.userID = info.UserID
log.Debugf("[mopan] Phone: %s UserCloudStorageRelations: %+v", d.Phone, data.UserCloudStorageRelations)
log.Debugf("[mopan] Phone: %s UserCloudStorageRelations: %+v", d.Phone, loginData.UserCloudStorageRelations)
cloudCircleApp, _ := d.client.QueryAllCloudCircleApp()
log.Debugf("[mopan] Phone: %s CloudCircleApp: %+v", d.Phone, cloudCircleApp)
if d.RootFolderID == "" {
for _, userCloudStorage := range data.UserCloudStorageRelations {
for _, userCloudStorage := range loginData.UserCloudStorageRelations {
if userCloudStorage.Path == "/文件" {
d.RootFolderID = userCloudStorage.FolderID
}
@ -75,8 +84,20 @@ func (d *MoPan) Init(ctx context.Context) error {
op.MustSaveDriverStorage(d)
}
return err
}).SetDeviceInfo(d.DeviceInfo)
d.DeviceInfo = d.client.GetDeviceInfo()
})
var deviceInfo mopan.DeviceInfo
if strings.TrimSpace(d.DeviceInfo) != "" && utils.Json.UnmarshalFromString(d.DeviceInfo, &deviceInfo) == nil {
d.client.SetDeviceInfo(&deviceInfo)
}
d.DeviceInfo, _ = utils.Json.MarshalToString(d.client.GetDeviceInfo())
if strings.Contains(d.SMSCode, "send") {
if _, err := d.client.LoginBySms(d.Phone); err != nil {
return err
}
return errors.New("please enter the SMS code")
}
return login()
}
@ -117,6 +138,18 @@ func (d *MoPan) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (
return nil, err
}
data.DownloadUrl = strings.Replace(strings.ReplaceAll(data.DownloadUrl, "&amp;", "&"), "http://", "https://", 1)
res, err := base.NoRedirectClient.R().SetDoNotParseResponse(true).SetContext(ctx).Get(data.DownloadUrl)
if err != nil {
return nil, err
}
defer func() {
_ = res.RawBody().Close()
}()
if res.StatusCode() == 302 {
data.DownloadUrl = res.Header().Get("location")
}
return &model.Link{
URL: data.DownloadUrl,
}, nil
@ -262,7 +295,7 @@ func (d *MoPan) Put(ctx context.Context, dstDir model.Obj, stream model.FileStre
}
if !initUpdload.FileDataExists {
fmt.Println(d.client.CloudDiskStartBusiness())
utils.Log.Error(d.client.CloudDiskStartBusiness())
threadG, upCtx := errgroup.NewGroupWithContext(ctx, d.uploadThread,
retry.Attempts(3),
@ -298,7 +331,7 @@ func (d *MoPan) Put(ctx context.Context, dstDir model.Obj, stream model.FileStre
if resp.StatusCode != http.StatusOK {
return fmt.Errorf("upload err,code=%d", resp.StatusCode)
}
up(100 * int(threadG.Success()) / len(parts))
up(100 * float64(threadG.Success()) / float64(len(parts)))
initUpdload.PartInfos[i] = ""
return nil
})


@ -8,6 +8,7 @@ import (
type Addition struct {
Phone string `json:"phone" required:"true"`
Password string `json:"password" required:"true"`
SMSCode string `json:"sms_code" help:"enter 'send' to request an SMS login code"`
RootFolderID string `json:"root_folder_id" default:""`


@ -4,6 +4,7 @@ import (
"time"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/foxxorcat/mopan-sdk-go"
)
@ -14,6 +15,8 @@ func fileToObj(f mopan.File) model.Obj {
Name: f.Name,
Size: int64(f.Size),
Modified: time.Time(f.LastOpTime),
Ctime: time.Time(f.CreateDate),
HashInfo: utils.NewHashInfo(utils.MD5, f.Md5),
},
Thumbnail: model.Thumbnail{
Thumbnail: f.Icon.SmallURL,
@ -26,6 +29,7 @@ func folderToObj(f mopan.Folder) model.Obj {
ID: string(f.ID),
Name: f.Name,
Modified: time.Time(f.LastOpTime),
Ctime: time.Time(f.CreateDate),
IsFolder: true,
}
}
@ -37,6 +41,7 @@ func CloneObj(o model.Obj, newID, newName string) model.Obj {
Name: newName,
IsFolder: true,
Modified: o.ModTime(),
Ctime: o.CreateTime(),
}
}
@ -50,6 +55,8 @@ func CloneObj(o model.Obj, newID, newName string) model.Obj {
Name: newName,
Size: o.GetSize(),
Modified: o.ModTime(),
Ctime: o.CreateTime(),
HashInfo: o.GetHash(),
},
Thumbnail: model.Thumbnail{
Thumbnail: thumb,


@ -4,7 +4,9 @@ import (
"context"
"fmt"
"net/http"
"net/url"
"path"
"sync"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
@ -18,6 +20,8 @@ type Onedrive struct {
model.Storage
Addition
AccessToken string
root *Object
mutex sync.Mutex
}
func (d *Onedrive) Config() driver.Config {
@ -39,6 +43,42 @@ func (d *Onedrive) Drop(ctx context.Context) error {
return nil
}
func (d *Onedrive) GetRoot(ctx context.Context) (model.Obj, error) {
if d.root != nil {
return d.root, nil
}
d.mutex.Lock()
defer d.mutex.Unlock()
root := &Object{
ObjThumb: model.ObjThumb{
Object: model.Object{
ID: "root",
Path: d.RootFolderPath,
Name: "root",
Size: 0,
Modified: d.Modified,
Ctime: d.Modified,
IsFolder: true,
},
},
ParentID: "",
}
if !utils.PathEqual(d.RootFolderPath, "/") {
// get root folder id
url := d.GetMetaUrl(false, d.RootFolderPath)
var resp struct {
Id string `json:"id"`
}
_, err := d.Request(url, http.MethodGet, nil, &resp)
if err != nil {
return nil, err
}
root.ID = resp.Id
}
d.root = root
return d.root, nil
}
func (d *Onedrive) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
files, err := d.getFiles(dir.GetPath())
if err != nil {
@ -57,8 +97,17 @@ func (d *Onedrive) Link(ctx context.Context, file model.Obj, args model.LinkArgs
if f.File == nil {
return nil, errs.NotFile
}
u := f.Url
if d.CustomHost != "" {
_u, err := url.Parse(f.Url)
if err != nil {
return nil, err
}
_u.Host = d.CustomHost
u = _u.String()
}
return &model.Link{
URL: f.Url,
URL: u,
}, nil
}
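A small illustration of what the new custom_host option does to a download link; both the pre-signed URL and the replacement host are placeholders:
package main

import (
    "fmt"
    "net/url"
)

func main() {
    rawURL := "https://public.example-files.1drv.com/download/abc?tempauth=xyz" // placeholder pre-signed link
    customHost := "dl.example.com"                                              // hypothetical custom_host value
    u, err := url.Parse(rawURL)
    if err != nil {
        panic(err)
    }
    u.Host = customHost // only the host is swapped; path and query survive
    fmt.Println(u.String())
    // Output: https://dl.example.com/download/abc?tempauth=xyz
}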


@ -15,6 +15,7 @@ type Addition struct {
RefreshToken string `json:"refresh_token" required:"true"`
SiteId string `json:"site_id"`
ChunkSize int64 `json:"chunk_size" type:"number" default:"5"`
CustomHost string `json:"custom_host" help:"Custom host for onedrive download link"`
}
var config = driver.Config{


@ -196,13 +196,14 @@ func (d *Onedrive) upBig(ctx context.Context, dstDir model.Obj, stream model.Fil
if err != nil {
return err
}
if res.StatusCode != 201 && res.StatusCode != 202 {
// https://learn.microsoft.com/zh-cn/onedrive/developer/rest-api/api/driveitem_createuploadsession
if res.StatusCode != 201 && res.StatusCode != 202 && res.StatusCode != 200 {
data, _ := io.ReadAll(res.Body)
res.Body.Close()
return errors.New(string(data))
}
res.Body.Close()
up(int(finish * 100 / stream.GetSize()))
up(float64(finish) * 100 / float64(stream.GetSize()))
}
return nil
}


@ -4,7 +4,9 @@ import (
"context"
"fmt"
"net/http"
"net/url"
"path"
"sync"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
@ -18,6 +20,8 @@ type OnedriveAPP struct {
model.Storage
Addition
AccessToken string
root *Object
mutex sync.Mutex
}
func (d *OnedriveAPP) Config() driver.Config {
@ -39,6 +43,42 @@ func (d *OnedriveAPP) Drop(ctx context.Context) error {
return nil
}
func (d *OnedriveAPP) GetRoot(ctx context.Context) (model.Obj, error) {
if d.root != nil {
return d.root, nil
}
d.mutex.Lock()
defer d.mutex.Unlock()
root := &Object{
ObjThumb: model.ObjThumb{
Object: model.Object{
ID: "root",
Path: d.RootFolderPath,
Name: "root",
Size: 0,
Modified: d.Modified,
Ctime: d.Modified,
IsFolder: true,
},
},
ParentID: "",
}
if !utils.PathEqual(d.RootFolderPath, "/") {
// get root folder id
url := d.GetMetaUrl(false, d.RootFolderPath)
var resp struct {
Id string `json:"id"`
}
_, err := d.Request(url, http.MethodGet, nil, &resp)
if err != nil {
return nil, err
}
root.ID = resp.Id
}
d.root = root
return d.root, nil
}
func (d *OnedriveAPP) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
files, err := d.getFiles(dir.GetPath())
if err != nil {
@ -57,8 +97,17 @@ func (d *OnedriveAPP) Link(ctx context.Context, file model.Obj, args model.LinkA
if f.File == nil {
return nil, errs.NotFile
}
u := f.Url
if d.CustomHost != "" {
_u, err := url.Parse(f.Url)
if err != nil {
return nil, err
}
_u.Host = d.CustomHost
u = _u.String()
}
return &model.Link{
URL: f.Url,
URL: u,
}, nil
}


@ -13,6 +13,7 @@ type Addition struct {
TenantID string `json:"tenant_id"`
Email string `json:"email"`
ChunkSize int64 `json:"chunk_size" type:"number" default:"5"`
CustomHost string `json:"custom_host" help:"Custom host for onedrive download link"`
}
var config = driver.Config{


@ -71,8 +71,8 @@ func (d *OnedriveAPP) _accessToken() error {
"grant_type": "client_credentials",
"client_id": d.ClientID,
"client_secret": d.ClientSecret,
"resource": "https://graph.microsoft.com/",
"scope": "https://graph.microsoft.com/.default",
"resource": onedriveHostMap[d.Region].Api + "/",
"scope": onedriveHostMap[d.Region].Api + "/.default",
}).Post(url)
if err != nil {
return err
@ -187,13 +187,14 @@ func (d *OnedriveAPP) upBig(ctx context.Context, dstDir model.Obj, stream model.
if err != nil {
return err
}
if res.StatusCode != 201 && res.StatusCode != 202 {
// https://learn.microsoft.com/zh-cn/onedrive/developer/rest-api/api/driveitem_createuploadsession
if res.StatusCode != 201 && res.StatusCode != 202 && res.StatusCode != 200 {
data, _ := io.ReadAll(res.Body)
res.Body.Close()
return errors.New(string(data))
}
res.Body.Close()
up(int(finish * 100 / stream.GetSize()))
up(float64(finish) * 100 / float64(stream.GetSize()))
}
return nil
}


@ -3,7 +3,6 @@ package pikpak
import (
"context"
"fmt"
"io"
"net/http"
"strings"
@ -11,6 +10,7 @@ import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
hash_extend "github.com/alist-org/alist/v3/pkg/utils/hash"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/credentials"
"github.com/aws/aws-sdk-go/aws/session"
@ -123,22 +123,20 @@ func (d *PikPak) Remove(ctx context.Context, obj model.Obj) error {
}
func (d *PikPak) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
tempFile, err := stream.CacheFullInTempFile()
if err != nil {
return err
}
defer func() {
_ = tempFile.Close()
}()
// cal gcid
sha1Str, err := getGcid(tempFile, stream.GetSize())
if err != nil {
return err
}
_, err = tempFile.Seek(0, io.SeekStart)
if err != nil {
return err
hi := stream.GetHash()
sha1Str := hi.GetHash(hash_extend.GCID)
if len(sha1Str) < hash_extend.GCID.Width {
tFile, err := stream.CacheFullInTempFile()
if err != nil {
return err
}
sha1Str, err = utils.HashFile(hash_extend.GCID, tFile, stream.GetSize())
if err != nil {
return err
}
}
var resp UploadTaskData
res, err := d.request("https://api-drive.mypikpak.com/drive/v1/files", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
@ -174,10 +172,13 @@ func (d *PikPak) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
return err
}
uploader := s3manager.NewUploader(ss)
if stream.GetSize() > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
uploader.PartSize = stream.GetSize() / (s3manager.MaxUploadParts - 1)
}
input := &s3manager.UploadInput{
Bucket: &params.Bucket,
Key: &params.Key,
Body: tempFile,
Body: stream,
}
_, err = uploader.UploadWithContext(ctx, input)
return err

Some files were not shown because too many files have changed in this diff.