Compare commits

...

165 Commits

SHA1 Message Date
e3b213c398 feat: add ca-certificates for docker (fix: #1679) 2022-09-15 18:56:30 +08:00
d9f0603271 fix: copy folder between two storage (fix #1670) 2022-09-15 17:58:32 +08:00
86a625cb40 fix: set CHARSET to utf8mb4 if use mysql 2022-09-15 17:14:03 +08:00
f22232de5d chore: baidu_photo rename only duplicate folders 2022-09-15 09:25:20 +08:00
7ad3748a46 feat: update cache after remove instead of clear 2022-09-14 20:28:52 +08:00
66b2562d03 fix: allow force root while fetch dirs (close #1671) 2022-09-14 19:57:39 +08:00
b197322cd8 fix: type of file with name uppercase 2022-09-14 15:14:04 +08:00
9e5ef974a7 fix: send on closed channel 2022-09-14 15:13:02 +08:00
08a001fbd1 feat: add a start func for external calls (#1628) 2022-09-13 20:12:57 +08:00
54ae6dce0b fix(fs/get): rawURL if use proxy (close #1664) 2022-09-13 20:02:57 +08:00
a90ef201c7 fix(189pc,baidu_photo,thunder): single link limit multithreading 2022-09-13 18:44:07 +08:00
2de0da87fa fix: infinite loop if new multi-level folder (close #1661) 2022-09-13 18:34:04 +08:00
53e08e75fe fix(189pc,baidu_photo): source file not closed 2022-09-12 22:45:30 +08:00
6b5236f52e feat: add baidu_photo driver 2022-09-12 17:10:02 +08:00
78e34f0d9f fix: log error if err != nil (close #1651) 2022-09-12 17:01:06 +08:00
6aedd0f425 fix: trim slash suffix of sign 2022-09-11 19:39:24 +08:00
5ff0d850d7 feat(aliyundrive): add doc and video preview api 2022-09-11 19:12:54 +08:00
cd73e34ccc chore: optional other interface 2022-09-11 18:40:19 +08:00
107462e42e chore: change default pdf viewer address 2022-09-11 18:27:28 +08:00
e6c2d22700 workflow: update docs address [skip ci] 2022-09-11 17:17:47 +08:00
889ddcef7e feat(baidu): update upload progress 2022-09-11 17:09:48 +08:00
68a6a0c40e fix(aliyundrive): upload empty file 2022-09-11 17:04:05 +08:00
969018db37 fix: is the root folder required (close #1633) 2022-09-11 16:23:46 +08:00
fba1471ec4 docs: add thunder in storage list [skip ci] 2022-09-11 15:26:47 +08:00
8b72ac7f80 chore: rename xunlei to thunder 2022-09-11 14:30:17 +08:00
77a6aa487b chore: cancel sign if no password 2022-09-11 14:14:14 +08:00
fd99c2197b fix: remove relative path check 2022-09-11 14:05:13 +08:00
9c91f062b9 fix(189pc): some minor problems 2022-09-11 13:18:29 +08:00
537ca030b2 chore: fix xunlei some minor problems 2022-09-11 13:09:36 +08:00
b00dcdec0d docs: Create CODE_OF_CONDUCT.md [skip ci] 2022-09-10 22:23:05 +08:00
57bcd376b4 fix(webdav): incorrect href if base_path isn't root (close #1629) 2022-09-10 19:27:34 +08:00
8d4d8648c6 ci: fetch dev version of alist-web 2022-09-10 19:05:02 +08:00
35d177b67b feat: add xunlei driver 2022-09-10 17:40:30 +08:00
40882443c2 feat: add show admin's username 2022-09-10 16:39:08 +08:00
05f19cad78 ci: add since-days for similarity-analysis [skip ci] 2022-09-10 16:18:10 +08:00
7249f277b2 ci: close issue that inactive more than 60 days [skip ci] 2022-09-10 16:10:39 +08:00
849124f177 fix(quark): default root folder id 2022-09-10 14:38:47 +08:00
f5c7a11da5 chore: add client ip to key of link cache 2022-09-10 14:12:57 +08:00
043a79189d style: uniform use utils.CreateTempFile 2022-09-10 14:11:06 +08:00
5ed43fd17d fix(123): pass ip when getting download link 2022-09-10 13:54:10 +08:00
220cd4d6b8 fix: must update version if upgrade 2022-09-10 13:47:38 +08:00
f692e6c011 fix(s3): copy or move folder (close #1336) 2022-09-10 13:42:03 +08:00
f48365929e fix(pikpak): upload empty file (close #1452) 2022-09-10 13:25:52 +08:00
56219bf096 fix(google): folder judgment missed 2022-09-10 13:09:18 +08:00
5ad3849bb6 fix: if use down proxy url 2022-09-09 20:54:11 +08:00
4af9124162 fix: error if use abs temp path (close #1624) 2022-09-09 18:50:54 +08:00
92fba9a2bf ci: remove commit-hash in version 2022-09-09 16:48:12 +08:00
63569be41d fix: wrong columnName index 2022-09-09 16:44:54 +08:00
46325655e1 ci: fix compress filename [skip ci] 2022-09-09 16:31:43 +08:00
85d13c4c5a ci: static link while build musl 2022-09-09 15:51:20 +08:00
af87131cc0 chore: fix release docker name typo [skip ci] 2022-09-09 14:42:55 +08:00
2505cb40ac docs: update readme 2022-09-09 14:35:05 +08:00
4ec42a55d6 ci: fix release files path 2022-09-09 14:15:06 +08:00
7d3c3df207 ci: fix web release url 2022-09-09 13:34:22 +08:00
362d48aa98 chore: replace main color 2022-09-08 22:21:52 +08:00
dea87d098d build: fix Dockerfile CMD arguments 2022-09-08 21:40:37 +08:00
901a74e252 ci: auto release 2022-09-08 21:22:21 +08:00
8705e48e0a ci: auto build docker image 2022-09-08 20:27:13 +08:00
ed5adc21c2 ci: ignore git commit error 2022-09-08 20:04:19 +08:00
fbaebc020f fix(189pc): wrong time if location incorrect (close #1562) 2022-09-08 20:03:07 +08:00
918ca28d2b feat: add 189cloudPC driver 2022-09-08 15:00:57 +08:00
7a12f1bddd chore: add audio_cover setting 2022-09-07 19:18:19 +08:00
4ea19ae078 chore: replace $version of cdn with webVersion 2022-09-07 18:39:04 +08:00
71d30b6819 chore: rename index to order of storage 2022-09-07 15:55:15 +08:00
53fc2f32d8 ci: ignore cp error [skip ci] 2022-09-06 22:45:17 +08:00
e07654299b fix(quark): upload commit bind resp 2022-09-06 22:41:45 +08:00
f127c959a1 feat: add MediaTrack driver 2022-09-06 17:24:05 +08:00
a24dfddc2a feat: add 189cloud driver 2022-09-06 14:39:21 +08:00
534d8d30fc feat: skip generate lang if no changes 2022-09-05 16:40:51 +08:00
868a4fd49e fix(baidu): duplicate prefix of crack link request 2022-09-05 15:59:28 +08:00
900e71f78f feat: add 139yun driver 2022-09-05 13:35:01 +08:00
3416861cab style: use utils.SliceConvert uniformly 2022-09-05 00:26:04 +08:00
25ae1b8397 feat: add yandex disk driver 2022-09-05 00:24:16 +08:00
3dd4fbd76d feat: add webdav driver 2022-09-04 22:34:54 +08:00
778cee4cdf fix: download sign check 2022-09-04 18:29:41 +08:00
9d20c887df fix: webdav_policy options 2022-09-04 14:48:21 +08:00
a1c86b3350 chore!: change root folder 2022-09-04 13:22:42 +08:00
a4a8739748 feat: add upyun-uss driver 2022-09-04 13:03:10 +08:00
ffba5e0aec feat: add sftp driver (close #1466) 2022-09-04 12:43:52 +08:00
8fd56ef9dd feat: check status before storage call 2022-09-03 22:32:09 +08:00
849de88e68 feat: add ftp driver 2022-09-03 22:07:08 +08:00
c89a462d0c feat: add s3 driver 2022-09-03 21:38:43 +08:00
5d0668b00b feat: add google_drive driver 2022-09-03 20:34:06 +08:00
7da9e33c4d fix: hide access_token in error message of baidu_netdisk 2022-09-03 19:48:11 +08:00
dcc99802ec fix: panic while create empty file 2022-09-03 19:32:44 +08:00
552aba997c fix: default root folder of baidu_netdisk 2022-09-03 10:12:28 +08:00
611457c0e7 feat: add baidu_netdisk driver 2022-09-02 22:46:31 +08:00
decea4a739 feat: add quark driver 2022-09-02 21:36:47 +08:00
0f2425ce53 feat: add teambition driver 2022-09-02 18:24:14 +08:00
bc155af255 chore: remove slash of cdn 2022-09-02 16:02:06 +08:00
2d2a4f5776 docs: add go report card [skip ci] 2022-09-01 22:49:47 +08:00
284274b37e feat: add 123pan driver 2022-09-01 22:13:37 +08:00
7290f9b301 chore: remove global_readme setting 2022-09-01 14:17:58 +08:00
454f563bce fix: task id not update 2022-08-31 22:53:41 +08:00
755f4b83f6 feat: add progress for io copy 2022-08-31 22:41:27 +08:00
8e1ed4015b fix: store storage in map whether error or not 2022-08-31 22:27:04 +08:00
d31faabc24 chore: fix typo 2022-08-31 22:08:12 +08:00
b73dce33aa fix(onedrive,ali): upload progress 2022-08-31 22:04:04 +08:00
7ac1d14eeb style: shorten name operations to op 2022-08-31 21:01:15 +08:00
9ec6d5be7a chore: just use std errors in drivers 2022-08-31 20:58:57 +08:00
817d63597e feat: add aliyundrive driver 2022-08-31 20:46:19 +08:00
102384e170 feat: add pikpak driver 2022-08-31 17:32:57 +08:00
7d407de22e feat: add a driver template 2022-08-31 16:37:00 +08:00
41edac5826 fix: convert driver name while generate lang 2022-08-30 22:11:58 +08:00
f551dc76d0 feat: add onedrive driver 2022-08-30 21:52:06 +08:00
c95a7c2a04 chore: add home_container setting 2022-08-30 19:34:11 +08:00
a6b9dbfbe4 fix: use utils.Log in some places 2022-08-30 16:13:01 +08:00
615e5dd118 fix: put a placeholder file in dist [skip ci] 2022-08-30 15:53:40 +08:00
046bbb3a48 feat: use lumberjack for log rotate 2022-08-30 15:22:54 +08:00
59ec17a353 feat: add driver config in driver info 2022-08-30 14:39:10 +08:00
fec98e7f69 ci: auto build dev version 2022-08-29 22:49:20 +08:00
68a125491b chore: add refresh arg in list func 2022-08-29 19:15:52 +08:00
97d4114e38 fix: check err before check upload 2022-08-29 14:18:43 +08:00
d267c43556 feat: static file router 2022-08-28 23:13:03 +08:00
e5480b99be chore: decode filePath in header 2022-08-28 20:46:33 +08:00
e72a557b96 ci: minimize the event that triggers the workflow 2022-08-28 15:39:51 +08:00
a6f3094c9a chore: graceful restart or stop 2022-08-28 15:34:12 +08:00
5ab5cc327f feat: generate plist for ipa 2022-08-28 15:23:00 +08:00
74007a1d45 chore: add pagination settings 2022-08-27 23:07:48 +08:00
37eb3dd8f5 ci: push main branch directly 2022-08-27 18:51:10 +08:00
fbcf082ca7 feat: auto generate settings lang 2022-08-27 18:35:05 +08:00
cc9ccc4e9b ci: auto generate drivers lang file 2022-08-26 19:06:32 +08:00
7425e001db feat: auto generate drivers language json 2022-08-26 15:08:31 +08:00
d9ee174dd3 feat!: unity iframe preview 2022-08-23 16:50:54 +08:00
e9927806d4 fix(local): return ObjectNotFound if can't find file 2022-08-19 11:02:00 +08:00
38db3508a5 chore: add external_previews setting 2022-08-18 11:34:02 +08:00
d1b5c3e648 docs: fix preview dev change 2022-08-17 14:02:05 +08:00
02e2c809a8 chore: rename some request param 2022-08-14 23:52:14 +08:00
8cd05275f0 chore: change message type 2022-08-14 03:05:30 +08:00
fe0dee1196 docs: fix typo 2022-08-13 15:38:03 +08:00
05d8c27918 chore: rename icon_color to main_color 2022-08-13 15:11:46 +08:00
06e15fc149 feat: encode path of url (close #1351) 2022-08-12 14:51:23 +08:00
0f853c86da fix: do not operate storage in memory if disabled 2022-08-11 21:46:03 +08:00
0fdfd1f2c2 feat: load storages while starting 2022-08-11 21:32:33 +08:00
74f1154e5e feat: add disable option for storage (close #1476) 2022-08-11 21:08:50 +08:00
af884010d1 feat: local storage image thumbnail 2022-08-11 20:32:17 +08:00
fda4db71bf ci: new issue bot 2022-08-10 20:05:39 +08:00
669ccc40a1 chore: change related of fs get api 2022-08-10 10:48:14 +08:00
358212749b chore: add home_icon setting 2022-08-09 18:06:04 +08:00
d8b56042c3 chore: ignore opt_secret while marshal 2022-08-08 16:29:56 +08:00
6f48a0a82a chore: add custom office viewer 2022-08-08 13:03:34 +08:00
2b04cf4ac3 feat: custom hide error message by regexp (close #1468) 2022-08-08 12:53:53 +08:00
d6437a337f feat: add provider to obj get api 2022-08-08 00:58:32 +08:00
61fa6f38a8 feat: add type to fs read api 2022-08-08 00:51:05 +08:00
ccce6a30bb ci: temporarily use self-modified issue-helper 2022-08-07 21:03:37 +08:00
1fd4ebe53e feat: add related objs while get obj 2022-08-07 21:01:29 +08:00
2e8322e99b feat: set cache_expiration for each storage (close #1455) 2022-08-07 13:33:53 +08:00
5b40254e3b chore: fix drivers not import 2022-08-07 13:23:15 +08:00
0df3473337 feat: use cobra and add some command 2022-08-07 13:09:59 +08:00
2b5da3ef34 feat: cancel 2fa api 2022-08-07 11:59:33 +08:00
d01958a6bf chore: and otp to current user resp 2022-08-06 17:21:32 +08:00
a6ed4afdae feat: 2fa/otp support 2022-08-06 01:22:13 +08:00
b51e664543 chore: go fmt 2022-08-03 14:26:59 +08:00
721f18a7f4 feat: fs other api 2022-08-03 14:14:37 +08:00
2a68c3cc7b feat: add thumbnail to list resp 2022-08-03 13:03:45 +08:00
71a6ebaf43 chore: dev test 2022-08-02 22:16:58 +08:00
c7128133d6 chore: rename remove to delete 2022-07-31 21:42:01 +08:00
829ef271e3 chore(deps): upgrade cache pkg 2022-07-31 21:23:19 +08:00
cb06d3a19a feat: remove and clear task 2022-07-31 21:21:54 +08:00
be452aafde chore: fix err nil pointer 2022-07-30 22:04:21 +08:00
33b7d75d8a chore: if file exist and size = 0, delete it while upload 2022-07-30 20:04:21 +08:00
8c27ca3e8b chore: import fmt 2022-07-29 18:22:42 +08:00
eface83716 chore: set initial guest permission 0 2022-07-27 21:53:21 +08:00
212dbb277e fix: empty storage virtual file 2022-07-27 20:57:12 +08:00
53fd09814a feat: user and meta get api 2022-07-27 17:41:25 +08:00
256 changed files with 17022 additions and 937 deletions

@@ -12,9 +12,10 @@ body:
label: Please make sure of the following things
description: You may select more than one, even select all.
options:
- label: I have read the [documentation](https://alist-doc.nn.ci).
- label: I have read the [documentation](https://alist.nn.ci).
- label: I'm sure there are no duplicate issues or discussions.
- label: I'm sure it's due to `alist` and not something else(such as `Dependencies` or `Operational`).
- label: I'm sure I'm using the latest version
- type: input
id: version
attributes:

@@ -7,7 +7,7 @@ body:
label: Please make sure of the following things
description: You may select more than one, even select all.
options:
- label: I have read the [documentation](https://alist-doc.nn.ci).
- label: I have read the [documentation](https://alist.nn.ci).
- label: I'm sure there are no duplicate issues or discussions.
- label: I'm sure this feature is not implemented.
- label: I'm sure it's a reasonable and popular requirement.

.github/workflows/auto_lang.yml (new file)
@@ -0,0 +1,64 @@
name: auto_lang
on:
push:
branches:
- 'main'
paths:
- 'drivers/**'
- 'internal/bootstrap/data/setting.go'
workflow_dispatch:
jobs:
auto_lang:
strategy:
matrix:
platform: [ ubuntu-latest ]
go-version: [ 1.19 ]
name: auto generate lang.json
runs-on: ${{ matrix.platform }}
steps:
- name: Setup go
uses: actions/setup-go@v2
with:
go-version: ${{ matrix.go-version }}
- name: Checkout alist
uses: actions/checkout@v2
with:
path: alist
- name: Checkout alist-web
uses: actions/checkout@v3
with:
repository: 'alist-org/alist-web'
ref: main
persist-credentials: false
fetch-depth: 0
path: alist-web
- name: Generate lang
run: |
cd alist
go run ./main.go lang
cd ..
- name: Copy lang file
run: |
cp -f ./alist/lang/*.json ./alist-web/src/lang/en/ 2>/dev/null || :
- name: Commit git
run: |
cd alist-web
git add .
git config --local user.email "i@nn.ci"
git config --local user.name "Noah Hsu"
git commit -m "chore: auto update i18n file" -a 2>/dev/null || :
cd ..
- name: Push lang files
uses: ad-m/github-push-action@master
with:
github_token: ${{ secrets.MY_TOKEN }}
branch: main
directory: alist-web
repository: alist-org/alist-web

.github/workflows/build.yml (new file)
@@ -0,0 +1,40 @@
name: build
on:
push:
branches: [ '**' ]
pull_request:
branches: [ '**' ]
jobs:
build:
strategy:
matrix:
platform: [ubuntu-latest]
go-version: [1.18]
name: Build
runs-on: ${{ matrix.platform }}
steps:
- name: Setup Go
uses: actions/setup-go@v2
with:
go-version: ${{ matrix.go-version }}
- name: Checkout
uses: actions/checkout@v3
- name: Install dependencies
run: |
docker pull techknowlogick/xgo:latest
go install src.techknowlogick.com/xgo@latest
sudo apt install upx
- name: Build
run: |
bash build.sh dev
- name: Upload artifact
uses: actions/upload-artifact@v2
with:
name: alist
path: dist

.github/workflows/build_docker.yml (new file)
@@ -0,0 +1,39 @@
name: build_docker
on:
push:
branches: [ main ]
jobs:
build_docker:
name: Docker
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Docker meta
id: meta
uses: docker/metadata-action@v3
with:
images: xhofe/alist
- name: Replace release with dev
run: |
sed -i 's/release/dev/g' Dockerfile
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to DockerHub
uses: docker/login-action@v1
with:
username: xhofe
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
id: docker_build
uses: docker/build-push-action@v2
with:
context: .
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
platforms: linux/amd64,linux/arm64

@@ -0,0 +1,17 @@
name: Check inactive
on:
schedule:
- cron: "0 0 1 * *"
jobs:
check-inactive:
runs-on: ubuntu-latest
steps:
- name: check-inactive
uses: actions-cool/issues-helper@v3
with:
actions: 'check-inactive'
token: ${{ secrets.GITHUB_TOKEN }}
inactive-day: 30
body: Hello, this issue has been inactive for more than 30 days and will be closed if inactive for another 30 days.

@@ -0,0 +1,20 @@
name: Close inactive
on:
schedule:
- cron: "0 0 */7 * *"
jobs:
close-inactive:
runs-on: ubuntu-latest
steps:
- name: close-issues
uses: actions-cool/issues-helper@v3
with:
actions: 'close-issues'
token: ${{ secrets.GITHUB_TOKEN }}
labels: 'inactive'
inactive-day: 30
close-reason: 'not_planned'
body: |
Hello @${{ github.event.issue.user.login }}, this issue was closed due to inactive more than 60 days. You can reopen or recreate it if you think it should continue.

@@ -1,20 +1,21 @@
name: Check need info
name: Close need info
on:
schedule:
- cron: "0 0 */7 * *"
jobs:
check-need-info:
close-need-info:
runs-on: ubuntu-latest
steps:
- name: close-issues
uses: actions-cool/issues-helper@v2
uses: actions-cool/issues-helper@v3
with:
actions: 'close-issues'
token: ${{ secrets.GITHUB_TOKEN }}
labels: 'question'
inactive-day: 7
close-reason: 'not_planned'
body: |
Hello @${{ github.event.issue.user.login }}, this issue was closed due to no activities in 7 days.
你好 @${{ github.event.issue.user.login }}此issue因超过7天未回复被关闭。

@@ -19,7 +19,7 @@ jobs:
Hello @${{ github.event.issue.user.login }}, your issue is a duplicate and will be closed.
你好 @${{ github.event.issue.user.login }}你的issue是重复的将被关闭。
- name: Close issue
uses: actions-cool/issues-helper@v2
uses: actions-cool/issues-helper@v3
with:
actions: 'close-issue'
token: ${{ secrets.GITHUB_TOKEN }}

@@ -19,7 +19,7 @@ jobs:
Hello @${{ github.event.issue.user.login }}, your issue is invalid and will be closed.
你好 @${{ github.event.issue.user.login }}你的issue无效将被关闭。
- name: Close issue
uses: actions-cool/issues-helper@v2
uses: actions-cool/issues-helper@v3
with:
actions: 'close-issue'
token: ${{ secrets.GITHUB_TOKEN }}

@@ -1,16 +0,0 @@
name: Issue Month Statistics
on:
schedule:
- cron: "0 1 1 * *"
jobs:
month-statistics:
runs-on: ubuntu-latest
steps:
- name: month-statistics
uses: actions-cool/issues-month-statistics@v1
with:
count-lables: true
count-comments: true
emoji: 'eyes'

.github/workflows/issue_similarity.yml (new file)
@@ -0,0 +1,19 @@
name: Issues Similarity Analysis
on:
issues:
types: [opened, edited]
jobs:
similarity-analysis:
runs-on: ubuntu-latest
steps:
- name: analysis
uses: actions-cool/issues-similarity-analysis@v1
with:
filter-threshold: 0.5
comment-title: '### See'
comment-body: '${index}. ${similarity} #${number}'
show-footer: false
show-mentioned: true
since-days: 730

.github/workflows/issue_translate.yml (new file)
@@ -0,0 +1,13 @@
name: Translation Helper
on:
pull_request_target:
types: [opened]
issues:
types: [opened]
jobs:
translate:
runs-on: ubuntu-latest
steps:
- uses: actions-cool/translation-helper@v1.2.0

@@ -19,7 +19,7 @@ jobs:
Hello @${{ github.event.issue.user.login }}, this issue will not be worked on and will be closed.
你好 @${{ github.event.issue.user.login }},这不会被处理,将被关闭。
- name: Close issue
uses: actions-cool/issues-helper@v2
uses: actions-cool/issues-helper@v3
with:
actions: 'close-issue'
token: ${{ secrets.GITHUB_TOKEN }}

.github/workflows/release.yml (new file)
@@ -0,0 +1,52 @@
name: release
on:
push:
tags:
- '*'
jobs:
changelog:
name: Create Release
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
with:
fetch-depth: 0
- run: npx changelogithub # or changelogithub@0.12 if ensure the stable result
env:
GITHUB_TOKEN: ${{secrets.MY_TOKEN}}
release:
needs: changelog
strategy:
matrix:
platform: [ubuntu-latest]
go-version: [1.18]
name: Release
runs-on: ${{ matrix.platform }}
steps:
- name: Setup Go
uses: actions/setup-go@v2
with:
go-version: ${{ matrix.go-version }}
- name: Checkout
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Install dependencies
run: |
docker pull techknowlogick/xgo:latest
go install src.techknowlogick.com/xgo@latest
sudo apt install upx
- name: Build
run: |
bash build.sh release
- name: Release
uses: softprops/action-gh-release@v1
with:
files: build/compress/*

.github/workflows/release_docker.yml (new file)
@@ -0,0 +1,42 @@
name: release_docker
on:
push:
tags:
- '*'
jobs:
release_docker:
name: Docker
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Docker meta
id: meta
uses: docker/metadata-action@v3
with:
images: xhofe/alist
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to DockerHub
uses: docker/login-action@v1
with:
username: xhofe
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
id: docker_build
uses: docker/build-push-action@v2
with:
context: .
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x

.gitignore
@@ -1,7 +1,7 @@
.idea/
.DS_Store
output/
dist/
/dist/
# Binaries for programs and plugins
*.exe
@@ -21,11 +21,9 @@ dist/
# Dependency directories (remove the comment below to include it)
# vendor/
bin/*
/alist
/alist.exe
*.json
public/*.html
public/assets/
public/public/
/data
log/
data/
log/
lang/
public/dist/*
!public/dist/README.md

CODE_OF_CONDUCT.md (new file)
@@ -0,0 +1,128 @@
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
i@nn.ci.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.

@@ -14,21 +14,24 @@ Prerequisites:
Clone `alist` and `alist-web` anywhere:
```shell
$ git clone https://github.com/Xhofe/alist.git
$ git clone https://github.com/Xhofe/alist-web.git
$ git clone https://github.com/alist-org/alist.git
$ git clone --recurse-submodules https://github.com/alist-org/alist-web.git
```
You should switch to the `main` branch for development.
## Preview your change
### backend
```shell
$ go run cmd/alist.go
$ go run main.go
```
### frontend
```shell
$ yarn dev
$ pnpm dev
```
## Add a new driver
Copy `drivers/template` folder and rename it, and follow the comments in it.
## Create a commit
Commit messages should be well formatted, and to make that "standardized".
@@ -73,7 +76,6 @@ Must be one of the following:
* **chore**: Changes to the build process or auxiliary tools and libraries such as documentation
generation
* **release**: Release a new version
* **workflow**: Workflow related file modification
### Scope
The scope could be anything specifying place of the commit change. For example `$location`,
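
Two commit titles from this changeset, copied verbatim from the commit list above, illustrate the expected `type(scope): subject` format:

```
feat(aliyundrive): add doc and video preview api
fix(webdav): incorrect href if base_path isn't root (close #1629)
```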

Dockerfile (new file)
@@ -0,0 +1,15 @@
FROM alpine:edge as builder
LABEL stage=go-builder
WORKDIR /app/
COPY ./ ./
RUN apk add --no-cache bash git go gcc musl-dev curl; \
bash build.sh release docker
FROM alpine:edge
LABEL MAINTAINER="i@nn.ci"
VOLUME /opt/alist/data/
WORKDIR /opt/alist/
COPY --from=builder /app/bin/alist ./
RUN apk add ca-certificates
EXPOSE 5244
CMD [ "./alist", "server", "--no-prefix" ]

@@ -1,21 +1,82 @@
<div align="center">
<a href="https://alist.nn.ci"><img height="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
<p><em>🗂A file list program that supports multiple storage, powered by Gin and React.</em></p>
<a href="https://github.com/Xhofe/alist/releases"><img src="https://img.shields.io/github/release/Xhofe/alist?style=flat-square" alt="latest version"></a>
<a href="https://github.com/Xhofe/alist/discussions"><img src="https://img.shields.io/github/discussions/Xhofe/alist?color=%23ED8936&style=flat-square" alt="discussions"></a>
<a href="https://github.com/Xhofe/alist/actions?query=workflow%3ABuild"><img src="https://img.shields.io/github/workflow/status/Xhofe/alist/build?style=flat-square" alt="Build status"></a>
<a href="https://github.com/Xhofe/alist/releases"><img src="https://img.shields.io/github/downloads/Xhofe/alist/total?style=flat-square&color=%239F7AEA" alt="Downloads"></a>
<a href="https://github.com/Xhofe/alist/blob/v2/LICENSE"><img src="https://img.shields.io/github/license/Xhofe/alist?style=flat-square" alt="License"></a>
<p><em>🗂A file list program that supports multiple storage, powered by Gin and Solidjs.</em></p>
<a href="https://goreportcard.com/report/github.com/alist-org/alist/v3">
<img src="https://goreportcard.com/badge/github.com/alist-org/alist/v3" alt="latest version" />
</a>
<a href="https://github.com/Xhofe/alist/blob/main/LICENSE">
<img src="https://img.shields.io/github/license/Xhofe/alist" alt="License" />
</a>
<a href="https://github.com/Xhofe/alist/discussions">
<img src="https://img.shields.io/github/discussions/Xhofe/alist?color=%23ED8936" alt="discussions" />
</a>
<a href="https://github.com/Xhofe/alist/actions?query=workflow%3ABuild">
<img src="https://img.shields.io/github/workflow/status/Xhofe/alist/build" alt="Build status" />
</a>
<a href="https://github.com/Xhofe/alist/releases">
<img src="https://img.shields.io/github/release/Xhofe/alist" alt="latest version" />
</a>
<a href="https://github.com/Xhofe/alist/releases">
<img src="https://img.shields.io/github/downloads/Xhofe/alist/total?color=%239F7AEA" alt="Downloads" />
</a>
<a title="Crowdin" target="_blank" href="https://crwd.in/alist">
<img src="https://badges.crowdin.net/alist/localized.svg">
</a>
<a href="https://pay.xhofe.top">
<img src="https://img.shields.io/badge/%24-donate-ff69b4.svg?style=flat-square" alt="donate">
<img src="https://img.shields.io/badge/%24-sponsor-ff69b4.svg" alt="sponsor" />
</a>
</div>
---
[Contributors](./CONTRIBUTORS.md) | [Contributing](./CONTRIBUTING.md)
English | [中文](./README_cn.md) | [Contributors](./CONTRIBUTORS.md) | [Contributing](./CONTRIBUTING.md) | [CODE_OF_CONDUCT](./CODE_OF_CONDUCT.md)
> ### New version is under development. Checkout [v2](https://github.com/alist-org/alist/tree/v2) branch for latest version.
## Features
- [x] Multiple storage
- [x] Local storage
- [x] [Aliyundrive](https://www.aliyundrive.com/)
- [x] OneDrive / Sharepoint ([global](https://www.office.com/), [cn](https://portal.partner.microsoftonline.cn),de,us)
- [x] [189cloud](https://cloud.189.cn) (Personal, Family)
- [x] [GoogleDrive](https://drive.google.com/)
- [x] [123pan](https://www.123pan.com/)
- [x] FTP / SFTP
- [x] [PikPak](https://www.mypikpak.com/)
- [x] [S3](https://aws.amazon.com/s3/)
- [x] [UPYUN Storage Service](https://www.upyun.com/products/file-storage)
- [x] WebDav(Support OneDrive/SharePoint without API)
- [x] Teambition([China](https://www.teambition.com/ ),[International](https://us.teambition.com/ ))
- [x] [Mediatrack](https://www.mediatrack.cn/)
- [x] [139yun](https://yun.139.com/) (Personal, Family)
- [x] [YandexDisk](https://disk.yandex.com/)
- [x] [BaiduNetdisk](http://pan.baidu.com/)
- [x] [Quark](https://pan.quark.cn)
- [x] [Thunder](https://pan.xunlei.com)
- [x] Easy to deploy and out-of-the-box
- [x] File preview (PDF, markdown, code, plain text, ...)
- [x] Image preview in gallery mode
- [x] Video and audio preview, support lyrics and subtitles
- [x] Office documents preview (docx, pptx, xlsx, ...)
- [x] `README.md` preview rendering
- [x] File permalink copy and direct file download
- [x] Dark mode
- [x] I18n
- [x] Protected routes (password protection and authentication)
- [x] WebDav (see https://alist.nn.ci/guide/webdav.html for details)
- [x] [Docker Deploy](https://hub.docker.com/r/xhofe/alist)
- [x] Cloudflare workers proxy
- [x] File/Folder package download
- [x] Web upload(Can allow visitors to upload), delete, mkdir, rename, move and copy
- [x] Offline download
- [x] Copy files between two storage
## Document
<https://alist.nn.ci/>
## Demo
<https://pan.nn.ci>
## Discussion

README_cn.md (new file)
@@ -0,0 +1,103 @@
<div align="center">
<a href="https://alist.nn.ci"><img height="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
<p><em>🗂一个支持多存储的文件列表程序,使用 Gin 和 Solidjs。</em></p>
<a href="https://goreportcard.com/report/github.com/alist-org/alist/v3">
<img src="https://goreportcard.com/badge/github.com/alist-org/alist/v3" alt="latest version" />
</a>
<a href="https://github.com/Xhofe/alist/blob/main/LICENSE">
<img src="https://img.shields.io/github/license/Xhofe/alist" alt="License" />
</a>
<a href="https://github.com/Xhofe/alist/discussions">
<img src="https://img.shields.io/github/discussions/Xhofe/alist?color=%23ED8936" alt="discussions" />
</a>
<a href="https://github.com/Xhofe/alist/actions?query=workflow%3ABuild">
<img src="https://img.shields.io/github/workflow/status/Xhofe/alist/build" alt="Build status" />
</a>
<a href="https://github.com/Xhofe/alist/releases">
<img src="https://img.shields.io/github/release/Xhofe/alist" alt="latest version" />
</a>
<a href="https://github.com/Xhofe/alist/releases">
<img src="https://img.shields.io/github/downloads/Xhofe/alist/total?color=%239F7AEA" alt="Downloads" />
</a>
<a title="Crowdin" target="_blank" href="https://crwd.in/alist">
<img src="https://badges.crowdin.net/alist/localized.svg">
</a>
<a href="https://pay.xhofe.top">
<img src="https://img.shields.io/badge/%24-sponsor-ff69b4.svg" alt="sponsor" />
</a>
</div>
---
[English](./README.md) | 中文 | [Contributors](./CONTRIBUTORS.md) | [Contributing](./CONTRIBUTING.md) | [CODE_OF_CONDUCT](./CODE_OF_CONDUCT.md)
## Features
- [x] 多种存储
- [x] 本地存储
- [x] [阿里云盘](https://www.aliyundrive.com/)
- [x] OneDrive / Sharepoint[国际版](https://www.office.com/), [世纪互联](https://portal.partner.microsoftonline.cn),de,us
- [x] [天翼云盘](https://cloud.189.cn) (个人云, 家庭云)
- [x] [GoogleDrive](https://drive.google.com/)
- [x] [123云盘](https://www.123pan.com/)
- [x] FTP / SFTP
- [x] [PikPak](https://www.mypikpak.com/)
- [x] [S3](https://aws.amazon.com/cn/s3/)
- [x] [又拍云对象存储](https://www.upyun.com/products/file-storage)
- [x] WebDav(支持无API的OneDrive/SharePoint)
- [x] Teambition[中国](https://www.teambition.com/ )[国际](https://us.teambition.com/ )
- [x] [分秒帧](https://www.mediatrack.cn/)
- [x] [和彩云](https://yun.139.com/) (个人云, 家庭云)
- [x] [Yandex.Disk](https://disk.yandex.com/)
- [x] [百度网盘](http://pan.baidu.com/)
- [x] [夸克网盘](https://pan.quark.cn)
- [x] [迅雷网盘](https://pan.xunlei.com)
- [x] 部署方便,开箱即用
- [x] 文件预览PDF、markdown、代码、纯文本……
- [x] 画廊模式下的图像预览
- [x] 视频和音频预览,支持歌词和字幕
- [x] Office 文档预览docx、pptx、xlsx、...
- [x] `README.md` 预览渲染
- [x] 文件永久链接复制和直接文件下载
- [x] 黑暗模式
- [x] 国际化
- [x] 受保护的路由(密码保护和身份验证)
- [x] WebDav (具体见 https://alist.nn.ci/zh/guide/webdav.html)
- [x] [Docker 部署](https://hub.docker.com/r/xhofe/alist)
- [x] Cloudflare workers 中转
- [x] 文件/文件夹打包下载
- [x] 网页上传(可以允许访客上传),删除,新建文件夹,重命名,移动,复制
- [x] 离线下载
- [x] 跨存储复制文件
## Document
<https://alist.nn.ci/zh/>
## Demo
<https://pan.nn.ci>
## Discussion
一般问题请到[讨论论坛](https://github.com/Xhofe/alist/discussions) **issue仅针对错误报告和功能请求。**
## Special sponsors
- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.la/)
- [KinhDown 百度云盘不限速下载永久免费已稳定运行3年非常可靠!](https://kinhdown.com/?Type=Tutorials)
- [JetBrains: Essential tools for software developers and teams](https://www.jetbrains.com/)
## 许可
`AList` 是在 AGPL-3.0 许可下许可的开源软件。
## 免责声明
- 本程序为免费开源项目旨在分享网盘文件方便下载以及学习golang使用时请遵守相关法律法规请勿滥用
- 本程序通过调用官方sdk/接口实现,无破坏官方接口行为;
- 本程序仅做302重定向/流量转发,不拦截、存储、篡改任何用户数据;
- 在使用本程序之前你应了解并承担相应的风险包括但不限于账号被ban下载限速等与本程序无关
- 如有侵权,请通过[邮件](mailto:i@nn.ci)与我联系,会及时处理。
---
> [@博客](https://nn.ci/) · [@GitHub](https://github.com/Xhofe) · [@Telegram群](https://t.me/alist_chat) · [@QQ群](https://jq.qq.com/?_wv=1027&k=YJJj2Gwb)

build.sh (new file)
@@ -0,0 +1,130 @@
appName="alist"
builtAt="$(date +'%F %T %z')"
goVersion=$(go version | sed 's/go version //')
gitAuthor=$(git show -s --format='format:%aN <%ae>' HEAD)
gitCommit=$(git log --pretty=format:"%h" -1)
if [ "$1" = "dev" ]; then
version="dev"
webVersion="dev"
else
version=$(git describe --abbrev=0 --tags)
webVersion=$(wget -qO- -t1 -T2 "https://api.github.com/repos/alist-org/alist-web/releases/latest" | grep "tag_name" | head -n 1 | awk -F ":" '{print $2}' | sed 's/\"//g;s/,//g;s/ //g')
fi
echo "build version: $gitTag"
ldflags="\
-w -s \
-X 'github.com/alist-org/alist/v3/internal/conf.BuiltAt=$builtAt' \
-X 'github.com/alist-org/alist/v3/internal/conf.GoVersion=$goVersion' \
-X 'github.com/alist-org/alist/v3/internal/conf.GitAuthor=$gitAuthor' \
-X 'github.com/alist-org/alist/v3/internal/conf.GitCommit=$gitCommit' \
-X 'github.com/alist-org/alist/v3/internal/conf.Version=$version' \
-X 'github.com/alist-org/alist/v3/internal/conf.WebVersion=$webVersion' \
"
FetchWebDev() {
curl -L https://codeload.github.com/alist-org/web-dist/tar.gz/refs/heads/dev -o web-dist-dev.tar.gz
tar -zxvf web-dist-dev.tar.gz
rm -rf public/dist
mv -f web-dist-dev/dist public
rm -rf web-dist-dev web-dist-dev.tar.gz
}
FetchWebRelease() {
curl -L https://github.com/alist-org/alist-web/releases/latest/download/dist.tar.gz -o dist.tar.gz
tar -zxvf dist.tar.gz
rm -rf public/dist
mv -f dist public
rm -rf dist.tar.gz
}
BuildDev() {
rm -rf .git/
xgo -targets=linux/amd64,windows/amd64,darwin/amd64 -out "$appName" -ldflags="$ldflags" -tags=jsoniter .
mkdir -p "dist"
mv alist-* dist
cd dist
upx -9 ./alist-linux*
upx -9 ./alist-windows*
find . -type f -print0 | xargs -0 md5sum >md5.txt
cat md5.txt
}
BuildDocker() {
go build -o ./bin/alist -ldflags="$ldflags" -tags=jsoniter .
}
BuildRelease() {
rm -rf .git/
mkdir -p "build"
muslflags="--extldflags '-static -fpic' $ldflags"
BASE="https://musl.nn.ci/"
FILES=(x86_64-linux-musl-cross aarch64-linux-musl-cross arm-linux-musleabihf-cross mips-linux-musl-cross mips64-linux-musl-cross mips64el-linux-musl-cross mipsel-linux-musl-cross powerpc64le-linux-musl-cross s390x-linux-musl-cross)
for i in "${FILES[@]}"; do
url="${BASE}${i}.tgz"
curl -L -o "${i}.tgz" "${url}"
sudo tar xf "${i}.tgz" --strip-components 1 -C /usr/local
done
OS_ARCHES=(linux-musl-amd64 linux-musl-arm64 linux-musl-arm linux-musl-mips linux-musl-mips64 linux-musl-mips64le linux-musl-mipsle linux-musl-ppc64le linux-musl-s390x)
CGO_ARGS=(x86_64-linux-musl-gcc aarch64-linux-musl-gcc arm-linux-musleabihf-gcc mips-linux-musl-gcc mips64-linux-musl-gcc mips64el-linux-musl-gcc mipsel-linux-musl-gcc powerpc64le-linux-musl-gcc s390x-linux-musl-gcc)
for i in "${!OS_ARCHES[@]}"; do
os_arch=${OS_ARCHES[$i]}
cgo_cc=${CGO_ARGS[$i]}
echo building for ${os_arch}
export GOOS=${os_arch%%-*}
export GOARCH=${os_arch##*-}
export CC=${cgo_cc}
export CGO_ENABLED=1
go build -o ./build/$appName-$os_arch -ldflags="$muslflags" -tags=jsoniter .
done
xgo -out "$appName" -ldflags="$ldflags" -tags=jsoniter .
# why? Because some target platforms seem to have issues with upx compression
upx -9 ./alist-linux-amd64
upx -9 ./alist-windows*
mv alist-* build
}
MakeRelease() {
cd build
mkdir compress
for i in $(find . -type f -name "$appName-linux-*"); do
cp "$i" alist
tar -czvf compress/"$i".tar.gz alist
rm -f alist
done
for i in $(find . -type f -name "$appName-darwin-*"); do
cp "$i" alist
tar -czvf compress/"$i".tar.gz alist
rm -f alist
done
for i in $(find . -type f -name "$appName-windows-*"); do
cp "$i" alist.exe
zip compress/$(echo $i | sed 's/\.[^.]*$//').zip alist.exe
rm -f alist.exe
done
cd compress
find . -type f -print0 | xargs -0 md5sum >md5.txt
cat md5.txt
cd ../..
}
if [ "$1" = "dev" ]; then
FetchWebDev
if [ "$2" = "docker" ]; then
BuildDocker
else
BuildDev
fi
elif [ "$1" = "release" ]; then
FetchWebRelease
if [ "$2" = "docker" ]; then
BuildDocker
else
BuildRelease
MakeRelease
fi
else
echo -e "Parameter error"
fi
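
The `-X` linker flags above inject build metadata into package-level string variables in `internal/conf`. A minimal sketch of the receiving side, inferred from the flag names here and from `cmd/version.go` below; the declarations and defaults are assumptions, not the actual file:

```go
package conf

// Overwritten at link time by build.sh, e.g.
//   -X 'github.com/alist-org/alist/v3/internal/conf.Version=v3.0.0'
// The defaults below are illustrative placeholders only.
var (
	BuiltAt    string
	GoVersion  string
	GitAuthor  string
	GitCommit  string
	Version    = "dev"
	WebVersion = "dev"
)
```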

cmd/admin.go (new file)
@@ -0,0 +1,40 @@
/*
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
*/
package cmd
import (
"github.com/alist-org/alist/v3/internal/db"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/spf13/cobra"
)
// passwordCmd represents the password command
var passwordCmd = &cobra.Command{
Use: "admin",
Aliases: []string{"password"},
Short: "Show admin user's info",
Run: func(cmd *cobra.Command, args []string) {
Init()
admin, err := db.GetAdmin()
if err != nil {
utils.Log.Errorf("failed get admin user: %+v", err)
} else {
utils.Log.Infof("admin user's info: \nusername: %s\npassword: %s", admin.Username, admin.Password)
}
},
}
func init() {
rootCmd.AddCommand(passwordCmd)
// Here you will define your flags and configuration settings.
// Cobra supports Persistent Flags which will work for this command
// and all subcommands, e.g.:
// passwordCmd.PersistentFlags().String("foo", "", "A help for foo")
// Cobra supports local flags which will only run when this command
// is called directly, e.g.:
// passwordCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
}

@@ -1,59 +0,0 @@
package main
import (
"flag"
"fmt"
bootstrap2 "github.com/alist-org/alist/v3/internal/bootstrap"
"github.com/alist-org/alist/v3/internal/bootstrap/data"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/server"
"os"
"github.com/alist-org/alist/v3/cmd/args"
_ "github.com/alist-org/alist/v3/drivers"
"github.com/gin-gonic/gin"
log "github.com/sirupsen/logrus"
)
func init() {
flag.StringVar(&args.Config, "conf", "data/config.json", "config file")
flag.BoolVar(&args.Debug, "debug", false, "start with debug mode")
flag.BoolVar(&args.Version, "version", false, "print version info")
flag.BoolVar(&args.Password, "password", false, "print current password")
flag.BoolVar(&args.NoPrefix, "no-prefix", false, "disable env prefix")
flag.BoolVar(&args.Dev, "dev", false, "start with dev mode")
flag.Parse()
}
func Init() {
if args.Version {
fmt.Printf("Built At: %s\nGo Version: %s\nAuthor: %s\nCommit ID: %s\nVersion: %s\nWebVersion: %s\n",
conf.BuiltAt, conf.GoVersion, conf.GitAuthor, conf.GitCommit, conf.Version, conf.WebVersion)
os.Exit(0)
}
bootstrap2.InitConfig()
bootstrap2.Log()
bootstrap2.InitDB()
data.InitData()
bootstrap2.InitAria2()
}
func main() {
Init()
if !args.Debug && !args.Dev {
gin.SetMode(gin.ReleaseMode)
}
r := gin.New()
r.Use(gin.LoggerWithWriter(log.StandardLogger().Out), gin.RecoveryWithWriter(log.StandardLogger().Out))
server.Init(r)
base := fmt.Sprintf("%s:%d", conf.Conf.Address, conf.Conf.Port)
log.Infof("start server @ %s", base)
var err error
if conf.Conf.Scheme.Https {
err = r.RunTLS(base, conf.Conf.Scheme.CertFile, conf.Conf.Scheme.KeyFile)
} else {
err = r.Run(base)
}
if err != nil {
log.Errorf("failed to start: %s", err.Error())
}
}

cmd/cancel2FA.go (new file)
@@ -0,0 +1,42 @@
/*
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
*/
package cmd
import (
"github.com/alist-org/alist/v3/internal/db"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/spf13/cobra"
)
// cancel2FACmd represents the delete2fa command
var cancel2FACmd = &cobra.Command{
Use: "cancel2fa",
Short: "Delete 2FA of admin user",
Run: func(cmd *cobra.Command, args []string) {
Init()
admin, err := db.GetAdmin()
if err != nil {
utils.Log.Errorf("failed to get admin user: %+v", err)
} else {
err := db.Cancel2FAByUser(admin)
if err != nil {
utils.Log.Errorf("failed to cancel 2FA: %+v", err)
}
}
},
}
func init() {
rootCmd.AddCommand(cancel2FACmd)
// Here you will define your flags and configuration settings.
// Cobra supports Persistent Flags which will work for this command
// and all subcommands, e.g.:
// cancel2FACmd.PersistentFlags().String("foo", "", "A help for foo")
// Cobra supports local flags which will only run when this command
// is called directly, e.g.:
// cancel2FACmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
}

cmd/common.go (new file)
@@ -0,0 +1,13 @@
package cmd
import (
"github.com/alist-org/alist/v3/internal/bootstrap"
"github.com/alist-org/alist/v3/internal/bootstrap/data"
)
func Init() {
bootstrap.InitConfig()
bootstrap.Log()
bootstrap.InitDB()
data.InitData()
}

@@ -1,10 +1,8 @@
package args
package flags
var (
Config string // config file
Debug bool
Version bool
Password bool
NoPrefix bool
Dev bool
)

cmd/lang.go (new file)
@@ -0,0 +1,152 @@
/*
Package cmd
Copyright © 2022 Noah Hsu<i@nn.ci>
*/
package cmd
import (
"fmt"
"io"
"os"
"reflect"
"strings"
_ "github.com/alist-org/alist/v3/drivers"
"github.com/alist-org/alist/v3/internal/bootstrap/data"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/pkg/utils"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
)
type KV[V any] map[string]V
type Drivers KV[KV[interface{}]]
func firstUpper(s string) string {
if s == "" {
return ""
}
return strings.ToUpper(s[:1]) + s[1:]
}
func convert(s string) string {
ss := strings.Split(s, "_")
ans := strings.Join(ss, " ")
return firstUpper(ans)
}
func writeFile(name string, data interface{}) {
f, err := os.Open(fmt.Sprintf("../alist-web/src/lang/en/%s.json", name))
if err != nil {
log.Errorf("failed to open %s.json: %+v", name, err)
return
}
defer f.Close()
content, err := io.ReadAll(f)
if err != nil {
log.Errorf("failed to read %s.json: %+v", name, err)
return
}
oldData := make(map[string]interface{})
newData := make(map[string]interface{})
err = utils.Json.Unmarshal(content, &oldData)
if err != nil {
log.Errorf("failed to unmarshal %s.json: %+v", name, err)
return
}
content, err = utils.Json.Marshal(data)
if err != nil {
log.Errorf("failed to marshal json: %+v", err)
return
}
err = utils.Json.Unmarshal(content, &newData)
if err != nil {
log.Errorf("failed to unmarshal json: %+v", err)
return
}
if reflect.DeepEqual(oldData, newData) {
log.Infof("%s.json no changed, skip", name)
} else {
log.Infof("%s.json changed, update file", name)
//log.Infof("old: %+v\nnew:%+v", oldData, data)
utils.WriteJsonToFile(fmt.Sprintf("lang/%s.json", name), data)
}
}
func generateDriversJson() {
drivers := make(Drivers)
drivers["drivers"] = make(KV[interface{}])
driverInfoMap := op.GetDriverInfoMap()
for k, v := range driverInfoMap {
drivers["drivers"][k] = convert(k)
items := make(KV[interface{}])
for i := range v.Additional {
item := v.Additional[i]
items[item.Name] = convert(item.Name)
if item.Help != "" {
items[fmt.Sprintf("%s-tips", item.Name)] = item.Help
}
if item.Type == conf.TypeSelect && len(item.Options) > 0 {
options := make(KV[string])
_options := strings.Split(item.Options, ",")
for _, o := range _options {
options[o] = convert(o)
}
items[fmt.Sprintf("%ss", item.Name)] = options
}
}
drivers[k] = items
}
writeFile("drivers", drivers)
}
func generateSettingsJson() {
settings := data.InitialSettings()
settingsLang := make(KV[any])
for _, setting := range settings {
settingsLang[setting.Key] = convert(setting.Key)
if setting.Help != "" {
settingsLang[fmt.Sprintf("%s-tips", setting.Key)] = setting.Help
}
if setting.Type == conf.TypeSelect && len(setting.Options) > 0 {
options := make(KV[string])
_options := strings.Split(setting.Options, ",")
for _, o := range _options {
options[o] = convert(o)
}
settingsLang[fmt.Sprintf("%ss", setting.Key)] = options
}
}
writeFile("settings", settingsLang)
//utils.WriteJsonToFile("lang/settings.json", settingsLang)
}
// langCmd represents the lang command
var langCmd = &cobra.Command{
Use: "lang",
Short: "Generate language json file",
Run: func(cmd *cobra.Command, args []string) {
err := os.MkdirAll("lang", 0777)
if err != nil {
utils.Log.Fatal("failed create folder: %s", err.Error())
}
generateDriversJson()
generateSettingsJson()
},
}
func init() {
rootCmd.AddCommand(langCmd)
// Here you will define your flags and configuration settings.
// Cobra supports Persistent Flags which will work for this command
// and all subcommands, e.g.:
// langCmd.PersistentFlags().String("foo", "", "A help for foo")
// Cobra supports local flags which will only run when this command
// is called directly, e.g.:
// langCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
}
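
As a worked example of the key-to-label mapping that `generateDriversJson` and `generateSettingsJson` rely on, here is a self-contained re-implementation of `convert` applied to option keys that appear in `drivers/123/meta.go` further down; the snippet is illustrative and not part of the diff:

```go
package main

import (
	"fmt"
	"strings"
)

// Mirrors convert() in cmd/lang.go: split on "_", join with spaces,
// then upper-case the first letter.
func convert(s string) string {
	ans := strings.Join(strings.Split(s, "_"), " ")
	if ans == "" {
		return ""
	}
	return strings.ToUpper(ans[:1]) + ans[1:]
}

func main() {
	for _, key := range []string{"order_by", "order_direction", "stream_upload"} {
		fmt.Printf("%s -> %s\n", key, convert(key))
	}
	// Output:
	// order_by -> Order by
	// order_direction -> Order direction
	// stream_upload -> Stream upload
}
```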

cmd/root.go (new file)
@@ -0,0 +1,31 @@
package cmd
import (
"fmt"
"os"
"github.com/alist-org/alist/v3/cmd/flags"
"github.com/spf13/cobra"
)
var rootCmd = &cobra.Command{
Use: "alist",
Short: "A file list program that supports multiple storage.",
Long: `A file list program that supports multiple storage,
built with love by Xhofe and friends in Go/Solid.js.
Complete documentation is available at https://alist.nn.ci/`,
}
func Execute() {
if err := rootCmd.Execute(); err != nil {
fmt.Fprintln(os.Stderr, err)
os.Exit(1)
}
}
func init() {
rootCmd.PersistentFlags().StringVar(&flags.Config, "conf", "data/config.json", "config file")
rootCmd.PersistentFlags().BoolVar(&flags.Debug, "debug", false, "start with debug mode")
rootCmd.PersistentFlags().BoolVar(&flags.NoPrefix, "no-prefix", false, "disable env prefix")
rootCmd.PersistentFlags().BoolVar(&flags.Dev, "dev", false, "start with dev mode")
}
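
The deleted `cmd/alist.go` entrypoint above is replaced by these cobra commands; the repository root's `main.go` is not shown in this excerpt, but given that CONTRIBUTING.md now says to run `go run main.go`, it presumably shrinks to a thin wrapper, roughly like this sketch:

```go
package main

// Hypothetical minimal entrypoint; the real main.go is not part of this diff.
import "github.com/alist-org/alist/v3/cmd"

func main() {
	// Subcommands (server, admin, lang, version, ...) register themselves in
	// the cmd package via init(); Execute parses args and dispatches to them.
	cmd.Execute()
}
```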

cmd/server.go (new file)
@@ -0,0 +1,99 @@
package cmd
import (
"context"
"fmt"
"net/http"
"os"
"os/signal"
"syscall"
"time"
"github.com/alist-org/alist/v3/cmd/flags"
_ "github.com/alist-org/alist/v3/drivers"
"github.com/alist-org/alist/v3/internal/bootstrap"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/alist-org/alist/v3/server"
"github.com/gin-gonic/gin"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
)
// serverCmd represents the server command
var serverCmd = &cobra.Command{
Use: "server",
Short: "Start the server at the specified address",
Long: `Start the server at the specified address
the address is defined in config file`,
Run: func(cmd *cobra.Command, args []string) {
Init()
bootstrap.InitAria2()
bootstrap.LoadStorages()
if !flags.Debug && !flags.Dev {
gin.SetMode(gin.ReleaseMode)
}
r := gin.New()
r.Use(gin.LoggerWithWriter(log.StandardLogger().Out), gin.RecoveryWithWriter(log.StandardLogger().Out))
server.Init(r)
base := fmt.Sprintf("%s:%d", conf.Conf.Address, conf.Conf.Port)
utils.Log.Infof("start server @ %s", base)
srv := &http.Server{Addr: base, Handler: r}
go func() {
var err error
if conf.Conf.Scheme.Https {
//err = r.RunTLS(base, conf.Conf.Scheme.CertFile, conf.Conf.Scheme.KeyFile)
err = srv.ListenAndServeTLS(conf.Conf.Scheme.CertFile, conf.Conf.Scheme.KeyFile)
} else {
err = srv.ListenAndServe()
}
if err != nil && err != http.ErrServerClosed {
utils.Log.Fatalf("failed to start: %s", err.Error())
}
}()
// Wait for interrupt signal to gracefully shutdown the server with
// a timeout of 5 seconds.
quit := make(chan os.Signal)
// kill (no param) sends syscall.SIGTERM by default
// kill -2 is syscall.SIGINT
// kill -9 is syscall.SIGKILL, but it can't be caught, so there is no need to add it
signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM)
<-quit
utils.Log.Println("Shutdown Server ...")
ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second)
defer cancel()
if err := srv.Shutdown(ctx); err != nil {
utils.Log.Fatal("Server Shutdown:", err)
}
// catching ctx.Done(). timeout of 3 seconds.
select {
case <-ctx.Done():
utils.Log.Println("timeout of 3 seconds.")
}
utils.Log.Println("Server exiting")
},
}
func init() {
rootCmd.AddCommand(serverCmd)
// Here you will define your flags and configuration settings.
// Cobra supports Persistent Flags which will work for this command
// and all subcommands, e.g.:
// serverCmd.PersistentFlags().String("foo", "", "A help for foo")
// Cobra supports local flags which will only run when this command
// is called directly, e.g.:
// serverCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
}
// OutAlistInit exposes the server start function for external callers
func OutAlistInit() {
var (
cmd *cobra.Command
args []string
)
serverCmd.Run(cmd, args)
}
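
`OutAlistInit` is the start function added for external calls in commit 08a001fbd1 (#1628). A hedged sketch of how a host program might embed the server through it; the goroutine and the keep-alive `select` are illustrative choices, not part of the diff:

```go
package main

import (
	"log"

	"github.com/alist-org/alist/v3/cmd"
)

func main() {
	// OutAlistInit blocks until SIGINT/SIGTERM and then shuts the HTTP server
	// down gracefully (see serverCmd.Run above), so run it in its own
	// goroutine if the host application has other work to do.
	go cmd.OutAlistInit()

	log.Println("alist started in the background")
	select {} // placeholder: keep the host process alive
}
```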

cmd/version.go (new file)
@@ -0,0 +1,44 @@
/*
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
*/
package cmd
import (
"fmt"
"os"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/spf13/cobra"
)
// versionCmd represents the version command
var versionCmd = &cobra.Command{
Use: "version",
Short: "Show current version of AList",
Run: func(cmd *cobra.Command, args []string) {
fmt.Printf(`Built At: %s
Go Version: %s
Author: %s
Commit ID: %s
Version: %s
WebVersion: %s
`,
conf.BuiltAt, conf.GoVersion, conf.GitAuthor, conf.GitCommit, conf.Version, conf.WebVersion)
os.Exit(0)
},
}
func init() {
rootCmd.AddCommand(versionCmd)
// Here you will define your flags and configuration settings.
// Cobra supports Persistent Flags which will work for this command
// and all subcommands, e.g.:
// versionCmd.PersistentFlags().String("foo", "", "A help for foo")
// Cobra supports local flags which will only run when this command
// is called directly, e.g.:
// versionCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
}

drivers/123/driver.go (new file)
@@ -0,0 +1,260 @@
package _123
import (
"bytes"
"context"
"crypto/md5"
"encoding/binary"
"encoding/hex"
"fmt"
"io"
"net/http"
"net/url"
"os"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/credentials"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/s3/s3manager"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
)
type Pan123 struct {
model.Storage
Addition
AccessToken string
}
func (d *Pan123) Config() driver.Config {
return config
}
func (d *Pan123) GetAddition() driver.Additional {
return d.Addition
}
func (d *Pan123) Init(ctx context.Context, storage model.Storage) error {
d.Storage = storage
err := utils.Json.UnmarshalFromString(d.Storage.Addition, &d.Addition)
if err != nil {
return err
}
return d.login()
}
func (d *Pan123) Drop(ctx context.Context) error {
return nil
}
func (d *Pan123) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
files, err := d.getFiles(dir.GetID())
if err != nil {
return nil, err
}
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
return src, nil
})
}
//func (d *Pan123) Get(ctx context.Context, path string) (model.Obj, error) {
// // this is optional
// return nil, errs.NotImplement
//}
func (d *Pan123) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
if f, ok := file.(File); ok {
var resp DownResp
var headers map[string]string
if !utils.IsLocalIPAddr(args.IP) {
headers = map[string]string{
//"X-Real-IP": "1.1.1.1",
"X-Forwarded-For": args.IP,
}
}
data := base.Json{
"driveId": 0,
"etag": f.Etag,
"fileId": f.FileId,
"fileName": f.FileName,
"s3keyFlag": f.S3KeyFlag,
"size": f.Size,
"type": f.Type,
}
_, err := d.request("https://www.123pan.com/api/file/download_info", http.MethodPost, func(req *resty.Request) {
req.SetBody(data).SetHeaders(headers)
}, &resp)
if err != nil {
return nil, err
}
u, err := url.Parse(resp.Data.DownloadUrl)
if err != nil {
return nil, err
}
u_ := fmt.Sprintf("https://%s%s", u.Host, u.Path)
res, err := base.NoRedirectClient.R().SetQueryParamsFromValues(u.Query()).Head(u_)
if err != nil {
return nil, err
}
log.Debug(res.String())
link := model.Link{
URL: resp.Data.DownloadUrl,
}
log.Debugln("res code: ", res.StatusCode())
if res.StatusCode() == 302 {
link.URL = res.Header().Get("location")
}
return &link, nil
} else {
return nil, fmt.Errorf("can't convert obj")
}
}
func (d *Pan123) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
data := base.Json{
"driveId": 0,
"etag": "",
"fileName": dirName,
"parentFileId": parentDir.GetID(),
"size": 0,
"type": 1,
}
_, err := d.request("https://www.123pan.com/api/file/upload_request", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
func (d *Pan123) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
data := base.Json{
"fileIdList": []base.Json{{"FileId": srcObj.GetID()}},
"parentFileId": dstDir.GetID(),
}
_, err := d.request("https://www.123pan.com/api/file/mod_pid", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
func (d *Pan123) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
data := base.Json{
"driveId": 0,
"fileId": srcObj.GetID(),
"fileName": newName,
}
_, err := d.request("https://www.123pan.com/api/file/rename", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
func (d *Pan123) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
return errs.NotSupport
}
func (d *Pan123) Remove(ctx context.Context, obj model.Obj) error {
if f, ok := obj.(File); ok {
data := base.Json{
"driveId": 0,
"operation": true,
"fileTrashInfoList": []File{f},
}
_, err := d.request("https://www.123pan.com/a/api/file/trash", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
} else {
return fmt.Errorf("can't convert obj")
}
}
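// Put hashes the stream before uploading: with StreamUpload enabled and a file
// larger than 10 MiB only the first 10 MiB (mixed with the file name and size)
// is hashed; otherwise the whole stream is copied to a temp file for a full MD5.
// The etag goes to upload_request; an empty Key in the response means the server
// already has the file, otherwise the data is uploaded through the S3-compatible
// endpoint and the upload is finished with upload_complete.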
func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
const DEFAULT int64 = 10485760
var uploadFile io.Reader
h := md5.New()
if d.StreamUpload && stream.GetSize() > DEFAULT {
// hash only the first 10 MiB
buf := bytes.NewBuffer(make([]byte, 0, DEFAULT))
if n, err := io.CopyN(io.MultiWriter(buf, h), stream, DEFAULT); err != io.EOF && n == 0 {
return err
}
// mix in extra data (file name and size) to avoid MD5 collisions
h.Write([]byte(stream.GetName()))
num := make([]byte, 8)
binary.BigEndian.PutUint64(num, uint64(stream.GetSize()))
h.Write(num)
// stitch the hashed prefix and the rest of the stream back together
uploadFile = io.MultiReader(buf, stream)
} else {
// compute the MD5 of the whole file
tempFile, err := utils.CreateTempFile(stream.GetReadCloser())
if err != nil {
return err
}
defer func() {
_ = tempFile.Close()
_ = os.Remove(tempFile.Name())
}()
if _, err = io.Copy(h, tempFile); err != nil {
return err
}
_, err = tempFile.Seek(0, io.SeekStart)
if err != nil {
return err
}
uploadFile = tempFile
}
etag := hex.EncodeToString(h.Sum(nil))
data := base.Json{
"driveId": 0,
"duplicate": 2, // 2->覆盖 1->重命名 0->默认
"etag": etag,
"fileName": stream.GetName(),
"parentFileId": dstDir.GetID(),
"size": stream.GetSize(),
"type": 0,
}
var resp UploadResp
_, err := d.request("https://www.123pan.com/api/file/upload_request", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, &resp)
if err != nil {
return err
}
if resp.Data.Key == "" {
return nil
}
cfg := &aws.Config{
Credentials: credentials.NewStaticCredentials(resp.Data.AccessKeyId, resp.Data.SecretAccessKey, resp.Data.SessionToken),
Region: aws.String("123pan"),
Endpoint: aws.String("file.123pan.com"),
S3ForcePathStyle: aws.Bool(true),
}
s, err := session.NewSession(cfg)
if err != nil {
return err
}
uploader := s3manager.NewUploader(s)
input := &s3manager.UploadInput{
Bucket: &resp.Data.Bucket,
Key: &resp.Data.Key,
Body: uploadFile,
}
_, err = uploader.Upload(input)
if err != nil {
return err
}
_, err = d.request("https://www.123pan.com/api/file/upload_complete", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"fileId": resp.Data.FileId,
})
}, nil)
return err
}
var _ driver.Driver = (*Pan123)(nil)

30
drivers/123/meta.go Normal file

@ -0,0 +1,30 @@
package _123
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
Username string `json:"username" required:"true"`
Password string `json:"password" required:"true"`
OrderBy string `json:"order_by" type:"select" options:"name,fileId,updateAt,createAt" default:"name"`
OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
driver.RootID
// define other
StreamUpload bool `json:"stream_upload"`
//Field string `json:"field" type:"select" required:"true" options:"a,b,c" default:"a"`
}
var config = driver.Config{
Name: "123Pan",
DefaultRoot: "0",
}
func New() driver.Driver {
return &Pan123{}
}
func init() {
op.RegisterDriver(config, New)
}

89
drivers/123/types.go Normal file

@ -0,0 +1,89 @@
package _123
import (
"strconv"
"time"
"github.com/alist-org/alist/v3/internal/model"
)
type BaseResp struct {
Code int `json:"code"`
Message string `json:"message"`
}
type TokenResp struct {
BaseResp
Data struct {
Token string `json:"token"`
} `json:"data"`
}
type File struct {
FileName string `json:"FileName"`
Size int64 `json:"Size"`
UpdateAt time.Time `json:"UpdateAt"`
FileId int64 `json:"FileId"`
Type int `json:"Type"`
Etag string `json:"Etag"`
S3KeyFlag string `json:"S3KeyFlag"`
DownloadUrl string `json:"DownloadUrl"`
}
func (f File) GetPath() string {
return ""
}
func (f File) GetSize() int64 {
return f.Size
}
func (f File) GetName() string {
return f.FileName
}
func (f File) ModTime() time.Time {
return f.UpdateAt
}
func (f File) IsDir() bool {
return f.Type == 1
}
func (f File) GetID() string {
return strconv.FormatInt(f.FileId, 10)
}
var _ model.Obj = (*File)(nil)
//func (f File) Thumb() string {
//
//}
//var _ model.Thumb = (*File)(nil)
type Files struct {
BaseResp
Data struct {
InfoList []File `json:"InfoList"`
Next string `json:"Next"`
} `json:"data"`
}
type DownResp struct {
BaseResp
Data struct {
DownloadUrl string `json:"DownloadUrl"`
} `json:"data"`
}
type UploadResp struct {
BaseResp
Data struct {
AccessKeyId string `json:"AccessKeyId"`
Bucket string `json:"Bucket"`
Key string `json:"Key"`
SecretAccessKey string `json:"SecretAccessKey"`
SessionToken string `json:"SessionToken"`
FileId int64 `json:"FileId"`
} `json:"data"`
}

87
drivers/123/util.go Normal file

@ -0,0 +1,87 @@
package _123
import (
"errors"
"fmt"
"net/http"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/go-resty/resty/v2"
jsoniter "github.com/json-iterator/go"
)
// helpers that are not part of the Driver interface
func (d *Pan123) login() error {
url := "https://www.123pan.com/api/user/sign_in"
var resp TokenResp
_, err := base.RestyClient.R().
SetResult(&resp).
SetBody(base.Json{
"passport": d.Username,
"password": d.Password,
}).Post(url)
if err != nil {
return err
}
if resp.Code != 200 {
err = fmt.Errorf(resp.Message)
} else {
d.AccessToken = resp.Data.Token
}
return err
}
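// request performs an API call with the Bearer token attached; a code of 401
// triggers a re-login and a retry, and any other non-zero code is returned as
// an error built from the response message.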
func (d *Pan123) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
req := base.RestyClient.R()
req.SetHeader("Authorization", "Bearer "+d.AccessToken)
if callback != nil {
callback(req)
}
if resp != nil {
req.SetResult(resp)
}
res, err := req.Execute(method, url)
if err != nil {
return nil, err
}
body := res.Body()
code := jsoniter.Get(body, "code").ToInt()
if code != 0 {
if code == 401 {
err := d.login()
if err != nil {
return nil, err
}
return d.request(url, method, callback, resp)
}
return nil, errors.New(jsoniter.Get(body, "message").ToString())
}
return body, nil
}
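// getFiles pages through /api/file/list/new 100 entries at a time, following
// the Next cursor until the API returns "-1".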
func (d *Pan123) getFiles(parentId string) ([]File, error) {
next := "0"
res := make([]File, 0)
for next != "-1" {
var resp Files
query := map[string]string{
"driveId": "0",
"limit": "100",
"next": next,
"orderBy": d.OrderBy,
"orderDirection": d.OrderDirection,
"parentFileId": parentId,
"trashed": "false",
}
_, err := d.request("https://www.123pan.com/api/file/list/new", http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(query)
}, &resp)
if err != nil {
return nil, err
}
next = resp.Data.Next
res = append(res, resp.Data.InfoList...)
}
return res, nil
}

322
drivers/139/driver.go Normal file

@ -0,0 +1,322 @@
package _139
import (
"bytes"
"context"
"fmt"
"io"
"math"
"net/http"
"strconv"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
log "github.com/sirupsen/logrus"
)
type Yun139 struct {
model.Storage
Addition
}
func (d *Yun139) Config() driver.Config {
return config
}
func (d *Yun139) GetAddition() driver.Additional {
return d.Addition
}
func (d *Yun139) Init(ctx context.Context, storage model.Storage) error {
d.Storage = storage
err := utils.Json.UnmarshalFromString(d.Storage.Addition, &d.Addition)
if err != nil {
return err
}
_, err = d.post("/orchestration/personalCloud/user/v1.0/qryUserExternInfo", base.Json{
"qryUserExternInfoReq": base.Json{
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
},
}, nil)
return err
}
func (d *Yun139) Drop(ctx context.Context) error {
return nil
}
func (d *Yun139) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
if d.isFamily() {
return d.familyGetFiles(dir.GetID())
} else {
return d.getFiles(dir.GetID())
}
}
//func (d *Yun139) Get(ctx context.Context, path string) (model.Obj, error) {
// // this is optional
// return nil, errs.NotImplement
//}
func (d *Yun139) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
u, err := d.getLink(file.GetID())
if err != nil {
return nil, err
}
return &model.Link{URL: u}, nil
}
func (d *Yun139) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
data := base.Json{
"createCatalogExtReq": base.Json{
"parentCatalogID": parentDir.GetID(),
"newCatalogName": dirName,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
},
}
pathname := "/orchestration/personalCloud/catalog/v1.0/createCatalogExt"
if d.isFamily() {
data = base.Json{
"cloudID": d.CloudID,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
"docLibName": dirName,
}
pathname = "/orchestration/familyCloud/cloudCatalog/v1.0/createCloudDoc"
}
_, err := d.post(pathname,
data, nil)
return err
}
func (d *Yun139) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
if d.isFamily() {
return errs.NotImplement
}
var contentInfoList []string
var catalogInfoList []string
if srcObj.IsDir() {
catalogInfoList = append(catalogInfoList, srcObj.GetID())
} else {
contentInfoList = append(contentInfoList, srcObj.GetID())
}
data := base.Json{
"createBatchOprTaskReq": base.Json{
"taskType": 3,
"actionType": "304",
"taskInfo": base.Json{
"contentInfoList": contentInfoList,
"catalogInfoList": catalogInfoList,
"newCatalogID": dstDir.GetID(),
},
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
},
}
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
_, err := d.post(pathname, data, nil)
return err
}
func (d *Yun139) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
if d.isFamily() {
return errs.NotImplement
}
var data base.Json
var pathname string
if srcObj.IsDir() {
data = base.Json{
"catalogID": srcObj.GetID(),
"catalogName": newName,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
}
pathname = "/orchestration/personalCloud/catalog/v1.0/updateCatalogInfo"
} else {
data = base.Json{
"contentID": srcObj.GetID(),
"contentName": newName,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
}
pathname = "/orchestration/personalCloud/content/v1.0/updateContentInfo"
}
_, err := d.post(pathname, data, nil)
return err
}
func (d *Yun139) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
if d.isFamily() {
return errs.NotImplement
}
var contentInfoList []string
var catalogInfoList []string
if srcObj.IsDir() {
catalogInfoList = append(catalogInfoList, srcObj.GetID())
} else {
contentInfoList = append(contentInfoList, srcObj.GetID())
}
data := base.Json{
"createBatchOprTaskReq": base.Json{
"taskType": 3,
"actionType": 309,
"taskInfo": base.Json{
"contentInfoList": contentInfoList,
"catalogInfoList": catalogInfoList,
"newCatalogID": dstDir.GetID(),
},
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
},
}
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
_, err := d.post(pathname, data, nil)
return err
}
func (d *Yun139) Remove(ctx context.Context, obj model.Obj) error {
var contentInfoList []string
var catalogInfoList []string
if obj.IsDir() {
catalogInfoList = append(catalogInfoList, obj.GetID())
} else {
contentInfoList = append(contentInfoList, obj.GetID())
}
data := base.Json{
"createBatchOprTaskReq": base.Json{
"taskType": 2,
"actionType": 201,
"taskInfo": base.Json{
"newCatalogID": "",
"contentInfoList": contentInfoList,
"catalogInfoList": catalogInfoList,
},
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
},
}
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
if d.isFamily() {
data = base.Json{
"catalogList": catalogInfoList,
"contentList": contentInfoList,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
"sourceCatalogType": 1002,
"taskType": 2,
}
pathname = "/orchestration/familyCloud/batchOprTask/v1.0/createBatchOprTask"
}
_, err := d.post(pathname, data, nil)
return err
}
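// Put registers the upload via pcUploadFileRequest to obtain a redirect URL and
// task ID, then streams the file in 10 MiB parts, POSTing each part with the
// range, uploadtaskID and contentSize headers expected by the upload host.
// The family-cloud branch is not implemented yet.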
func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
data := base.Json{
"manualRename": 2,
"operation": 0,
"fileCount": 1,
"totalSize": stream.GetSize(),
"uploadContentList": []base.Json{{
"contentName": stream.GetName(),
"contentSize": stream.GetSize(),
// "digest": "5a3231986ce7a6b46e408612d385bafa"
}},
"parentCatalogID": dstDir.GetID(),
"newCatalogName": "",
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
}
pathname := "/orchestration/personalCloud/uploadAndDownload/v1.0/pcUploadFileRequest"
if d.isFamily() {
data = d.newJson(base.Json{
"fileCount": 1,
"manualRename": 2,
"operation": 0,
"path": "",
"seqNo": "",
"totalSize": stream.GetSize(),
"uploadContentList": []base.Json{{
"contentName": stream.GetName(),
"contentSize": stream.GetSize(),
// "digest": "5a3231986ce7a6b46e408612d385bafa"
}},
})
pathname = "/orchestration/familyCloud/content/v1.0/getFileUploadURL"
return errs.NotImplement
}
var resp UploadResp
_, err := d.post(pathname, data, &resp)
if err != nil {
return err
}
var Default int64 = 10485760
part := int(math.Ceil(float64(stream.GetSize()) / float64(Default)))
var start int64 = 0
for i := 0; i < part; i++ {
byteSize := stream.GetSize() - start
if byteSize > Default {
byteSize = Default
}
byteData := make([]byte, byteSize)
_, err = io.ReadFull(stream, byteData)
if err != nil {
return err
}
req, err := http.NewRequest("POST", resp.Data.UploadResult.RedirectionURL, bytes.NewBuffer(byteData))
if err != nil {
return err
}
headers := map[string]string{
"Accept": "*/*",
"Content-Type": "text/plain;name=" + unicode(stream.GetName()),
"contentSize": strconv.FormatInt(stream.GetSize(), 10),
"range": fmt.Sprintf("bytes=%d-%d", start, start+byteSize-1),
"content-length": strconv.FormatInt(byteSize, 10),
"uploadtaskID": resp.Data.UploadResult.UploadTaskID,
"rangeType": "0",
"Referer": "https://yun.139.com/",
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.69 Safari/537.36 Edg/95.0.1020.44",
"x-SvcType": "1",
}
for k, v := range headers {
req.Header.Set(k, v)
}
res, err := base.HttpClient.Do(req)
if err != nil {
return err
}
log.Debugf("%+v", res)
res.Body.Close()
start += byteSize
up((i + 1) * 100 / part)
}
return nil
}
var _ driver.Driver = (*Yun139)(nil)

25
drivers/139/meta.go Normal file

@ -0,0 +1,25 @@
package _139
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
Account string `json:"account" required:"true"`
Cookie string `json:"cookie" type:"text" required:"true"`
driver.RootID
Type string `json:"type" type:"select" options:"personal,family" default:"personal"`
CloudID string `json:"cloud_id"`
}
var config = driver.Config{
Name: "139Yun",
LocalSort: true,
}
func init() {
op.RegisterDriver(config, func() driver.Driver {
return &Yun139{}
})
}

187
drivers/139/types.go Normal file

@ -0,0 +1,187 @@
package _139
type BaseResp struct {
Success bool `json:"success"`
Code string `json:"code"`
Message string `json:"message"`
}
type Catalog struct {
CatalogID string `json:"catalogID"`
CatalogName string `json:"catalogName"`
//CatalogType int `json:"catalogType"`
//CreateTime string `json:"createTime"`
UpdateTime string `json:"updateTime"`
//IsShared bool `json:"isShared"`
//CatalogLevel int `json:"catalogLevel"`
//ShareDoneeCount int `json:"shareDoneeCount"`
//OpenType int `json:"openType"`
//ParentCatalogID string `json:"parentCatalogId"`
//DirEtag int `json:"dirEtag"`
//Tombstoned int `json:"tombstoned"`
//ProxyID interface{} `json:"proxyID"`
//Moved int `json:"moved"`
//IsFixedDir int `json:"isFixedDir"`
//IsSynced interface{} `json:"isSynced"`
//Owner string `json:"owner"`
//Modifier interface{} `json:"modifier"`
//Path string `json:"path"`
//ShareType int `json:"shareType"`
//SoftLink interface{} `json:"softLink"`
//ExtProp1 interface{} `json:"extProp1"`
//ExtProp2 interface{} `json:"extProp2"`
//ExtProp3 interface{} `json:"extProp3"`
//ExtProp4 interface{} `json:"extProp4"`
//ExtProp5 interface{} `json:"extProp5"`
//ETagOprType int `json:"ETagOprType"`
}
type Content struct {
ContentID string `json:"contentID"`
ContentName string `json:"contentName"`
//ContentSuffix string `json:"contentSuffix"`
ContentSize int64 `json:"contentSize"`
//ContentDesc string `json:"contentDesc"`
//ContentType int `json:"contentType"`
//ContentOrigin int `json:"contentOrigin"`
UpdateTime string `json:"updateTime"`
//CommentCount int `json:"commentCount"`
ThumbnailURL string `json:"thumbnailURL"`
//BigthumbnailURL string `json:"bigthumbnailURL"`
//PresentURL string `json:"presentURL"`
//PresentLURL string `json:"presentLURL"`
//PresentHURL string `json:"presentHURL"`
//ContentTAGList interface{} `json:"contentTAGList"`
//ShareDoneeCount int `json:"shareDoneeCount"`
//Safestate int `json:"safestate"`
//Transferstate int `json:"transferstate"`
//IsFocusContent int `json:"isFocusContent"`
//UpdateShareTime interface{} `json:"updateShareTime"`
//UploadTime string `json:"uploadTime"`
//OpenType int `json:"openType"`
//AuditResult int `json:"auditResult"`
//ParentCatalogID string `json:"parentCatalogId"`
//Channel string `json:"channel"`
//GeoLocFlag string `json:"geoLocFlag"`
//Digest string `json:"digest"`
//Version string `json:"version"`
//FileEtag string `json:"fileEtag"`
//FileVersion string `json:"fileVersion"`
//Tombstoned int `json:"tombstoned"`
//ProxyID string `json:"proxyID"`
//Moved int `json:"moved"`
//MidthumbnailURL string `json:"midthumbnailURL"`
//Owner string `json:"owner"`
//Modifier string `json:"modifier"`
//ShareType int `json:"shareType"`
//ExtInfo struct {
// Uploader string `json:"uploader"`
// Address string `json:"address"`
//} `json:"extInfo"`
//Exif struct {
// CreateTime string `json:"createTime"`
// Longitude interface{} `json:"longitude"`
// Latitude interface{} `json:"latitude"`
// LocalSaveTime interface{} `json:"localSaveTime"`
//} `json:"exif"`
//CollectionFlag interface{} `json:"collectionFlag"`
//TreeInfo interface{} `json:"treeInfo"`
//IsShared bool `json:"isShared"`
//ETagOprType int `json:"ETagOprType"`
}
type GetDiskResp struct {
BaseResp
Data struct {
Result struct {
ResultCode string `json:"resultCode"`
ResultDesc interface{} `json:"resultDesc"`
} `json:"result"`
GetDiskResult struct {
ParentCatalogID string `json:"parentCatalogID"`
NodeCount int `json:"nodeCount"`
CatalogList []Catalog `json:"catalogList"`
ContentList []Content `json:"contentList"`
IsCompleted int `json:"isCompleted"`
} `json:"getDiskResult"`
} `json:"data"`
}
type UploadResp struct {
BaseResp
Data struct {
Result struct {
ResultCode string `json:"resultCode"`
ResultDesc interface{} `json:"resultDesc"`
} `json:"result"`
UploadResult struct {
UploadTaskID string `json:"uploadTaskID"`
RedirectionURL string `json:"redirectionUrl"`
NewContentIDList []struct {
ContentID string `json:"contentID"`
ContentName string `json:"contentName"`
IsNeedUpload string `json:"isNeedUpload"`
FileEtag int64 `json:"fileEtag"`
FileVersion int64 `json:"fileVersion"`
OverridenFlag int `json:"overridenFlag"`
} `json:"newContentIDList"`
CatalogIDList interface{} `json:"catalogIDList"`
IsSlice interface{} `json:"isSlice"`
} `json:"uploadResult"`
} `json:"data"`
}
type CloudContent struct {
ContentID string `json:"contentID"`
//Modifier string `json:"modifier"`
//Nickname string `json:"nickname"`
//CloudNickName string `json:"cloudNickName"`
ContentName string `json:"contentName"`
//ContentType int `json:"contentType"`
//ContentSuffix string `json:"contentSuffix"`
ContentSize int64 `json:"contentSize"`
//ContentDesc string `json:"contentDesc"`
//CreateTime string `json:"createTime"`
//Shottime interface{} `json:"shottime"`
LastUpdateTime string `json:"lastUpdateTime"`
ThumbnailURL string `json:"thumbnailURL"`
//MidthumbnailURL string `json:"midthumbnailURL"`
//BigthumbnailURL string `json:"bigthumbnailURL"`
//PresentURL string `json:"presentURL"`
//PresentLURL string `json:"presentLURL"`
//PresentHURL string `json:"presentHURL"`
//ParentCatalogID string `json:"parentCatalogID"`
//Uploader string `json:"uploader"`
//UploaderNickName string `json:"uploaderNickName"`
//TreeInfo interface{} `json:"treeInfo"`
//UpdateTime interface{} `json:"updateTime"`
//ExtInfo struct {
// Uploader string `json:"uploader"`
//} `json:"extInfo"`
//EtagOprType interface{} `json:"etagOprType"`
}
type CloudCatalog struct {
CatalogID string `json:"catalogID"`
CatalogName string `json:"catalogName"`
//CloudID string `json:"cloudID"`
//CreateTime string `json:"createTime"`
LastUpdateTime string `json:"lastUpdateTime"`
//Creator string `json:"creator"`
//CreatorNickname string `json:"creatorNickname"`
}
type QueryContentListResp struct {
BaseResp
Data struct {
Result struct {
ResultCode string `json:"resultCode"`
ResultDesc string `json:"resultDesc"`
} `json:"result"`
Path string `json:"path"`
CloudContentList []CloudContent `json:"cloudContentList"`
CloudCatalogList []CloudCatalog `json:"cloudCatalogList"`
TotalCount int `json:"totalCount"`
RecallContent interface{} `json:"recallContent"`
} `json:"data"`
}

247
drivers/139/util.go Normal file

@ -0,0 +1,247 @@
package _139
import (
"encoding/base64"
"errors"
"fmt"
"net/http"
"net/url"
"sort"
"strconv"
"strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/alist-org/alist/v3/pkg/utils/random"
"github.com/go-resty/resty/v2"
jsoniter "github.com/json-iterator/go"
log "github.com/sirupsen/logrus"
)
// helpers that are not part of the Driver interface
func (d *Yun139) isFamily() bool {
return d.Type == "family"
}
func encodeURIComponent(str string) string {
r := url.QueryEscape(str)
r = strings.Replace(r, "+", "%20", -1)
return r
}
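// calSign builds the mcloud-sign digest: whitespace is stripped from the JSON
// body, the body is URL-encoded, its characters are sorted, the result is
// base64-encoded and MD5-hashed, concatenated with MD5(ts + ":" + randStr),
// and the concatenation is MD5-hashed again and upper-cased.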
func calSign(body, ts, randStr string) string {
body = strings.ReplaceAll(body, "\n", "")
body = strings.ReplaceAll(body, " ", "")
body = encodeURIComponent(body)
strs := strings.Split(body, "")
sort.Strings(strs)
body = strings.Join(strs, "")
body = base64.StdEncoding.EncodeToString([]byte(body))
res := utils.GetMD5Encode(body) + utils.GetMD5Encode(ts+":"+randStr)
res = strings.ToUpper(utils.GetMD5Encode(res))
return res
}
func getTime(t string) time.Time {
stamp, _ := time.ParseInLocation("20060102150405", t, time.Local)
return stamp
}
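// request signs every call with the mcloud-sign header (timestamp, random
// string and calSign of the marshalled body) and attaches the cookie plus the
// fixed web-client headers that yun.139.com expects.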
func (d *Yun139) request(pathname string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
url := "https://yun.139.com" + pathname
req := base.RestyClient.R()
randStr := random.String(16)
ts := time.Now().Format("2006-01-02 15:04:05")
if callback != nil {
callback(req)
}
body, err := utils.Json.Marshal(req.Body)
if err != nil {
return nil, err
}
sign := calSign(string(body), ts, randStr)
svcType := "1"
if d.isFamily() {
svcType = "2"
}
req.SetHeaders(map[string]string{
"Accept": "application/json, text/plain, */*",
"CMS-DEVICE": "default",
"Cookie": d.Cookie,
"mcloud-channel": "1000101",
"mcloud-client": "10701",
//"mcloud-route": "001",
"mcloud-sign": fmt.Sprintf("%s,%s,%s", ts, randStr, sign),
//"mcloud-skey":"",
"mcloud-version": "6.6.0",
"Origin": "https://yun.139.com",
"Referer": "https://yun.139.com/w/",
"x-DeviceInfo": "||9|6.6.0|chrome|95.0.4638.69|uwIy75obnsRPIwlJSd7D9GhUvFwG96ce||macos 10.15.2||zh-CN|||",
"x-huawei-channelSrc": "10000034",
"x-inner-ntwk": "2",
"x-m4c-caller": "PC",
"x-m4c-src": "10002",
"x-SvcType": svcType,
})
var e BaseResp
req.SetResult(&e)
res, err := req.Execute(method, url)
if err != nil {
return nil, err
}
log.Debugln(res.String())
if !e.Success {
return nil, errors.New(e.Message)
}
if resp != nil {
err = utils.Json.Unmarshal(res.Body(), resp)
if err != nil {
return nil, err
}
}
return res.Body(), nil
}
func (d *Yun139) post(pathname string, data interface{}, resp interface{}) ([]byte, error) {
return d.request(pathname, http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, resp)
}
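// getFiles lists a personal-cloud catalog via getDisk, fetching 100 entries per
// request (startNumber/endNumber) until NodeCount is reached.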
func (d *Yun139) getFiles(catalogID string) ([]model.Obj, error) {
start := 0
limit := 100
files := make([]model.Obj, 0)
for {
data := base.Json{
"catalogID": catalogID,
"sortDirection": 1,
"startNumber": start + 1,
"endNumber": start + limit,
"filterType": 0,
"catalogSortType": 0,
"contentSortType": 0,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
}
var resp GetDiskResp
_, err := d.post("/orchestration/personalCloud/catalog/v1.0/getDisk", data, &resp)
if err != nil {
return nil, err
}
for _, catalog := range resp.Data.GetDiskResult.CatalogList {
f := model.Object{
ID: catalog.CatalogID,
Name: catalog.CatalogName,
Size: 0,
Modified: getTime(catalog.UpdateTime),
IsFolder: true,
}
files = append(files, &f)
}
for _, content := range resp.Data.GetDiskResult.ContentList {
f := model.ObjThumb{
Object: model.Object{
ID: content.ContentID,
Name: content.ContentName,
Size: content.ContentSize,
Modified: getTime(content.UpdateTime),
},
Thumbnail: model.Thumbnail{Thumbnail: content.ThumbnailURL},
//Thumbnail: content.BigthumbnailURL,
}
files = append(files, &f)
}
if start+limit >= resp.Data.GetDiskResult.NodeCount {
break
}
start += limit
}
return files, nil
}
func (d *Yun139) newJson(data map[string]interface{}) base.Json {
common := map[string]interface{}{
"catalogType": 3,
"cloudID": d.CloudID,
"cloudType": 1,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
}
return utils.MergeMap(data, common)
}
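// familyGetFiles lists a family-cloud catalog via queryContentList, 100 entries
// per page, until TotalCount is reached.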
func (d *Yun139) familyGetFiles(catalogID string) ([]model.Obj, error) {
pageNum := 1
files := make([]model.Obj, 0)
for {
data := d.newJson(base.Json{
"catalogID": catalogID,
"contentSortType": 0,
"pageInfo": base.Json{
"pageNum": pageNum,
"pageSize": 100,
},
"sortDirection": 1,
})
var resp QueryContentListResp
_, err := d.post("/orchestration/familyCloud/content/v1.0/queryContentList", data, &resp)
if err != nil {
return nil, err
}
for _, catalog := range resp.Data.CloudCatalogList {
f := model.Object{
ID: catalog.CatalogID,
Name: catalog.CatalogName,
Size: 0,
IsFolder: true,
Modified: getTime(catalog.LastUpdateTime),
}
files = append(files, &f)
}
for _, content := range resp.Data.CloudContentList {
f := model.ObjThumb{
Object: model.Object{
ID: content.ContentID,
Name: content.ContentName,
Size: content.ContentSize,
Modified: getTime(content.LastUpdateTime),
},
Thumbnail: model.Thumbnail{Thumbnail: content.ThumbnailURL},
//Thumbnail: content.BigthumbnailURL,
}
files = append(files, &f)
}
if 100*pageNum > resp.Data.TotalCount {
break
}
pageNum++
}
return files, nil
}
func (d *Yun139) getLink(contentId string) (string, error) {
data := base.Json{
"appName": "",
"contentID": contentId,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
}
res, err := d.post("/orchestration/personalCloud/uploadAndDownload/v1.0/downloadRequest",
data, nil)
if err != nil {
return "", err
}
return jsoniter.Get(res, "data", "downloadURL").ToString(), nil
}
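// unicode escapes non-ASCII characters (e.g. Chinese file names) as \uXXXX so
// they can be carried safely in the Content-Type header of the upload request.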
func unicode(str string) string {
textQuoted := strconv.QuoteToASCII(str)
textUnquoted := textQuoted[1 : len(textQuoted)-1]
return textUnquoted
}

210
drivers/189/driver.go Normal file

@ -0,0 +1,210 @@
package _189
import (
"context"
"net/http"
"strings"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
)
type Cloud189 struct {
model.Storage
Addition
client *resty.Client
rsa Rsa
sessionKey string
}
func (d *Cloud189) Config() driver.Config {
return config
}
func (d *Cloud189) GetAddition() driver.Additional {
return d.Addition
}
func (d *Cloud189) Init(ctx context.Context, storage model.Storage) error {
d.Storage = storage
err := utils.Json.UnmarshalFromString(d.Storage.Addition, &d.Addition)
if err != nil {
return err
}
d.client = resty.New().
SetTimeout(base.DefaultTimeout).
SetRetryCount(3).
SetHeader("Referer", "https://cloud.189.cn/").
SetHeader("User-Agent", base.UserAgent)
return d.login()
}
func (d *Cloud189) Drop(ctx context.Context) error {
return nil
}
func (d *Cloud189) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
return d.getFiles(dir.GetID())
}
//func (d *Cloud189) Get(ctx context.Context, path string) (model.Obj, error) {
// // this is optional
// return nil, errs.NotImplement
//}
func (d *Cloud189) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
var resp DownResp
u := "https://cloud.189.cn/api/portal/getFileInfo.action"
_, err := d.request(u, http.MethodGet, func(req *resty.Request) {
req.SetQueryParam("fileId", file.GetID())
}, &resp)
if err != nil {
return nil, err
}
client := resty.NewWithClient(d.client.GetClient()).SetRedirectPolicy(
resty.RedirectPolicyFunc(func(req *http.Request, via []*http.Request) error {
return http.ErrUseLastResponse
}))
res, err := client.R().SetHeader("User-Agent", base.UserAgent).Get("https:" + resp.FileDownloadUrl)
if err != nil {
return nil, err
}
log.Debugln(res.Status())
log.Debugln(res.String())
link := model.Link{}
log.Debugln("first url:", resp.FileDownloadUrl)
if res.StatusCode() == 302 {
link.URL = res.Header().Get("location")
log.Debugln("second url:", link.URL)
res, err = client.R().Get(link.URL)
if err != nil {
return nil, err
}
if res.StatusCode() == 302 {
link.URL = res.Header().Get("location")
}
log.Debugln("third url:", link.URL)
} else {
link.URL = resp.FileDownloadUrl
}
link.URL = strings.Replace(link.URL, "http://", "https://", 1)
return &link, nil
}
func (d *Cloud189) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
form := map[string]string{
"parentFolderId": parentDir.GetID(),
"folderName": dirName,
}
_, err := d.request("https://cloud.189.cn/api/open/file/createFolder.action", http.MethodPost, func(req *resty.Request) {
req.SetFormData(form)
}, nil)
return err
}
func (d *Cloud189) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
isFolder := 0
if srcObj.IsDir() {
isFolder = 1
}
taskInfos := []base.Json{
{
"fileId": srcObj.GetID(),
"fileName": srcObj.GetName(),
"isFolder": isFolder,
},
}
taskInfosBytes, err := utils.Json.Marshal(taskInfos)
if err != nil {
return err
}
form := map[string]string{
"type": "MOVE",
"targetFolderId": dstDir.GetID(),
"taskInfos": string(taskInfosBytes),
}
_, err = d.request("https://cloud.189.cn/api/open/batch/createBatchTask.action", http.MethodPost, func(req *resty.Request) {
req.SetFormData(form)
}, nil)
return err
}
func (d *Cloud189) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
url := "https://cloud.189.cn/api/open/file/renameFile.action"
idKey := "fileId"
nameKey := "destFileName"
if srcObj.IsDir() {
url = "https://cloud.189.cn/api/open/file/renameFolder.action"
idKey = "folderId"
nameKey = "destFolderName"
}
form := map[string]string{
idKey: srcObj.GetID(),
nameKey: newName,
}
_, err := d.request(url, http.MethodPost, func(req *resty.Request) {
req.SetFormData(form)
}, nil)
return err
}
func (d *Cloud189) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
isFolder := 0
if srcObj.IsDir() {
isFolder = 1
}
taskInfos := []base.Json{
{
"fileId": srcObj.GetID(),
"fileName": srcObj.GetName(),
"isFolder": isFolder,
},
}
taskInfosBytes, err := utils.Json.Marshal(taskInfos)
if err != nil {
return err
}
form := map[string]string{
"type": "COPY",
"targetFolderId": dstDir.GetID(),
"taskInfos": string(taskInfosBytes),
}
_, err = d.request("https://cloud.189.cn/api/open/batch/createBatchTask.action", http.MethodPost, func(req *resty.Request) {
req.SetFormData(form)
}, nil)
return err
}
func (d *Cloud189) Remove(ctx context.Context, obj model.Obj) error {
isFolder := 0
if obj.IsDir() {
isFolder = 1
}
taskInfos := []base.Json{
{
"fileId": obj.GetID(),
"fileName": obj.GetName(),
"isFolder": isFolder,
},
}
taskInfosBytes, err := utils.Json.Marshal(taskInfos)
if err != nil {
return err
}
form := map[string]string{
"type": "DELETE",
"targetFolderId": "",
"taskInfos": string(taskInfosBytes),
}
_, err = d.request("https://cloud.189.cn/api/open/batch/createBatchTask.action", http.MethodPost, func(req *resty.Request) {
req.SetFormData(form)
}, nil)
return err
}
func (d *Cloud189) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
return d.newUpload(dstDir, stream, up)
}
var _ driver.Driver = (*Cloud189)(nil)

186
drivers/189/help.go Normal file

@ -0,0 +1,186 @@
package _189
import (
"bytes"
"crypto/aes"
"crypto/hmac"
"crypto/md5"
"crypto/rand"
"crypto/rsa"
"crypto/sha1"
"crypto/x509"
"encoding/base64"
"encoding/hex"
"encoding/pem"
"fmt"
"net/url"
"regexp"
"strconv"
"strings"
myrand "github.com/alist-org/alist/v3/pkg/utils/random"
log "github.com/sirupsen/logrus"
)
func random() string {
return fmt.Sprintf("0.%17v", myrand.Rand.Int63n(100000000000000000))
}
func RsaEncode(origData []byte, j_rsakey string, hex bool) string {
publicKey := []byte("-----BEGIN PUBLIC KEY-----\n" + j_rsakey + "\n-----END PUBLIC KEY-----")
block, _ := pem.Decode(publicKey)
pubInterface, _ := x509.ParsePKIXPublicKey(block.Bytes)
pub := pubInterface.(*rsa.PublicKey)
b, err := rsa.EncryptPKCS1v15(rand.Reader, pub, origData)
if err != nil {
log.Errorf("err: %s", err.Error())
}
res := base64.StdEncoding.EncodeToString(b)
if hex {
return b64tohex(res)
}
return res
}
var b64map = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
var BI_RM = "0123456789abcdefghijklmnopqrstuvwxyz"
func int2char(a int) string {
return strings.Split(BI_RM, "")[a]
}
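// b64tohex converts a base64 string to lowercase hex by repacking its 6-bit
// groups into 4-bit nibbles.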
func b64tohex(a string) string {
d := ""
e := 0
c := 0
for i := 0; i < len(a); i++ {
m := strings.Split(a, "")[i]
if m != "=" {
v := strings.Index(b64map, m)
if 0 == e {
e = 1
d += int2char(v >> 2)
c = 3 & v
} else if 1 == e {
e = 2
d += int2char(c<<2 | v>>4)
c = 15 & v
} else if 2 == e {
e = 3
d += int2char(c)
d += int2char(v >> 2)
c = 3 & v
} else {
e = 0
d += int2char(c<<2 | v>>4)
d += int2char(15 & v)
}
}
}
if e == 1 {
d += int2char(c << 2)
}
return d
}
func qs(form map[string]string) string {
f := make(url.Values)
for k, v := range form {
f.Set(k, v)
}
return EncodeParam(f)
//strList := make([]string, 0)
//for k, v := range form {
// strList = append(strList, fmt.Sprintf("%s=%s", k, url.QueryEscape(v)))
//}
//return strings.Join(strList, "&")
}
func EncodeParam(v url.Values) string {
if v == nil {
return ""
}
var buf strings.Builder
keys := make([]string, 0, len(v))
for k := range v {
keys = append(keys, k)
}
for _, k := range keys {
vs := v[k]
for _, v := range vs {
if buf.Len() > 0 {
buf.WriteByte('&')
}
buf.WriteString(k)
buf.WriteByte('=')
//if k == "fileName" {
// buf.WriteString(encode(v))
//} else {
buf.WriteString(v)
//}
}
}
return buf.String()
}
func encode(str string) string {
//str = strings.ReplaceAll(str, "%", "%25")
//str = strings.ReplaceAll(str, "&", "%26")
//str = strings.ReplaceAll(str, "+", "%2B")
//return str
return url.QueryEscape(str)
}
func AesEncrypt(data, key []byte) []byte {
block, _ := aes.NewCipher(key)
if block == nil {
return []byte{}
}
data = PKCS7Padding(data, block.BlockSize())
decrypted := make([]byte, len(data))
size := block.BlockSize()
for bs, be := 0, size; bs < len(data); bs, be = bs+size, be+size {
block.Encrypt(decrypted[bs:be], data[bs:be])
}
return decrypted
}
func PKCS7Padding(ciphertext []byte, blockSize int) []byte {
padding := blockSize - len(ciphertext)%blockSize
padtext := bytes.Repeat([]byte{byte(padding)}, padding)
return append(ciphertext, padtext...)
}
func hmacSha1(data string, secret string) string {
h := hmac.New(sha1.New, []byte(secret))
h.Write([]byte(data))
return hex.EncodeToString(h.Sum(nil))
}
func getMd5(data []byte) []byte {
h := md5.New()
h.Write(data)
return h.Sum(nil)
}
func decodeURIComponent(str string) string {
r, _ := url.PathUnescape(str)
//r = strings.ReplaceAll(r, " ", "+")
return r
}
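// Random fills a UUID-style template: every 'x' becomes a random hex digit and
// every 'y' becomes one of 8, 9, a or b.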
func Random(v string) string {
reg := regexp.MustCompilePOSIX("[xy]")
data := reg.ReplaceAllFunc([]byte(v), func(msg []byte) []byte {
var i int64
t := int64(16 * myrand.Rand.Float32())
if msg[0] == 120 {
i = t
} else {
i = 3&t | 8
}
return []byte(strconv.FormatInt(i, 16))
})
return string(data)
}

24
drivers/189/meta.go Normal file

@ -0,0 +1,24 @@
package _189
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
Username string `json:"username" required:"true"`
Password string `json:"password" required:"true"`
driver.RootID
}
var config = driver.Config{
Name: "189Cloud",
LocalSort: true,
DefaultRoot: "-11",
}
func init() {
op.RegisterDriver(config, func() driver.Driver {
return &Cloud189{}
})
}

68
drivers/189/types.go Normal file

@ -0,0 +1,68 @@
package _189
type LoginResp struct {
Msg string `json:"msg"`
Result int `json:"result"`
ToUrl string `json:"toUrl"`
}
type Error struct {
ErrorCode string `json:"errorCode"`
ErrorMsg string `json:"errorMsg"`
}
type File struct {
Id int64 `json:"id"`
LastOpTime string `json:"lastOpTime"`
Name string `json:"name"`
Size int64 `json:"size"`
Icon struct {
SmallUrl string `json:"smallUrl"`
//LargeUrl string `json:"largeUrl"`
} `json:"icon"`
Url string `json:"url"`
}
type Folder struct {
Id int64 `json:"id"`
LastOpTime string `json:"lastOpTime"`
Name string `json:"name"`
}
type Files struct {
ResCode int `json:"res_code"`
ResMessage string `json:"res_message"`
FileListAO struct {
Count int `json:"count"`
FileList []File `json:"fileList"`
FolderList []Folder `json:"folderList"`
} `json:"fileListAO"`
}
type UploadUrlsResp struct {
Code string `json:"code"`
UploadUrls map[string]Part `json:"uploadUrls"`
}
type Part struct {
RequestURL string `json:"requestURL"`
RequestHeader string `json:"requestHeader"`
}
type Rsa struct {
Expire int64 `json:"expire"`
PkId string `json:"pkId"`
PubKey string `json:"pubKey"`
}
type Down struct {
ResCode int `json:"res_code"`
ResMessage string `json:"res_message"`
FileDownloadUrl string `json:"fileDownloadUrl"`
}
type DownResp struct {
ResCode int `json:"res_code"`
ResMessage string `json:"res_message"`
FileDownloadUrl string `json:"downloadUrl"`
}

394
drivers/189/util.go Normal file

@ -0,0 +1,394 @@
package _189
import (
"bytes"
"crypto/md5"
"encoding/base64"
"encoding/hex"
"errors"
"fmt"
"io"
"math"
"net/http"
"regexp"
"strconv"
"strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/setting"
"github.com/alist-org/alist/v3/pkg/utils"
myrand "github.com/alist-org/alist/v3/pkg/utils/random"
"github.com/go-resty/resty/v2"
jsoniter "github.com/json-iterator/go"
log "github.com/sirupsen/logrus"
)
// helpers that are not part of the Driver interface
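// login scrapes the cloud.189.cn login page for the lt, captchaToken, paramId
// and j_rsaKey values, solves the picture captcha through the configured OCR
// API when one is required, RSA-encrypts the credentials and submits them to
// open.e.189.cn, then follows the returned toUrl to establish the session.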
func (d *Cloud189) login() error {
url := "https://cloud.189.cn/api/portal/loginUrl.action?redirectURL=https%3A%2F%2Fcloud.189.cn%2Fmain.action"
b := ""
lt := ""
ltText := regexp.MustCompile(`lt = "(.+?)"`)
var res *resty.Response
var err error
for i := 0; i < 3; i++ {
res, err = d.client.R().Get(url)
if err != nil {
return err
}
// already logged in
if res.RawResponse.Request.URL.String() == "https://cloud.189.cn/web/main" {
return nil
}
b = res.String()
ltTextArr := ltText.FindStringSubmatch(b)
if len(ltTextArr) > 0 {
lt = ltTextArr[1]
break
} else {
<-time.After(time.Second)
}
}
if lt == "" {
return fmt.Errorf("get page: %s \nstatus: %d \nrequest url: %s\nredirect url: %s",
b, res.StatusCode(), res.RawResponse.Request.URL.String(), res.Header().Get("location"))
}
captchaToken := regexp.MustCompile(`captchaToken' value='(.+?)'`).FindStringSubmatch(b)[1]
returnUrl := regexp.MustCompile(`returnUrl = '(.+?)'`).FindStringSubmatch(b)[1]
paramId := regexp.MustCompile(`paramId = "(.+?)"`).FindStringSubmatch(b)[1]
//reqId := regexp.MustCompile(`reqId = "(.+?)"`).FindStringSubmatch(b)[1]
jRsakey := regexp.MustCompile(`j_rsaKey" value="(\S+)"`).FindStringSubmatch(b)[1]
vCodeID := regexp.MustCompile(`picCaptcha\.do\?token\=([A-Za-z0-9\&\=]+)`).FindStringSubmatch(b)[1]
vCodeRS := ""
if vCodeID != "" {
// need ValidateCode
log.Debugf("try to identify verification codes")
timeStamp := strconv.FormatInt(time.Now().UnixNano()/1e6, 10)
u := "https://open.e.189.cn/api/logbox/oauth2/picCaptcha.do?token=" + vCodeID + timeStamp
imgRes, err := d.client.R().SetHeaders(map[string]string{
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:74.0) Gecko/20100101 Firefox/76.0",
"Referer": "https://open.e.189.cn/api/logbox/oauth2/unifyAccountLogin.do",
"Sec-Fetch-Dest": "image",
"Sec-Fetch-Mode": "no-cors",
"Sec-Fetch-Site": "same-origin",
}).Get(u)
if err != nil {
return err
}
// Enter the verification code manually
//err = message.GetMessenger().WaitSend(message.Message{
// Type: "image",
// Content: "data:image/png;base64," + base64.StdEncoding.EncodeToString(imgRes.Body()),
//}, 10)
//if err != nil {
// return err
//}
//vCodeRS, err = message.GetMessenger().WaitReceive(30)
// use ocr api
vRes, err := base.RestyClient.R().SetMultipartField(
"image", "validateCode.png", "image/png", bytes.NewReader(imgRes.Body())).
Post(setting.GetStr(conf.OcrApi))
if err != nil {
return err
}
if jsoniter.Get(vRes.Body(), "status").ToInt() != 200 {
return errors.New("ocr error:" + jsoniter.Get(vRes.Body(), "msg").ToString())
}
vCodeRS = jsoniter.Get(vRes.Body(), "result").ToString()
log.Debugln("code: ", vCodeRS)
}
userRsa := RsaEncode([]byte(d.Username), jRsakey, true)
passwordRsa := RsaEncode([]byte(d.Password), jRsakey, true)
url = "https://open.e.189.cn/api/logbox/oauth2/loginSubmit.do"
var loginResp LoginResp
res, err = d.client.R().
SetHeaders(map[string]string{
"lt": lt,
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
"Referer": "https://open.e.189.cn/",
"accept": "application/json;charset=UTF-8",
}).SetFormData(map[string]string{
"appKey": "cloud",
"accountType": "01",
"userName": "{RSA}" + userRsa,
"password": "{RSA}" + passwordRsa,
"validateCode": vCodeRS,
"captchaToken": captchaToken,
"returnUrl": returnUrl,
"mailSuffix": "@pan.cn",
"paramId": paramId,
"clientType": "10010",
"dynamicCheck": "FALSE",
"cb_SaveName": "1",
"isOauth2": "false",
}).Post(url)
if err != nil {
return err
}
err = utils.Json.Unmarshal(res.Body(), &loginResp)
if err != nil {
log.Error(err.Error())
return err
}
if loginResp.Result != 0 {
return fmt.Errorf(loginResp.Msg)
}
_, err = d.client.R().Get(loginResp.ToUrl)
return err
}
func (d *Cloud189) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
var e Error
req := d.client.R().SetError(&e).
SetHeader("Accept", "application/json;charset=UTF-8").
SetQueryParams(map[string]string{
"noCache": random(),
})
if callback != nil {
callback(req)
}
if resp != nil {
req.SetResult(resp)
}
res, err := req.Execute(method, url)
if err != nil {
return nil, err
}
//log.Debug(res.String())
if e.ErrorCode != "" {
if e.ErrorCode == "InvalidSessionKey" {
err = d.login()
if err != nil {
return nil, err
}
return d.request(url, method, callback, resp)
}
}
if jsoniter.Get(res.Body(), "res_code").ToInt() != 0 {
err = errors.New(jsoniter.Get(res.Body(), "res_message").ToString())
}
return res.Body(), err
}
func (d *Cloud189) getFiles(fileId string) ([]model.Obj, error) {
res := make([]model.Obj, 0)
pageNum := 1
loc, _ := time.LoadLocation("Local")
for {
var resp Files
_, err := d.request("https://cloud.189.cn/api/open/file/listFiles.action", http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(map[string]string{
//"noCache": random(),
"pageSize": "60",
"pageNum": strconv.Itoa(pageNum),
"mediaType": "0",
"folderId": fileId,
"iconOption": "5",
"orderBy": "lastOpTime", //account.OrderBy
"descending": "true", //account.OrderDirection
})
}, &resp)
if err != nil {
return nil, err
}
if resp.FileListAO.Count == 0 {
break
}
for _, folder := range resp.FileListAO.FolderList {
lastOpTime, _ := time.ParseInLocation("2006-01-02 15:04:05", folder.LastOpTime, loc)
res = append(res, &model.Object{
ID: strconv.FormatInt(folder.Id, 10),
Name: folder.Name,
Modified: lastOpTime,
IsFolder: true,
})
}
for _, file := range resp.FileListAO.FileList {
lastOpTime, _ := time.ParseInLocation("2006-01-02 15:04:05", file.LastOpTime, loc)
res = append(res, &model.ObjThumb{
Object: model.Object{
ID: strconv.FormatInt(file.Id, 10),
Name: file.Name,
Modified: lastOpTime,
},
Thumbnail: model.Thumbnail{Thumbnail: file.Icon.SmallUrl},
})
}
pageNum++
}
return res, nil
}
func (d *Cloud189) oldUpload(dstDir model.Obj, file model.FileStreamer) error {
res, err := d.client.R().SetMultipartFormData(map[string]string{
"parentId": dstDir.GetID(),
"sessionKey": "??",
"opertype": "1",
"fname": file.GetName(),
}).SetMultipartField("Filedata", file.GetName(), file.GetMimetype(), file).Post("https://hb02.upload.cloud.189.cn/v1/DCIWebUploadAction")
if err != nil {
return err
}
if utils.Json.Get(res.Body(), "MD5").ToString() != "" {
return nil
}
log.Debugf(res.String())
return errors.New(res.String())
}
func (d *Cloud189) getSessionKey() (string, error) {
resp, err := d.request("https://cloud.189.cn/v2/getUserBriefInfo.action", http.MethodGet, nil, nil)
if err != nil {
return "", err
}
sessionKey := utils.Json.Get(resp, "sessionKey").ToString()
return sessionKey, nil
}
func (d *Cloud189) getResKey() (string, string, error) {
now := time.Now().UnixMilli()
if d.rsa.Expire > now {
return d.rsa.PubKey, d.rsa.PkId, nil
}
resp, err := d.request("https://cloud.189.cn/api/security/generateRsaKey.action", http.MethodGet, nil, nil)
if err != nil {
return "", "", err
}
pubKey, pkId := utils.Json.Get(resp, "pubKey").ToString(), utils.Json.Get(resp, "pkId").ToString()
d.rsa.PubKey, d.rsa.PkId = pubKey, pkId
d.rsa.Expire = utils.Json.Get(resp, "expire").ToInt64()
return pubKey, pkId, nil
}
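// uploadRequest AES-encrypts the query string with a random 16-byte key, signs
// the request with HMAC-SHA1 keyed by that random string, and sends the string
// itself RSA-encrypted in the EncryptionText header.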
func (d *Cloud189) uploadRequest(uri string, form map[string]string, resp interface{}) ([]byte, error) {
c := strconv.FormatInt(time.Now().UnixMilli(), 10)
r := Random("xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx")
l := Random("xxxxxxxxxxxx4xxxyxxxxxxxxxxxxxxx")
l = l[0 : 16+int(16*myrand.Rand.Float32())]
e := qs(form)
data := AesEncrypt([]byte(e), []byte(l[0:16]))
h := hex.EncodeToString(data)
sessionKey := d.sessionKey
signature := hmacSha1(fmt.Sprintf("SessionKey=%s&Operate=GET&RequestURI=%s&Date=%s&params=%s", sessionKey, uri, c, h), l)
pubKey, pkId, err := d.getResKey()
if err != nil {
return nil, err
}
b := RsaEncode([]byte(l), pubKey, false)
req := d.client.R().SetHeaders(map[string]string{
"accept": "application/json;charset=UTF-8",
"SessionKey": sessionKey,
"Signature": signature,
"X-Request-Date": c,
"X-Request-ID": r,
"EncryptionText": b,
"PkId": pkId,
})
if resp != nil {
req.SetResult(resp)
}
res, err := req.Get("https://upload.cloud.189.cn" + uri + "?params=" + h)
if err != nil {
return nil, err
}
data = res.Body()
if utils.Json.Get(data, "code").ToString() != "SUCCESS" {
return nil, errors.New(uri + "---" + jsoniter.Get(data, "msg").ToString())
}
return data, nil
}
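// newUpload implements the segmented upload flow: initMultiUpload reserves an
// uploadFileId, each 10 MiB slice is PUT to the URL (with headers) returned by
// getMultiUploadUrls, and commitMultiUploadFile finishes the upload with the
// full-file MD5 plus the MD5 of the joined per-slice hashes.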
func (d *Cloud189) newUpload(dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) error {
sessionKey, err := d.getSessionKey()
if err != nil {
return err
}
d.sessionKey = sessionKey
const DEFAULT int64 = 10485760
var count = int64(math.Ceil(float64(file.GetSize()) / float64(DEFAULT)))
res, err := d.uploadRequest("/person/initMultiUpload", map[string]string{
"parentFolderId": dstDir.GetID(),
"fileName": encode(file.GetName()),
"fileSize": strconv.FormatInt(file.GetSize(), 10),
"sliceSize": strconv.FormatInt(DEFAULT, 10),
"lazyCheck": "1",
}, nil)
if err != nil {
return err
}
uploadFileId := jsoniter.Get(res, "data", "uploadFileId").ToString()
//_, err = d.uploadRequest("/person/getUploadedPartsInfo", map[string]string{
// "uploadFileId": uploadFileId,
//}, nil)
var finish int64 = 0
var i int64
var byteSize int64
md5s := make([]string, 0)
md5Sum := md5.New()
for i = 1; i <= count; i++ {
byteSize = file.GetSize() - finish
if DEFAULT < byteSize {
byteSize = DEFAULT
}
//log.Debugf("%d,%d", byteSize, finish)
byteData := make([]byte, byteSize)
n, err := io.ReadFull(file, byteData)
//log.Debug(err, n)
if err != nil {
return err
}
finish += int64(n)
md5Bytes := getMd5(byteData)
md5Hex := hex.EncodeToString(md5Bytes)
md5Base64 := base64.StdEncoding.EncodeToString(md5Bytes)
md5s = append(md5s, strings.ToUpper(md5Hex))
md5Sum.Write(byteData)
var resp UploadUrlsResp
res, err = d.uploadRequest("/person/getMultiUploadUrls", map[string]string{
"partInfo": fmt.Sprintf("%s-%s", strconv.FormatInt(i, 10), md5Base64),
"uploadFileId": uploadFileId,
}, &resp)
if err != nil {
return err
}
uploadData := resp.UploadUrls["partNumber_"+strconv.FormatInt(i, 10)]
log.Debugf("uploadData: %+v", uploadData)
requestURL := uploadData.RequestURL
uploadHeaders := strings.Split(decodeURIComponent(uploadData.RequestHeader), "&")
req, _ := http.NewRequest(http.MethodPut, requestURL, bytes.NewReader(byteData))
for _, v := range uploadHeaders {
i := strings.Index(v, "=")
req.Header.Set(v[0:i], v[i+1:])
}
r, err := base.HttpClient.Do(req)
if err != nil {
return err
}
log.Debugf("%+v %+v", r, r.Request.Header)
r.Body.Close()
up(int(i * 100 / count))
}
fileMd5 := hex.EncodeToString(md5Sum.Sum(nil))
sliceMd5 := fileMd5
if file.GetSize() > DEFAULT {
sliceMd5 = utils.GetMD5Encode(strings.Join(md5s, "\n"))
}
res, err = d.uploadRequest("/person/commitMultiUploadFile", map[string]string{
"uploadFileId": uploadFileId,
"fileMd5": fileMd5,
"sliceMd5": sliceMd5,
"lazyCheck": "1",
"opertype": "3",
}, nil)
return err
}

278
drivers/189pc/driver.go Normal file

@ -0,0 +1,278 @@
package _189pc
import (
"context"
"net/http"
"strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
)
type Yun189PC struct {
model.Storage
Addition
identity string
client *resty.Client
putClient *resty.Client
loginParam *LoginParam
tokenInfo *AppSessionResp
}
func (y *Yun189PC) Config() driver.Config {
return config
}
func (y *Yun189PC) GetAddition() driver.Additional {
return y.Addition
}
func (y *Yun189PC) Init(ctx context.Context, storage model.Storage) (err error) {
y.Storage = storage
if err = utils.Json.UnmarshalFromString(y.Storage.Addition, &y.Addition); err != nil {
return err
}
// adjust parameters for personal cloud vs. family cloud
if y.isFamily() && y.RootFolderID == "-11" {
y.RootFolderID = ""
}
if !y.isFamily() && y.RootFolderID == "" {
y.RootFolderID = "-11"
y.FamilyID = ""
}
// initialize the request clients
if y.client == nil {
y.client = base.NewRestyClient().SetHeaders(map[string]string{
"Accept": "application/json;charset=UTF-8",
"Referer": WEB_URL,
})
}
if y.putClient == nil {
y.putClient = base.NewRestyClient().SetTimeout(120 * time.Second)
}
// avoid logging in again when the credentials are unchanged
identity := utils.GetMD5Encode(y.Username + y.Password)
if !y.isLogin() || y.identity != identity {
y.identity = identity
if err = y.login(); err != nil {
return
}
}
// resolve the family cloud ID if it is not set
if y.isFamily() && y.FamilyID == "" {
if y.FamilyID, err = y.getFamilyID(); err != nil {
return err
}
}
return
}
func (y *Yun189PC) Drop(ctx context.Context) error {
return nil
}
func (y *Yun189PC) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
return y.getFiles(ctx, dir.GetID())
}
func (y *Yun189PC) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
var downloadUrl struct {
URL string `json:"fileDownloadUrl"`
}
fullUrl := API_URL
if y.isFamily() {
fullUrl += "/family/file"
}
fullUrl += "/getFileDownloadUrl.action"
_, err := y.get(fullUrl, func(r *resty.Request) {
r.SetContext(ctx)
r.SetQueryParam("fileId", file.GetID())
if y.isFamily() {
r.SetQueryParams(map[string]string{
"familyId": y.FamilyID,
})
} else {
r.SetQueryParams(map[string]string{
"dt": "3",
"flag": "1",
})
}
}, &downloadUrl)
if err != nil {
return nil, err
}
// follow the redirect to get the real download link
downloadUrl.URL = strings.Replace(strings.ReplaceAll(downloadUrl.URL, "&amp;", "&"), "http://", "https://", 1)
res, err := base.NoRedirectClient.R().SetContext(ctx).Get(downloadUrl.URL)
if err != nil {
return nil, err
}
if res.StatusCode() == 302 {
downloadUrl.URL = res.Header().Get("location")
}
link := &model.Link{
URL: downloadUrl.URL,
Header: http.Header{
"User-Agent": []string{base.UserAgent},
},
}
/*
// parse the link's expiration time from the URL
strs := regexp.MustCompile(`(?i)expire[^=]*=([0-9]*)`).FindStringSubmatch(downloadUrl.URL)
if len(strs) == 2 {
timestamp, err := strconv.ParseInt(strs[1], 10, 64)
if err == nil {
expired := time.Duration(timestamp-time.Now().Unix()) * time.Second
link.Expiration = &expired
}
}
*/
return link, nil
}
func (y *Yun189PC) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
fullUrl := API_URL
if y.isFamily() {
fullUrl += "/family/file"
}
fullUrl += "/createFolder.action"
_, err := y.post(fullUrl, func(req *resty.Request) {
req.SetContext(ctx)
req.SetQueryParams(map[string]string{
"folderName": dirName,
"relativePath": "",
})
if y.isFamily() {
req.SetQueryParams(map[string]string{
"familyId": y.FamilyID,
"parentId": parentDir.GetID(),
})
} else {
req.SetQueryParams(map[string]string{
"parentFolderId": parentDir.GetID(),
})
}
}, nil)
return err
}
func (y *Yun189PC) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
_, err := y.post(API_URL+"/batch/createBatchTask.action", func(req *resty.Request) {
req.SetContext(ctx)
req.SetFormData(map[string]string{
"type": "MOVE",
"taskInfos": MustString(utils.Json.MarshalToString(
[]BatchTaskInfo{
{
FileId: srcObj.GetID(),
FileName: srcObj.GetName(),
IsFolder: BoolToNumber(srcObj.IsDir()),
},
})),
"targetFolderId": dstDir.GetID(),
})
if y.isFamily() {
req.SetFormData(map[string]string{
"familyId": y.FamilyID,
})
}
}, nil)
return err
}
func (y *Yun189PC) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
queryParam := make(map[string]string)
fullUrl := API_URL
method := http.MethodPost
if y.isFamily() {
fullUrl += "/family/file"
method = http.MethodGet
queryParam["familyId"] = y.FamilyID
}
if srcObj.IsDir() {
fullUrl += "/renameFolder.action"
queryParam["folderId"] = srcObj.GetID()
queryParam["destFolderName"] = newName
} else {
fullUrl += "/renameFile.action"
queryParam["fileId"] = srcObj.GetID()
queryParam["destFileName"] = newName
}
_, err := y.request(fullUrl, method, func(req *resty.Request) {
req.SetContext(ctx)
req.SetQueryParams(queryParam)
}, nil, nil)
return err
}
func (y *Yun189PC) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
_, err := y.post(API_URL+"/batch/createBatchTask.action", func(req *resty.Request) {
req.SetContext(ctx)
req.SetFormData(map[string]string{
"type": "COPY",
"taskInfos": MustString(utils.Json.MarshalToString(
[]BatchTaskInfo{
{
FileId: srcObj.GetID(),
FileName: srcObj.GetName(),
IsFolder: BoolToNumber(srcObj.IsDir()),
},
})),
"targetFolderId": dstDir.GetID(),
"targetFileName": dstDir.GetName(),
})
if y.isFamily() {
req.SetFormData(map[string]string{
"familyId": y.FamilyID,
})
}
}, nil)
return err
}
func (y *Yun189PC) Remove(ctx context.Context, obj model.Obj) error {
_, err := y.post(API_URL+"/batch/createBatchTask.action", func(req *resty.Request) {
req.SetContext(ctx)
req.SetFormData(map[string]string{
"type": "DELETE",
"taskInfos": MustString(utils.Json.MarshalToString(
[]*BatchTaskInfo{
{
FileId: obj.GetID(),
FileName: obj.GetName(),
IsFolder: BoolToNumber(obj.IsDir()),
},
})),
})
if y.isFamily() {
req.SetFormData(map[string]string{
"familyId": y.FamilyID,
})
}
}, nil)
return err
}
func (y *Yun189PC) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
if y.RapidUpload {
return y.FastUpload(ctx, dstDir, stream, up)
}
return y.CommonUpload(ctx, dstDir, stream, up)
}

131
drivers/189pc/help.go Normal file

@ -0,0 +1,131 @@
package _189pc
import (
"bytes"
"crypto/aes"
"crypto/hmac"
"crypto/rand"
"crypto/rsa"
"crypto/sha1"
"crypto/x509"
"encoding/hex"
"encoding/pem"
"fmt"
"net/http"
"regexp"
"strings"
"time"
"github.com/alist-org/alist/v3/pkg/utils/random"
)
func clientSuffix() map[string]string {
rand := random.Rand
return map[string]string{
"clientType": PC,
"version": VERSION,
"channelId": CHANNEL_ID,
"rand": fmt.Sprintf("%d_%d", rand.Int63n(1e5), rand.Int63n(1e10)),
}
}
// signatureOfHmac computes the HMAC-SHA1 signature, optionally including the params field
func signatureOfHmac(sessionSecret, sessionKey, operate, fullUrl, dateOfGmt, param string) string {
urlpath := regexp.MustCompile(`://[^/]+((/[^/\s?#]+)*)`).FindStringSubmatch(fullUrl)[1]
mac := hmac.New(sha1.New, []byte(sessionSecret))
data := fmt.Sprintf("SessionKey=%s&Operate=%s&RequestURI=%s&Date=%s", sessionKey, operate, urlpath, dateOfGmt)
if param != "" {
data += fmt.Sprintf("&params=%s", param)
}
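// data is the canonical string-to-sign: SessionKey=<key>&Operate=<HTTP method>&RequestURI=<url path>&Date=<GMT date>[&params=<encrypted params>]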
mac.Write([]byte(data))
return strings.ToUpper(hex.EncodeToString(mac.Sum(nil)))
}
// RsaEncrypt encrypts the username/password with the RSA public key (PKCS#1 v1.5), returned as upper-case hex
func RsaEncrypt(publicKey, origData string) string {
block, _ := pem.Decode([]byte(publicKey))
pubInterface, _ := x509.ParsePKIXPublicKey(block.Bytes)
data, _ := rsa.EncryptPKCS1v15(rand.Reader, pubInterface.(*rsa.PublicKey), []byte(origData))
return strings.ToUpper(hex.EncodeToString(data))
}
// AesECBEncrypt encrypts the request params with AES in ECB mode and PKCS#7 padding
func AesECBEncrypt(data, key string) string {
block, _ := aes.NewCipher([]byte(key))
paddingData := PKCS7Padding([]byte(data), block.BlockSize())
decrypted := make([]byte, len(paddingData))
size := block.BlockSize()
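// the standard library exposes no ECB mode, so each 16-byte block is encrypted independently in this loop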
for src, dst := paddingData, decrypted; len(src) > 0; src, dst = src[size:], dst[size:] {
block.Encrypt(dst[:size], src[:size])
}
return strings.ToUpper(hex.EncodeToString(decrypted))
}
func PKCS7Padding(ciphertext []byte, blockSize int) []byte {
padding := blockSize - len(ciphertext)%blockSize
padtext := bytes.Repeat([]byte{byte(padding)}, padding)
return append(ciphertext, padtext...)
}
// getHttpDateStr returns the current time in RFC 7231 (HTTP Date header) format
func getHttpDateStr() string {
return time.Now().UTC().Format(http.TimeFormat)
}
// timestamp returns the current Unix time in milliseconds (UTC)
func timestamp() int64 {
return time.Now().UTC().UnixNano() / 1e6
}
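// MustParseTime parses the API's "2006-01-02 15:04:05" timestamps as UTC+8 (China Standard Time);
// parse errors are ignored and yield the zero time.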
func MustParseTime(str string) *time.Time {
lastOpTime, _ := time.ParseInLocation("2006-01-02 15:04:05 -07", str+" +08", time.Local)
return &lastOpTime
}
func toFamilyOrderBy(o string) string {
switch o {
case "filename":
return "1"
case "filesize":
return "2"
case "lastOpTime":
return "3"
default:
return "1"
}
}
func toDesc(o string) string {
switch o {
case "desc":
return "true"
case "asc":
fallthrough
default:
return "false"
}
}
func ParseHttpHeader(str string) map[string]string {
header := make(map[string]string)
for _, value := range strings.Split(str, "&") {
i := strings.Index(value, "=")
header[strings.TrimSpace(value[0:i])] = strings.TrimSpace(value[i+1:])
}
return header
}
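// MustString and MustToBytes drop the error from a (value, error) pair so calls like
// utils.Json.MarshalToString can be used inline; any marshal error is silently ignored.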
func MustString(str string, err error) string {
return str
}
func MustToBytes(b []byte, err error) []byte {
return b
}
func BoolToNumber(b bool) int {
if b {
return 1
}
return 0
}

drivers/189pc/meta.go Normal file

@ -0,0 +1,29 @@
package _189pc
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
Username string `json:"username" required:"true"`
Password string `json:"password" required:"true"`
VCode string `json:"validate_code"`
driver.RootID
OrderBy string `json:"order_by" type:"select" options:"filename,filesize,lastOpTime" default:"filename"`
OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
Type string `json:"type" type:"select" options:"personal,family" default:"personal"`
FamilyID string `json:"family_id"`
RapidUpload bool `json:"rapid_upload"`
}
var config = driver.Config{
Name: "189CloudPC",
DefaultRoot: "-11",
}
func init() {
op.RegisterDriver(config, func() driver.Driver {
return &Yun189PC{}
})
}

drivers/189pc/types.go Normal file

@ -0,0 +1,246 @@
package _189pc
import (
"encoding/xml"
"fmt"
"sort"
"strings"
"time"
)
// RespErr covers the four different error formats the API may return
type RespErr struct {
ResCode string `json:"res_code"`
ResMessage string `json:"res_message"`
XMLName xml.Name `xml:"error"`
Code string `json:"code" xml:"code"`
Message string `json:"message" xml:"message"`
// Code string `json:"code"`
Msg string `json:"msg"`
ErrorCode string `json:"errorCode"`
ErrorMsg string `json:"errorMsg"`
}
// LoginParam holds the parameters required for login
type LoginParam struct {
// RSA-encrypted username and password
RsaUsername string
RsaPassword string
// RSA public key
jRsaKey string
// request header parameters
Lt string
ReqId string
// form parameter
ParamId string
// captcha token
CaptchaToken string
}
// EncryptConfResp is the login encryption config (RSA public key etc.)
type EncryptConfResp struct {
Result int `json:"result"`
Data struct {
UpSmsOn string `json:"upSmsOn"`
Pre string `json:"pre"`
PreDomain string `json:"preDomain"`
PubKey string `json:"pubKey"`
} `json:"data"`
}
type LoginResp struct {
Msg string `json:"msg"`
Result int `json:"result"`
ToUrl string `json:"toUrl"`
}
// UserSessionResp is returned when refreshing the session
type UserSessionResp struct {
ResCode int `json:"res_code"`
ResMessage string `json:"res_message"`
LoginName string `json:"loginName"`
KeepAlive int `json:"keepAlive"`
GetFileDiffSpan int `json:"getFileDiffSpan"`
GetUserInfoSpan int `json:"getUserInfoSpan"`
// personal cloud
SessionKey string `json:"sessionKey"`
SessionSecret string `json:"sessionSecret"`
// family cloud
FamilySessionKey string `json:"familySessionKey"`
FamilySessionSecret string `json:"familySessionSecret"`
}
// AppSessionResp is returned on login
type AppSessionResp struct {
UserSessionResp
IsSaveName string `json:"isSaveName"`
// token used to refresh the session
AccessToken string `json:"accessToken"`
// token refresh
RefreshToken string `json:"refreshToken"`
}
// FamilyInfoListResp lists the family-cloud accounts
type FamilyInfoListResp struct {
FamilyInfoResp []FamilyInfoResp `json:"familyInfoResp"`
}
type FamilyInfoResp struct {
Count int `json:"count"`
CreateTime string `json:"createTime"`
FamilyID int `json:"familyId"`
RemarkName string `json:"remarkName"`
Type int `json:"type"`
UseFlag int `json:"useFlag"`
UserRole int `json:"userRole"`
}
/* file section */
// Cloud189File is a file
type Cloud189File struct {
CreateDate string `json:"createDate"`
FileCata int64 `json:"fileCata"`
Icon struct {
//iconOption 5
SmallUrl string `json:"smallUrl"`
LargeUrl string `json:"largeUrl"`
// iconOption 10
Max600 string `json:"max600"`
MediumURL string `json:"mediumUrl"`
} `json:"icon"`
ID int64 `json:"id"`
LastOpTime string `json:"lastOpTime"`
Md5 string `json:"md5"`
MediaType int `json:"mediaType"`
Name string `json:"name"`
Orientation int64 `json:"orientation"`
Rev string `json:"rev"`
Size int64 `json:"size"`
StarLabel int64 `json:"starLabel"`
parseTime *time.Time
}
func (c *Cloud189File) GetSize() int64 { return c.Size }
func (c *Cloud189File) GetName() string { return c.Name }
func (c *Cloud189File) ModTime() time.Time {
if c.parseTime == nil {
c.parseTime = MustParseTime(c.LastOpTime)
}
return *c.parseTime
}
func (c *Cloud189File) IsDir() bool { return false }
func (c *Cloud189File) GetID() string { return fmt.Sprint(c.ID) }
func (c *Cloud189File) GetPath() string { return "" }
func (c *Cloud189File) Thumb() string { return c.Icon.SmallUrl }
// Cloud189Folder is a folder
type Cloud189Folder struct {
ID int64 `json:"id"`
ParentID int64 `json:"parentId"`
Name string `json:"name"`
FileCata int64 `json:"fileCata"`
FileCount int64 `json:"fileCount"`
LastOpTime string `json:"lastOpTime"`
CreateDate string `json:"createDate"`
FileListSize int64 `json:"fileListSize"`
Rev string `json:"rev"`
StarLabel int64 `json:"starLabel"`
parseTime *time.Time
}
func (c *Cloud189Folder) GetSize() int64 { return 0 }
func (c *Cloud189Folder) GetName() string { return c.Name }
func (c *Cloud189Folder) ModTime() time.Time {
if c.parseTime == nil {
c.parseTime = MustParseTime(c.LastOpTime)
}
return *c.parseTime
}
func (c *Cloud189Folder) IsDir() bool { return true }
func (c *Cloud189Folder) GetID() string { return fmt.Sprint(c.ID) }
func (c *Cloud189Folder) GetPath() string { return "" }
type Cloud189FilesResp struct {
//ResCode int `json:"res_code"`
//ResMessage string `json:"res_message"`
FileListAO struct {
Count int `json:"count"`
FileList []Cloud189File `json:"fileList"`
FolderList []Cloud189Folder `json:"folderList"`
} `json:"fileListAO"`
}
// BatchTaskInfo describes one item of a batch task
type BatchTaskInfo struct {
// FileId is the file ID
FileId string `json:"fileId"`
// FileName is the file name
FileName string `json:"fileName"`
// IsFolder marks whether the item is a folder (0 = no, 1 = yes)
IsFolder int `json:"isFolder"`
// SrcParentId is the ID of the file's parent directory
//SrcParentId string `json:"srcParentId"`
}
/* upload section */
type InitMultiUploadResp struct {
//Code string `json:"code"`
Data struct {
UploadType int `json:"uploadType"`
UploadHost string `json:"uploadHost"`
UploadFileID string `json:"uploadFileId"`
FileDataExists int `json:"fileDataExists"`
} `json:"data"`
}
type UploadUrlsResp struct {
Code string `json:"code"`
UploadUrls map[string]Part `json:"uploadUrls"`
}
type Part struct {
RequestURL string `json:"requestURL"`
RequestHeader string `json:"requestHeader"`
}
type Params map[string]string
func (p Params) Set(k, v string) {
p[k] = v
}
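// Encode serializes the params as key=value pairs sorted by key and joined with '&'.
// Values are not URL-escaped; the result is the string that gets AES-encrypted and signed in request().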
func (p Params) Encode() string {
if p == nil {
return ""
}
var buf strings.Builder
keys := make([]string, 0, len(p))
for k := range p {
keys = append(keys, k)
}
sort.Strings(keys)
for i := range keys {
if buf.Len() > 0 {
buf.WriteByte('&')
}
buf.WriteString(keys[i])
buf.WriteByte('=')
buf.WriteString(p[keys[i]])
}
return buf.String()
}

drivers/189pc/utils.go Normal file

@ -0,0 +1,659 @@
package _189pc
import (
"bytes"
"context"
"crypto/md5"
"encoding/base64"
"encoding/hex"
"fmt"
"io"
"math"
"net/http"
"net/http/cookiejar"
"net/url"
"os"
"regexp"
"strings"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/internal/setting"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
"github.com/google/uuid"
jsoniter "github.com/json-iterator/go"
)
const (
ACCOUNT_TYPE = "02"
APP_ID = "8025431004"
CLIENT_TYPE = "10020"
VERSION = "6.2"
WEB_URL = "https://cloud.189.cn"
AUTH_URL = "https://open.e.189.cn"
API_URL = "https://api.cloud.189.cn"
UPLOAD_URL = "https://upload.cloud.189.cn"
RETURN_URL = "https://m.cloud.189.cn/zhuanti/2020/loginErrorPc/index.html"
PC = "TELEPC"
MAC = "TELEMAC"
CHANNEL_ID = "web_cloud.189.cn"
)
func (y *Yun189PC) request(url, method string, callback base.ReqCallback, params Params, resp interface{}) ([]byte, error) {
dateOfGmt := getHttpDateStr()
sessionKey := y.tokenInfo.SessionKey
sessionSecret := y.tokenInfo.SessionSecret
if y.isFamily() {
sessionKey = y.tokenInfo.FamilySessionKey
sessionSecret = y.tokenInfo.FamilySessionSecret
}
req := y.client.R().SetQueryParams(clientSuffix()).SetHeaders(map[string]string{
"Date": dateOfGmt,
"SessionKey": sessionKey,
"X-Request-ID": uuid.NewString(),
})
// encrypt the optional params with AES-ECB using the first 16 bytes of the session secret and attach them to the query
var paramsData string
if params != nil {
paramsData = AesECBEncrypt(params.Encode(), sessionSecret[:16])
req.SetQueryParam("params", paramsData)
}
req.SetHeader("Signature", signatureOfHmac(sessionSecret, sessionKey, method, url, dateOfGmt, paramsData))
if callback != nil {
callback(req)
}
if resp != nil {
req.SetResult(resp)
}
res, err := req.Execute(method, url)
if err != nil {
return nil, err
}
var erron RespErr
utils.Json.Unmarshal(res.Body(), &erron)
if erron.ResCode != "" {
return nil, fmt.Errorf("res_code: %s ,res_msg: %s", erron.ResCode, erron.ResMessage)
}
if erron.Code != "" && erron.Code != "SUCCESS" {
if erron.Msg != "" {
return nil, fmt.Errorf("code: %s ,msg: %s", erron.Code, erron.Msg)
}
if erron.Message != "" {
return nil, fmt.Errorf("code: %s ,msg: %s", erron.Code, erron.Message)
}
return nil, fmt.Errorf("%s", res.String())
}
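// A stale session is refreshed and the request replayed below (InvalidSessionKey / "userSessionBO is null");
// note there is no retry cap, so a session that keeps failing would recurse.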
switch erron.ErrorCode {
case "":
break
case "InvalidSessionKey":
if err = y.refreshSession(); err != nil {
return nil, err
}
return y.request(url, method, callback, params, resp)
default:
return nil, fmt.Errorf("err_code: %s ,err_msg: %s", erron.ErrorCode, erron.ErrorMsg)
}
if strings.Contains(res.String(), "userSessionBO is null") {
if err = y.refreshSession(); err != nil {
return nil, err
}
return y.request(url, method, callback, params, resp)
}
resCode := utils.Json.Get(res.Body(), "res_code").ToInt64()
message := utils.Json.Get(res.Body(), "res_message").ToString()
switch resCode {
case 0:
return res.Body(), nil
default:
return nil, fmt.Errorf("res_code: %d ,res_msg: %s", resCode, message)
}
}
func (y *Yun189PC) get(url string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
return y.request(url, http.MethodGet, callback, nil, resp)
}
func (y *Yun189PC) post(url string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
return y.request(url, http.MethodPost, callback, nil, resp)
}
func (y *Yun189PC) getFiles(ctx context.Context, fileId string) ([]model.Obj, error) {
fullUrl := API_URL
if y.isFamily() {
fullUrl += "/family/file"
}
fullUrl += "/listFiles.action"
res := make([]model.Obj, 0, 130)
for pageNum := 1; ; pageNum++ {
var resp Cloud189FilesResp
_, err := y.get(fullUrl, func(r *resty.Request) {
r.SetContext(ctx)
r.SetQueryParams(map[string]string{
"folderId": fileId,
"fileType": "0",
"mediaAttr": "0",
"iconOption": "5",
"pageNum": fmt.Sprint(pageNum),
"pageSize": "130",
})
if y.isFamily() {
r.SetQueryParams(map[string]string{
"familyId": y.FamilyID,
"orderBy": toFamilyOrderBy(y.OrderBy),
"descending": toDesc(y.OrderDirection),
})
} else {
r.SetQueryParams(map[string]string{
"recursive": "0",
"orderBy": y.OrderBy,
"descending": toDesc(y.OrderDirection),
})
}
}, &resp)
if err != nil {
return nil, err
}
// stop once an empty page is returned
if resp.FileListAO.Count == 0 {
break
}
for i := 0; i < len(resp.FileListAO.FolderList); i++ {
res = append(res, &resp.FileListAO.FolderList[i])
}
for i := 0; i < len(resp.FileListAO.FileList); i++ {
res = append(res, &resp.FileListAO.FileList[i])
}
}
return res, nil
}
func (y *Yun189PC) login() (err error) {
// initialize the login parameters if they are not present yet
if y.loginParam == nil {
if err = y.initLoginParam(); err != nil {
// a captcha requirement is also reported via this error
return err
}
}
defer func() {
// discard the used captcha code
y.VCode = ""
// discard the used login parameters
y.loginParam = nil
// on error, reload the login parameters for the next attempt
if err != nil {
if err1 := y.initLoginParam(); err1 != nil {
err = fmt.Errorf("err1: %s \nerr2: %s", err, err1)
}
}
}()
param := y.loginParam
var loginresp LoginResp
_, err = y.client.R().
ForceContentType("application/json;charset=UTF-8").SetResult(&loginresp).
SetHeaders(map[string]string{
"REQID": param.ReqId,
"lt": param.Lt,
}).
SetFormData(map[string]string{
"appKey": APP_ID,
"accountType": ACCOUNT_TYPE,
"userName": param.RsaUsername,
"password": param.RsaPassword,
"validateCode": y.VCode,
"captchaToken": param.CaptchaToken,
"returnUrl": RETURN_URL,
"mailSuffix": "@189.cn",
"dynamicCheck": "FALSE",
"clientType": CLIENT_TYPE,
"cb_SaveName": "1",
"isOauth2": "false",
"state": "",
"paramId": param.ParamId,
}).
Post(AUTH_URL + "/api/logbox/oauth2/loginSubmit.do")
if err != nil {
return err
}
if loginresp.ToUrl == "" {
return fmt.Errorf("login failed,No toUrl obtained, msg: %s", loginresp.Msg)
}
// exchange the login result for a session
var erron RespErr
var tokenInfo AppSessionResp
_, err = y.client.R().
SetResult(&tokenInfo).SetError(&erron).
SetQueryParams(clientSuffix()).
SetQueryParam("redirectURL", url.QueryEscape(loginresp.ToUrl)).
Post(API_URL + "/getSessionForPC.action")
if err != nil {
return
}
if erron.ResCode != "" {
err = fmt.Errorf(erron.ResMessage)
return
}
if tokenInfo.ResCode != 0 {
err = fmt.Errorf(tokenInfo.ResMessage)
return
}
y.tokenInfo = &tokenInfo
return
}
/* initLoginParam initializes the parameters required for login.
 * If a captcha is required, that is reported as an error.
 */
func (y *Yun189PC) initLoginParam() error {
// clear cookies
jar, _ := cookiejar.New(nil)
y.client.SetCookieJar(jar)
res, err := y.client.R().
SetQueryParams(map[string]string{
"appId": APP_ID,
"clientType": CLIENT_TYPE,
"returnURL": RETURN_URL,
"timeStamp": fmt.Sprint(timestamp()),
}).
Get(WEB_URL + "/api/portal/unifyLoginForPC.action")
if err != nil {
return err
}
param := LoginParam{
CaptchaToken: regexp.MustCompile(`'captchaToken' value='(.+?)'`).FindStringSubmatch(res.String())[1],
Lt: regexp.MustCompile(`lt = "(.+?)"`).FindStringSubmatch(res.String())[1],
ParamId: regexp.MustCompile(`paramId = "(.+?)"`).FindStringSubmatch(res.String())[1],
ReqId: regexp.MustCompile(`reqId = "(.+?)"`).FindStringSubmatch(res.String())[1],
// jRsaKey: regexp.MustCompile(`"j_rsaKey" value="(.+?)"`).FindStringSubmatch(res.String())[1],
}
// fetch the RSA public key
var encryptConf EncryptConfResp
_, err = y.client.R().
ForceContentType("application/json;charset=UTF-8").SetResult(&encryptConf).
SetFormData(map[string]string{"appId": APP_ID}).
Post(AUTH_URL + "/api/logbox/config/encryptConf.do")
if err != nil {
return err
}
param.jRsaKey = fmt.Sprintf("-----BEGIN PUBLIC KEY-----\n%s\n-----END PUBLIC KEY-----", encryptConf.Data.PubKey)
param.RsaUsername = encryptConf.Data.Pre + RsaEncrypt(param.jRsaKey, y.Username)
param.RsaPassword = encryptConf.Data.Pre + RsaEncrypt(param.jRsaKey, y.Password)
// check whether a captcha is required
res, err = y.client.R().
SetFormData(map[string]string{
"appKey": APP_ID,
"accountType": ACCOUNT_TYPE,
"userName": param.RsaUsername,
}).
Post(AUTH_URL + "/api/logbox/oauth2/needcaptcha.do")
if err != nil {
return err
}
y.loginParam = &param
if res.String() != "0" {
imgRes, err := y.client.R().
SetQueryParams(map[string]string{
"token": param.CaptchaToken,
"REQID": param.ReqId,
"rnd": fmt.Sprint(timestamp()),
}).
Get(AUTH_URL + "/api/logbox/oauth2/picCaptcha.do")
if err != nil {
return fmt.Errorf("failed to obtain verification code")
}
// try to solve the captcha with the configured OCR API
vRes, err := base.RestyClient.R().
SetMultipartField("image", "validateCode.png", "image/png", bytes.NewReader(imgRes.Body())).
Post(setting.GetStr(conf.OcrApi))
if err == nil && jsoniter.Get(vRes.Body(), "status").ToInt() == 200 {
y.VCode = jsoniter.Get(vRes.Body(), "result").ToString()
}
// if OCR cannot solve it, return the captcha image to the frontend
if len(y.VCode) != 4 {
return fmt.Errorf("need validate code: data:image/png;base64,%s", base64.StdEncoding.EncodeToString(imgRes.Body()))
}
}
return nil
}
// refreshSession refreshes the API session
func (y *Yun189PC) refreshSession() (err error) {
var erron RespErr
var userSessionResp UserSessionResp
_, err = y.client.R().
SetResult(&userSessionResp).SetError(&erron).
SetQueryParams(clientSuffix()).
SetQueryParams(map[string]string{
"appId": APP_ID,
"accessToken": y.tokenInfo.AccessToken,
}).
SetHeader("X-Request-ID", uuid.NewString()).
Get(API_URL + "/getSessionForPC.action")
if err != nil {
return err
}
// an error here breaks normal access, so mark the storage as offline
defer func() {
if err != nil {
y.GetStorage().SetStatus(fmt.Sprintf("%+v", err.Error()))
op.MustSaveDriverStorage(y)
}
}()
switch erron.ResCode {
case "":
break
case "UserInvalidOpenToken":
if err = y.login(); err != nil {
return err
}
default:
err = fmt.Errorf("res_code: %s ,res_msg: %s", erron.ResCode, erron.ResMessage)
return
}
switch userSessionResp.ResCode {
case 0:
y.tokenInfo.UserSessionResp = userSessionResp
default:
err = fmt.Errorf("code: %d , msg: %s", userSessionResp.ResCode, userSessionResp.ResMessage)
}
return
}
// CommonUpload is the normal (chunked) upload path
func (y *Yun189PC) CommonUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (err error) {
const DEFAULT int64 = 10485760
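// DEFAULT is the multipart slice size: 10485760 bytes = 10 MiB per part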
var count = int64(math.Ceil(float64(file.GetSize()) / float64(DEFAULT)))
params := Params{
"parentFolderId": dstDir.GetID(),
"fileName": url.QueryEscape(file.GetName()),
"fileSize": fmt.Sprint(file.GetSize()),
"sliceSize": fmt.Sprint(DEFAULT),
"lazyCheck": "1",
}
fullUrl := UPLOAD_URL
if y.isFamily() {
params.Set("familyId", y.FamilyID)
fullUrl += "/family"
} else {
//params.Set("extend", `{"opScene":"1","relativepath":"","rootfolderid":""}`)
fullUrl += "/person"
}
// initialize the multipart upload
var initMultiUpload InitMultiUploadResp
_, err = y.request(fullUrl+"/initMultiUpload", http.MethodGet, func(req *resty.Request) {
req.SetContext(ctx)
}, params, &initMultiUpload)
if err != nil {
return err
}
fileMd5 := md5.New()
silceMd5 := md5.New()
silceMd5Hexs := make([]string, 0, count)
byteData := bytes.NewBuffer(make([]byte, DEFAULT))
for i := int64(1); i <= count; i++ {
select {
case <-ctx.Done():
return ctx.Err()
default:
}
// read the next chunk
byteData.Reset()
silceMd5.Reset()
_, err := io.CopyN(io.MultiWriter(fileMd5, silceMd5, byteData), file, DEFAULT)
if err != io.EOF && err != io.ErrUnexpectedEOF && err != nil {
return err
}
// compute the chunk MD5 and encode it as hex and base64
md5Bytes := silceMd5.Sum(nil)
silceMd5Hexs = append(silceMd5Hexs, strings.ToUpper(hex.EncodeToString(md5Bytes)))
silceMd5Base64 := base64.StdEncoding.EncodeToString(md5Bytes)
// fetch the upload URL for this part
var uploadUrl UploadUrlsResp
_, err = y.request(fullUrl+"/getMultiUploadUrls", http.MethodGet,
func(req *resty.Request) {
req.SetContext(ctx)
}, Params{
"partInfo": fmt.Sprintf("%d-%s", i, silceMd5Base64),
"uploadFileId": initMultiUpload.Data.UploadFileID,
}, &uploadUrl)
if err != nil {
return err
}
// upload the part
uploadData := uploadUrl.UploadUrls[fmt.Sprint("partNumber_", i)]
res, err := y.putClient.R().
SetContext(ctx).
SetQueryParams(clientSuffix()).
SetHeaders(ParseHttpHeader(uploadData.RequestHeader)).
SetBody(byteData).
Put(uploadData.RequestURL)
if err != nil {
return err
}
if res.StatusCode() != http.StatusOK {
return fmt.Errorf("updload fail,msg: %s", res.String())
}
up(int(i * 100 / count))
}
fileMd5Hex := strings.ToUpper(hex.EncodeToString(fileMd5.Sum(nil)))
sliceMd5Hex := fileMd5Hex
if file.GetSize() > DEFAULT {
sliceMd5Hex = strings.ToUpper(utils.GetMD5Encode(strings.Join(silceMd5Hexs, "\n")))
}
// commit the multipart upload
_, err = y.request(fullUrl+"/commitMultiUploadFile", http.MethodGet,
func(req *resty.Request) {
req.SetContext(ctx)
}, Params{
"uploadFileId": initMultiUpload.Data.UploadFileID,
"fileMd5": fileMd5Hex,
"sliceMd5": sliceMd5Hex,
"lazyCheck": "1",
"isLog": "0",
"opertype": "3",
}, nil)
return err
}
// FastUpload is the rapid-upload (hash check) path
func (y *Yun189PC) FastUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (err error) {
// the whole-file MD5 is needed, so the stream must support io.Seek; buffer it to a temp file
tempFile, err := utils.CreateTempFile(file.GetReadCloser())
if err != nil {
return err
}
defer func() {
_ = tempFile.Close()
_ = os.Remove(tempFile.Name())
}()
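// FastUpload streams the file once to compute the whole-file MD5 and the per-slice MD5s,
// asks the server whether the content already exists (FileDataExists), and only uploads the parts when it does not.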
const DEFAULT int64 = 10485760
count := int(math.Ceil(float64(file.GetSize()) / float64(DEFAULT)))
// compute the required hashes first
fileMd5 := md5.New()
silceMd5 := md5.New()
silceMd5Hexs := make([]string, 0, count)
silceMd5Base64s := make([]string, 0, count)
for i := 1; i <= count; i++ {
select {
case <-ctx.Done():
return ctx.Err()
default:
}
silceMd5.Reset()
if _, err := io.CopyN(io.MultiWriter(fileMd5, silceMd5), tempFile, DEFAULT); err != nil && err != io.EOF && err != io.ErrUnexpectedEOF {
return err
}
md5Byte := silceMd5.Sum(nil)
silceMd5Hexs = append(silceMd5Hexs, strings.ToUpper(hex.EncodeToString(md5Byte)))
silceMd5Base64s = append(silceMd5Base64s, fmt.Sprint(i, "-", base64.StdEncoding.EncodeToString(md5Byte)))
}
if _, err = tempFile.Seek(0, io.SeekStart); err != nil {
return err
}
fileMd5Hex := strings.ToUpper(hex.EncodeToString(fileMd5.Sum(nil)))
sliceMd5Hex := fileMd5Hex
if file.GetSize() > DEFAULT {
sliceMd5Hex = strings.ToUpper(utils.GetMD5Encode(strings.Join(silceMd5Hexs, "\n")))
}
// check whether rapid upload is possible
params := Params{
"parentFolderId": dstDir.GetID(),
"fileName": url.QueryEscape(file.GetName()),
"fileSize": fmt.Sprint(file.GetSize()),
"fileMd5": fileMd5Hex,
"sliceSize": fmt.Sprint(DEFAULT),
"sliceMd5": sliceMd5Hex,
}
fullUrl := UPLOAD_URL
if y.isFamily() {
params.Set("familyId", y.FamilyID)
fullUrl += "/family"
} else {
//params.Set("extend", `{"opScene":"1","relativepath":"","rootfolderid":""}`)
fullUrl += "/person"
}
var uploadInfo InitMultiUploadResp
_, err = y.request(fullUrl+"/initMultiUpload", http.MethodGet, func(req *resty.Request) {
req.SetContext(ctx)
}, params, &uploadInfo)
if err != nil {
return err
}
// the file does not exist on the server yet, upload the data
if uploadInfo.Data.FileDataExists != 1 {
var uploadUrls UploadUrlsResp
_, err = y.request(fullUrl+"/getMultiUploadUrls", http.MethodGet,
func(req *resty.Request) {
req.SetContext(ctx)
}, Params{
"uploadFileId": uploadInfo.Data.UploadFileID,
"partInfo": strings.Join(silceMd5Base64s, ","),
}, &uploadUrls)
if err != nil {
return err
}
for i := 1; i <= count; i++ {
select {
case <-ctx.Done():
return ctx.Err()
default:
}
uploadData := uploadUrls.UploadUrls[fmt.Sprint("partNumber_", i)]
res, err := y.putClient.R().
SetContext(ctx).
SetQueryParams(clientSuffix()).
SetHeaders(ParseHttpHeader(uploadData.RequestHeader)).
SetBody(io.LimitReader(tempFile, DEFAULT)).
Put(uploadData.RequestURL)
if err != nil {
return err
}
if res.StatusCode() != http.StatusOK {
return fmt.Errorf("updload fail,msg: %s", res.String())
}
up(int(i * 100 / count))
}
}
// commit the upload
_, err = y.request(fullUrl+"/commitMultiUploadFile", http.MethodGet,
func(req *resty.Request) {
req.SetContext(ctx)
}, Params{
"uploadFileId": uploadInfo.Data.UploadFileID,
"isLog": "0",
"opertype": "3",
}, nil)
return err
}
func (y *Yun189PC) isFamily() bool {
return y.Type == "family"
}
func (y *Yun189PC) isLogin() bool {
if y.tokenInfo == nil {
return false
}
_, err := y.get(API_URL+"/getUserInfo.action", nil, nil)
return err == nil
}
// getFamilyInfoList fetches all family-cloud accounts
func (y *Yun189PC) getFamilyInfoList() ([]FamilyInfoResp, error) {
var resp FamilyInfoListResp
_, err := y.get(API_URL+"/family/manage/getFamilyList.action", nil, &resp)
if err != nil {
return nil, err
}
return resp.FamilyInfoResp, nil
}
// getFamilyID picks the family-cloud ID to use
func (y *Yun189PC) getFamilyID() (string, error) {
infos, err := y.getFamilyInfoList()
if err != nil {
return "", err
}
if len(infos) == 0 {
return "", fmt.Errorf("cannot get automatically,please input family_id")
}
for _, info := range infos {
if strings.Contains(y.tokenInfo.LoginName, info.RemarkName) {
return fmt.Sprint(info.FamilyID), nil
}
}
return fmt.Sprint(infos[0].FamilyID), nil
}


@ -0,0 +1,314 @@
package aliyundrive
import (
"bytes"
"context"
"crypto/sha1"
"encoding/base64"
"encoding/hex"
"fmt"
"io"
"math"
"math/big"
"net/http"
"os"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/cron"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
)
type AliDrive struct {
model.Storage
Addition
AccessToken string
cron *cron.Cron
DriveId string
}
func (d *AliDrive) Config() driver.Config {
return config
}
func (d *AliDrive) GetAddition() driver.Additional {
return d.Addition
}
func (d *AliDrive) Init(ctx context.Context, storage model.Storage) error {
d.Storage = storage
err := utils.Json.UnmarshalFromString(d.Storage.Addition, &d.Addition)
if err != nil {
return err
}
// TODO login / refresh token
//op.MustSaveDriverStorage(d)
err = d.refreshToken()
if err != nil {
return err
}
// get the default drive id
res, err, _ := d.request("https://api.aliyundrive.com/v2/user/get", http.MethodPost, nil, nil)
if err != nil {
return err
}
d.DriveId = utils.Json.Get(res, "default_drive_id").ToString()
d.cron = cron.NewCron(time.Hour * 2)
d.cron.Do(func() {
err := d.refreshToken()
if err != nil {
log.Errorf("%+v", err)
}
})
return err
}
func (d *AliDrive) Drop(ctx context.Context) error {
if d.cron != nil {
d.cron.Stop()
}
return nil
}
func (d *AliDrive) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
files, err := d.getFiles(dir.GetID())
if err != nil {
return nil, err
}
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
return fileToObj(src), nil
})
}
//func (d *AliDrive) Get(ctx context.Context, path string) (model.Obj, error) {
// // TODO this is optional
// return nil, errs.NotImplement
//}
func (d *AliDrive) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
data := base.Json{
"drive_id": d.DriveId,
"file_id": file.GetID(),
"expire_sec": 14400,
}
res, err, _ := d.request("https://api.aliyundrive.com/v2/file/get_download_url", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
if err != nil {
return nil, err
}
return &model.Link{
Header: http.Header{
"Referer": []string{"https://www.aliyundrive.com/"},
},
URL: utils.Json.Get(res, "url").ToString(),
}, nil
}
func (d *AliDrive) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
_, err, _ := d.request("https://api.aliyundrive.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"check_name_mode": "refuse",
"drive_id": d.DriveId,
"name": dirName,
"parent_file_id": parentDir.GetID(),
"type": "folder",
})
}, nil)
return err
}
func (d *AliDrive) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
err := d.batch(srcObj.GetID(), dstDir.GetID(), "/file/move")
return err
}
func (d *AliDrive) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
_, err, _ := d.request("https://api.aliyundrive.com/v3/file/update", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"check_name_mode": "refuse",
"drive_id": d.DriveId,
"file_id": srcObj.GetID(),
"name": newName,
})
}, nil)
return err
}
func (d *AliDrive) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
err := d.batch(srcObj.GetID(), dstDir.GetID(), "/file/copy")
return err
}
func (d *AliDrive) Remove(ctx context.Context, obj model.Obj) error {
_, err, _ := d.request("https://api.aliyundrive.com/v2/recyclebin/trash", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"drive_id": d.DriveId,
"file_id": obj.GetID(),
})
}, nil)
return err
}
func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
file := model.FileStream{
Obj: stream,
ReadCloser: stream,
Mimetype: stream.GetMimetype(),
}
const DEFAULT int64 = 10485760
var count = int(math.Ceil(float64(stream.GetSize()) / float64(DEFAULT)))
partInfoList := make([]base.Json, 0, count)
for i := 1; i <= count; i++ {
partInfoList = append(partInfoList, base.Json{"part_number": i})
}
reqBody := base.Json{
"check_name_mode": "overwrite",
"drive_id": d.DriveId,
"name": file.GetName(),
"parent_file_id": dstDir.GetID(),
"part_info_list": partInfoList,
"size": file.GetSize(),
"type": "file",
}
if d.RapidUpload {
buf := bytes.NewBuffer(make([]byte, 0, 1024))
io.CopyN(buf, file, 1024)
reqBody["pre_hash"] = utils.GetSHA1Encode(buf.String())
// stitch the consumed head back onto the stream
file.ReadCloser = struct {
io.Reader
io.Closer
}{
Reader: io.MultiReader(buf, file),
Closer: file,
}
} else {
reqBody["content_hash_name"] = "none"
reqBody["proof_version"] = "v1"
}
var resp UploadResp
_, err, e := d.request("https://api.aliyundrive.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
req.SetBody(reqBody)
}, &resp)
if err != nil && e.Code != "PreHashMatched" {
return err
}
if d.RapidUpload && e.Code == "PreHashMatched" {
tempFile, err := os.CreateTemp(conf.Conf.TempDir, "file-*")
if err != nil {
return err
}
defer func() {
_ = tempFile.Close()
_ = os.Remove(tempFile.Name())
}()
delete(reqBody, "pre_hash")
h := sha1.New()
if _, err = io.Copy(io.MultiWriter(tempFile, h), file); err != nil {
return err
}
reqBody["content_hash"] = hex.EncodeToString(h.Sum(nil))
reqBody["content_hash_name"] = "sha1"
reqBody["proof_version"] = "v1"
/*
The JS implicit conversions are tricky; not sure whether this port is bug-free:
var n = e.access_token
r = new BigNumber('0x'.concat(md5(n).slice(0, 16)))
i = new BigNumber(t.file.size)
o = i ? r.mod(i) : new gt.BigNumber(0);
(t.file.slice(o.toNumber(), Math.min(o.plus(8).toNumber(), t.file.size)))
*/
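// In Go terms: offset = (first 16 hex chars of md5(accessToken), parsed as an integer) mod fileSize;
// proof_code is the base64 of up to 8 bytes read from the file at that offset.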
buf := make([]byte, 8)
r, _ := new(big.Int).SetString(utils.GetMD5Encode(d.AccessToken)[:16], 16)
i := new(big.Int).SetInt64(file.GetSize())
o := r.Mod(r, i)
n, _ := io.NewSectionReader(tempFile, o.Int64(), 8).Read(buf[:8])
reqBody["proof_code"] = base64.StdEncoding.EncodeToString(buf[:n])
_, err, e := d.request("https://api.aliyundrive.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
req.SetBody(reqBody)
}, &resp)
if err != nil && e.Code != "PreHashMatched" {
return err
}
if resp.RapidUpload {
return nil
}
// rapid upload failed, fall back to a normal upload
if _, err = tempFile.Seek(0, io.SeekStart); err != nil {
return err
}
file.ReadCloser = tempFile
}
for i, partInfo := range resp.PartInfoList {
req, err := http.NewRequest("PUT", partInfo.UploadUrl, io.LimitReader(file, DEFAULT))
if err != nil {
return err
}
res, err := base.HttpClient.Do(req)
if err != nil {
return err
}
res.Body.Close()
if count > 0 {
up(i * 100 / count)
}
}
var resp2 base.Json
_, err, e = d.request("https://api.aliyundrive.com/v2/file/complete", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"drive_id": d.DriveId,
"file_id": resp.FileId,
"upload_id": resp.UploadId,
})
}, &resp2)
if err != nil && e.Code != "PreHashMatched" {
return err
}
if resp2["file_id"] == resp.FileId {
return nil
}
return fmt.Errorf("%+v", resp2)
}
func (d *AliDrive) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
var resp base.Json
var url string
data := base.Json{
"drive_id": d.DriveId,
"file_id": args.Obj.GetID(),
}
switch args.Method {
case "doc_preview":
url = "https://api.aliyundrive.com/v2/file/get_office_preview_url"
data["access_token"] = d.AccessToken
case "video_preview":
url = "https://api.aliyundrive.com/v2/file/get_video_preview_play_info"
data["category"] = "live_transcoding"
default:
return nil, errs.NotSupport
}
_, err, _ := d.request(url, http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, &resp)
if err != nil {
return nil, err
}
return resp, nil
}
var _ driver.Driver = (*AliDrive)(nil)


@ -0,0 +1,27 @@
package aliyundrive
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
driver.RootID
RefreshToken string `json:"refresh_token" required:"true"`
OrderBy string `json:"order_by" type:"select" options:"name,size,updated_at,created_at"`
OrderDirection string `json:"order_direction" type:"select" options:"ASC,DESC"`
RapidUpload bool `json:"rapid_upload"`
}
var config = driver.Config{
Name: "Aliyundrive",
DefaultRoot: "root",
}
func New() driver.Driver {
return &AliDrive{}
}
func init() {
op.RegisterDriver(config, New)
}


@ -0,0 +1,54 @@
package aliyundrive
import (
"time"
"github.com/alist-org/alist/v3/internal/model"
)
type RespErr struct {
Code string `json:"code"`
Message string `json:"message"`
}
type Files struct {
Items []File `json:"items"`
NextMarker string `json:"next_marker"`
}
type File struct {
DriveId string `json:"drive_id"`
CreatedAt *time.Time `json:"created_at"`
FileExtension string `json:"file_extension"`
FileId string `json:"file_id"`
Type string `json:"type"`
Name string `json:"name"`
Category string `json:"category"`
ParentFileId string `json:"parent_file_id"`
UpdatedAt time.Time `json:"updated_at"`
Size int64 `json:"size"`
Thumbnail string `json:"thumbnail"`
Url string `json:"url"`
}
func fileToObj(f File) *model.ObjThumb {
return &model.ObjThumb{
Object: model.Object{
ID: f.FileId,
Name: f.Name,
Size: f.Size,
Modified: f.UpdatedAt,
IsFolder: f.Type == "folder",
},
}
}
type UploadResp struct {
FileId string `json:"file_id"`
UploadId string `json:"upload_id"`
PartInfoList []struct {
UploadUrl string `json:"upload_url"`
} `json:"part_info_list"`
RapidUpload bool `json:"rapid_upload"`
}

drivers/aliyundrive/util.go Normal file

@ -0,0 +1,133 @@
package aliyundrive
import (
"errors"
"fmt"
"net/http"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
)
// helper methods that are not part of the Driver interface
func (d *AliDrive) refreshToken() error {
url := "https://auth.aliyundrive.com/v2/account/token"
var resp base.TokenResp
var e RespErr
_, err := base.RestyClient.R().
//ForceContentType("application/json").
SetBody(base.Json{"refresh_token": d.RefreshToken, "grant_type": "refresh_token"}).
SetResult(&resp).
SetError(&e).
Post(url)
if err != nil {
return err
}
if e.Code != "" {
return fmt.Errorf("failed to refresh token: %s", e.Message)
}
d.RefreshToken, d.AccessToken = resp.RefreshToken, resp.AccessToken
op.MustSaveDriverStorage(d)
return nil
}
func (d *AliDrive) request(url, method string, callback base.ReqCallback, resp interface{}) ([]byte, error, RespErr) {
req := base.RestyClient.R()
req.SetHeader("Authorization", "Bearer\t"+d.AccessToken)
req.SetHeader("content-type", "application/json")
req.SetHeader("origin", "https://www.aliyundrive.com")
if callback != nil {
callback(req)
} else {
req.SetBody("{}")
}
if resp != nil {
req.SetResult(resp)
}
var e RespErr
req.SetError(&e)
res, err := req.Execute(method, url)
if err != nil {
return nil, err, e
}
if e.Code != "" {
if e.Code == "AccessTokenInvalid" {
err = d.refreshToken()
if err != nil {
return nil, err, e
}
return d.request(url, method, callback, resp)
}
return nil, errors.New(e.Message), e
}
return res.Body(), nil, e
}
func (d *AliDrive) getFiles(fileId string) ([]File, error) {
marker := "first"
res := make([]File, 0)
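// marker starts as the sentinel "first" so the loop runs at least once;
// afterwards the server's next_marker drives pagination until it comes back empty.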
for marker != "" {
if marker == "first" {
marker = ""
}
var resp Files
data := base.Json{
"drive_id": d.DriveId,
"fields": "*",
"image_thumbnail_process": "image/resize,w_400/format,jpeg",
"image_url_process": "image/resize,w_1920/format,jpeg",
"limit": 200,
"marker": marker,
"order_by": d.OrderBy,
"order_direction": d.OrderDirection,
"parent_file_id": fileId,
"video_thumbnail_process": "video/snapshot,t_0,f_jpg,ar_auto,w_300",
"url_expire_sec": 14400,
}
_, err, _ := d.request("https://api.aliyundrive.com/v2/file/list", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, &resp)
if err != nil {
return nil, err
}
marker = resp.NextMarker
res = append(res, resp.Items...)
}
return res, nil
}
func (d *AliDrive) batch(srcId, dstId string, url string) error {
res, err, _ := d.request("https://api.aliyundrive.com/v3/batch", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"requests": []base.Json{
{
"headers": base.Json{
"Content-Type": "application/json",
},
"method": "POST",
"id": srcId,
"body": base.Json{
"drive_id": d.DriveId,
"file_id": srcId,
"to_drive_id": d.DriveId,
"to_parent_file_id": dstId,
},
"url": url,
},
},
"resource": "file",
})
}, nil)
if err != nil {
return err
}
status := utils.Json.Get(res, "responses", 0, "status").ToInt()
if status < 400 && status >= 100 {
return nil
}
return errors.New(string(res))
}


@ -1,6 +1,32 @@
package drivers
import (
_ "github.com/alist-org/alist/v3/drivers/123"
_ "github.com/alist-org/alist/v3/drivers/139"
_ "github.com/alist-org/alist/v3/drivers/189"
_ "github.com/alist-org/alist/v3/drivers/189pc"
_ "github.com/alist-org/alist/v3/drivers/aliyundrive"
_ "github.com/alist-org/alist/v3/drivers/baidu_netdisk"
_ "github.com/alist-org/alist/v3/drivers/baidu_photo"
_ "github.com/alist-org/alist/v3/drivers/ftp"
_ "github.com/alist-org/alist/v3/drivers/google_drive"
_ "github.com/alist-org/alist/v3/drivers/local"
_ "github.com/alist-org/alist/v3/drivers/mediatrack"
_ "github.com/alist-org/alist/v3/drivers/onedrive"
_ "github.com/alist-org/alist/v3/drivers/pikpak"
_ "github.com/alist-org/alist/v3/drivers/quark"
_ "github.com/alist-org/alist/v3/drivers/s3"
_ "github.com/alist-org/alist/v3/drivers/sftp"
_ "github.com/alist-org/alist/v3/drivers/teambition"
_ "github.com/alist-org/alist/v3/drivers/thunder"
_ "github.com/alist-org/alist/v3/drivers/uss"
_ "github.com/alist-org/alist/v3/drivers/virtual"
_ "github.com/alist-org/alist/v3/drivers/webdav"
_ "github.com/alist-org/alist/v3/drivers/yandex_disk"
)
// All does nothing; it exists only so this package can be imported for its side effect of registering every driver
// (equivalent to a blank "_" import)
func All() {
}


@ -0,0 +1,233 @@
package baidu_netdisk
import (
"bytes"
"context"
"crypto/md5"
"encoding/hex"
"fmt"
"io"
"math"
"os"
stdpath "path"
"strconv"
"strings"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
log "github.com/sirupsen/logrus"
)
type BaiduNetdisk struct {
model.Storage
Addition
AccessToken string
}
func (d *BaiduNetdisk) Config() driver.Config {
return config
}
func (d *BaiduNetdisk) GetAddition() driver.Additional {
return d.Addition
}
func (d *BaiduNetdisk) Init(ctx context.Context, storage model.Storage) error {
d.Storage = storage
err := utils.Json.UnmarshalFromString(d.Storage.Addition, &d.Addition)
if err != nil {
return err
}
return d.refreshToken()
}
func (d *BaiduNetdisk) Drop(ctx context.Context) error {
return nil
}
func (d *BaiduNetdisk) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
files, err := d.getFiles(dir.GetPath())
if err != nil {
return nil, err
}
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
return fileToObj(src), nil
})
}
//func (d *BaiduNetdisk) Get(ctx context.Context, path string) (model.Obj, error) {
// // this is optional
// return nil, errs.NotImplement
//}
func (d *BaiduNetdisk) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
if d.DownloadAPI == "crack" {
return d.linkCrack(file, args)
}
return d.linkOfficial(file, args)
}
func (d *BaiduNetdisk) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
_, err := d.create(stdpath.Join(parentDir.GetPath(), dirName), 0, 1, "", "")
return err
}
func (d *BaiduNetdisk) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
data := []base.Json{
{
"path": srcObj.GetPath(),
"dest": dstDir.GetPath(),
"newname": srcObj.GetName(),
},
}
_, err := d.manage("move", data)
return err
}
func (d *BaiduNetdisk) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
data := []base.Json{
{
"path": srcObj.GetPath(),
"newname": newName,
},
}
_, err := d.manage("rename", data)
return err
}
func (d *BaiduNetdisk) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
dest, newname := stdpath.Split(dstDir.GetPath())
data := []base.Json{
{
"path": srcObj.GetPath(),
"dest": dest,
"newname": newname,
},
}
_, err := d.manage("copy", data)
return err
}
func (d *BaiduNetdisk) Remove(ctx context.Context, obj model.Obj) error {
data := []string{obj.GetPath()}
_, err := d.manage("delete", data)
return err
}
func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
tempFile, err := utils.CreateTempFile(stream.GetReadCloser())
if err != nil {
return err
}
defer func() {
_ = tempFile.Close()
_ = os.Remove(tempFile.Name())
}()
var Default int64 = 4 * 1024 * 1024
defaultByteData := make([]byte, Default)
count := int(math.Ceil(float64(stream.GetSize()) / float64(Default)))
var SliceSize int64 = 256 * 1024
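// content-md5 covers the whole file, block_list holds one MD5 per 4 MiB block, and slice-md5 covers only
// the first 256 KiB; precreate uses them for its existence check (ReturnType == 2 below is treated as already uploaded).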
// compute the hashes required by precreate
h1 := md5.New()
h2 := md5.New()
block_list := make([]string, 0)
content_md5 := ""
slice_md5 := ""
left := stream.GetSize()
for i := 0; i < count; i++ {
byteSize := Default
var byteData []byte
if left < Default {
byteSize = left
byteData = make([]byte, byteSize)
} else {
byteData = defaultByteData
}
left -= byteSize
_, err = io.ReadFull(tempFile, byteData)
if err != nil {
return err
}
h1.Write(byteData)
h2.Write(byteData)
block_list = append(block_list, fmt.Sprintf("\"%s\"", hex.EncodeToString(h2.Sum(nil))))
h2.Reset()
}
content_md5 = hex.EncodeToString(h1.Sum(nil))
_, err = tempFile.Seek(0, io.SeekStart)
if err != nil {
return err
}
if stream.GetSize() <= SliceSize {
slice_md5 = content_md5
} else {
sliceData := make([]byte, SliceSize)
_, err = io.ReadFull(tempFile, sliceData)
if err != nil {
return err
}
h2.Write(sliceData)
slice_md5 = hex.EncodeToString(h2.Sum(nil))
_, err = tempFile.Seek(0, io.SeekStart)
if err != nil {
return err
}
}
path := encodeURIComponent(stdpath.Join(dstDir.GetPath(), stream.GetName()))
block_list_str := fmt.Sprintf("[%s]", strings.Join(block_list, ","))
data := fmt.Sprintf("path=%s&size=%d&isdir=0&autoinit=1&block_list=%s&content-md5=%s&slice-md5=%s",
path, stream.GetSize(),
block_list_str,
content_md5, slice_md5)
params := map[string]string{
"method": "precreate",
}
var precreateResp PrecreateResp
_, err = d.post("/xpan/file", params, data, &precreateResp)
if err != nil {
return err
}
log.Debugf("%+v", precreateResp)
if precreateResp.ReturnType == 2 {
return nil
}
params = map[string]string{
"method": "upload",
"access_token": d.AccessToken,
"type": "tmpfile",
"path": path,
"uploadid": precreateResp.Uploadid,
}
left = stream.GetSize()
for i, partseq := range precreateResp.BlockList {
byteSize := Default
var byteData []byte
if left < Default {
byteSize = left
byteData = make([]byte, byteSize)
} else {
byteData = defaultByteData
}
left -= byteSize
_, err = io.ReadFull(tempFile, byteData)
if err != nil {
return err
}
u := "https://d.pcs.baidu.com/rest/2.0/pcs/superfile2"
params["partseq"] = strconv.Itoa(partseq)
res, err := base.RestyClient.R().SetQueryParams(params).SetFileReader("file", stream.GetName(), bytes.NewReader(byteData)).Post(u)
if err != nil {
return err
}
log.Debugln(res.String())
if len(precreateResp.BlockList) > 0 {
up(i * 100 / len(precreateResp.BlockList))
}
}
_, err = d.create(path, stream.GetSize(), 0, precreateResp.Uploadid, block_list_str)
return err
}
var _ driver.Driver = (*BaiduNetdisk)(nil)


@ -0,0 +1,29 @@
package baidu_netdisk
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
RefreshToken string `json:"refresh_token" required:"true"`
driver.RootPath
OrderBy string `json:"order_by" type:"select" options:"name,time,size" default:"name"`
OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
DownloadAPI string `json:"download_api" type:"select" options:"official,crack" default:"official"`
ClientID string `json:"client_id" required:"true" default:"iYCeC9g08h5vuP9UqvPHKKSVrKFXGa1v"`
ClientSecret string `json:"client_secret" required:"true" default:"jXiFMOPVPCWlO2M5CwWQzffpNPaGTRBG"`
}
var config = driver.Config{
Name: "BaiduNetdisk",
DefaultRoot: "/",
}
func New() driver.Driver {
return &BaiduNetdisk{}
}
func init() {
op.RegisterDriver(config, New)
}


@ -0,0 +1,163 @@
package baidu_netdisk
import (
"strconv"
"time"
"github.com/alist-org/alist/v3/internal/model"
)
type TokenErrResp struct {
ErrorDescription string `json:"error_description"`
Error string `json:"error"`
}
type File struct {
//TkbindId int `json:"tkbind_id"`
//OwnerType int `json:"owner_type"`
//Category int `json:"category"`
//RealCategory string `json:"real_category"`
FsId int64 `json:"fs_id"`
ServerMtime int64 `json:"server_mtime"`
//OperId int `json:"oper_id"`
//ServerCtime int `json:"server_ctime"`
Thumbs struct {
//Icon string `json:"icon"`
Url3 string `json:"url3"`
//Url2 string `json:"url2"`
//Url1 string `json:"url1"`
} `json:"thumbs"`
//Wpfile int `json:"wpfile"`
//LocalMtime int `json:"local_mtime"`
Size int64 `json:"size"`
//ExtentTinyint7 int `json:"extent_tinyint7"`
Path string `json:"path"`
//Share int `json:"share"`
//ServerAtime int `json:"server_atime"`
//Pl int `json:"pl"`
//LocalCtime int `json:"local_ctime"`
ServerFilename string `json:"server_filename"`
//Md5 string `json:"md5"`
//OwnerId int `json:"owner_id"`
//Unlist int `json:"unlist"`
Isdir int `json:"isdir"`
}
func fileToObj(f File) *model.ObjThumb {
return &model.ObjThumb{
Object: model.Object{
ID: strconv.FormatInt(f.FsId, 10),
Name: f.ServerFilename,
Size: f.Size,
Modified: time.Unix(f.ServerMtime, 0),
IsFolder: f.Isdir == 1,
},
Thumbnail: model.Thumbnail{Thumbnail: f.Thumbs.Url3},
}
}
type ListResp struct {
Errno int `json:"errno"`
GuidInfo string `json:"guid_info"`
List []File `json:"list"`
RequestId int64 `json:"request_id"`
Guid int `json:"guid"`
}
type DownloadResp struct {
Errmsg string `json:"errmsg"`
Errno int `json:"errno"`
List []struct {
//Category int `json:"category"`
//DateTaken int `json:"date_taken,omitempty"`
Dlink string `json:"dlink"`
//Filename string `json:"filename"`
//FsId int64 `json:"fs_id"`
//Height int `json:"height,omitempty"`
//Isdir int `json:"isdir"`
//Md5 string `json:"md5"`
//OperId int `json:"oper_id"`
//Path string `json:"path"`
//ServerCtime int `json:"server_ctime"`
//ServerMtime int `json:"server_mtime"`
//Size int `json:"size"`
//Thumbs struct {
// Icon string `json:"icon,omitempty"`
// Url1 string `json:"url1,omitempty"`
// Url2 string `json:"url2,omitempty"`
// Url3 string `json:"url3,omitempty"`
//} `json:"thumbs"`
//Width int `json:"width,omitempty"`
} `json:"list"`
//Names struct {
//} `json:"names"`
RequestId string `json:"request_id"`
}
type DownloadResp2 struct {
Errno int `json:"errno"`
Info []struct {
//ExtentTinyint4 int `json:"extent_tinyint4"`
//ExtentTinyint1 int `json:"extent_tinyint1"`
//Bitmap string `json:"bitmap"`
//Category int `json:"category"`
//Isdir int `json:"isdir"`
//Videotag int `json:"videotag"`
Dlink string `json:"dlink"`
//OperID int64 `json:"oper_id"`
//PathMd5 int `json:"path_md5"`
//Wpfile int `json:"wpfile"`
//LocalMtime int `json:"local_mtime"`
/*Thumbs struct {
Icon string `json:"icon"`
URL3 string `json:"url3"`
URL2 string `json:"url2"`
URL1 string `json:"url1"`
} `json:"thumbs"`*/
//PlaySource int `json:"play_source"`
//Share int `json:"share"`
//FileKey string `json:"file_key"`
//Errno int `json:"errno"`
//LocalCtime int `json:"local_ctime"`
//Rotate int `json:"rotate"`
//Metadata time.Time `json:"metadata"`
//Height int `json:"height"`
//SampleRate int `json:"sample_rate"`
//Width int `json:"width"`
//OwnerType int `json:"owner_type"`
//Privacy int `json:"privacy"`
//ExtentInt3 int64 `json:"extent_int3"`
//RealCategory string `json:"real_category"`
//SrcLocation string `json:"src_location"`
//MetaInfo string `json:"meta_info"`
//ID string `json:"id"`
//Duration int `json:"duration"`
//FileSize string `json:"file_size"`
//Channels int `json:"channels"`
//UseSegment int `json:"use_segment"`
//ServerCtime int `json:"server_ctime"`
//Resolution string `json:"resolution"`
//OwnerID int `json:"owner_id"`
//ExtraInfo string `json:"extra_info"`
//Size int `json:"size"`
//FsID int64 `json:"fs_id"`
//ExtentTinyint3 int `json:"extent_tinyint3"`
//Md5 string `json:"md5"`
//Path string `json:"path"`
//FrameRate int `json:"frame_rate"`
//ExtentTinyint2 int `json:"extent_tinyint2"`
//ServerFilename string `json:"server_filename"`
//ServerMtime int `json:"server_mtime"`
//TkbindID int `json:"tkbind_id"`
} `json:"info"`
RequestID int64 `json:"request_id"`
}
type PrecreateResp struct {
Path string `json:"path"`
Uploadid string `json:"uploadid"`
ReturnType int `json:"return_type"`
BlockList []int `json:"block_list"`
Errno int `json:"errno"`
RequestId int64 `json:"request_id"`
}


@ -0,0 +1,201 @@
package baidu_netdisk
import (
"fmt"
"net/http"
"net/url"
"strconv"
"strings"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
)
// helper methods that are not part of the Driver interface
func (d *BaiduNetdisk) refreshToken() error {
err := d._refreshToken()
if err != nil && err == errs.EmptyToken {
err = d._refreshToken()
}
return err
}
func (d *BaiduNetdisk) _refreshToken() error {
u := "https://openapi.baidu.com/oauth/2.0/token"
var resp base.TokenResp
var e TokenErrResp
_, err := base.RestyClient.R().SetResult(&resp).SetError(&e).SetQueryParams(map[string]string{
"grant_type": "refresh_token",
"refresh_token": d.RefreshToken,
"client_id": d.ClientID,
"client_secret": d.ClientSecret,
}).Get(u)
if err != nil {
return err
}
if e.Error != "" {
return fmt.Errorf("%s : %s", e.Error, e.ErrorDescription)
}
if resp.RefreshToken == "" {
return errs.EmptyToken
}
d.AccessToken, d.RefreshToken = resp.AccessToken, resp.RefreshToken
op.MustSaveDriverStorage(d)
return nil
}
func (d *BaiduNetdisk) request(furl string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
req := base.RestyClient.R()
req.SetQueryParam("access_token", d.AccessToken)
if callback != nil {
callback(req)
}
if resp != nil {
req.SetResult(resp)
}
res, err := req.Execute(method, furl)
if err != nil {
return nil, err
}
errno := utils.Json.Get(res.Body(), "errno").ToInt()
if errno != 0 {
if errno == -6 {
err = d.refreshToken()
if err != nil {
return nil, err
}
return d.request(furl, method, callback, resp)
}
return nil, fmt.Errorf("errno: %d, refer to https://pan.baidu.com/union/doc/", errno)
}
return res.Body(), nil
}
func (d *BaiduNetdisk) get(pathname string, params map[string]string, resp interface{}) ([]byte, error) {
return d.request("https://pan.baidu.com/rest/2.0"+pathname, http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(params)
}, resp)
}
func (d *BaiduNetdisk) post(pathname string, params map[string]string, data interface{}, resp interface{}) ([]byte, error) {
return d.request("https://pan.baidu.com/rest/2.0"+pathname, http.MethodPost, func(req *resty.Request) {
req.SetQueryParams(params)
req.SetBody(data)
}, resp)
}
func (d *BaiduNetdisk) getFiles(dir string) ([]File, error) {
start := 0
limit := 200
params := map[string]string{
"method": "list",
"dir": dir,
"web": "web",
}
if d.OrderBy != "" {
params["order"] = d.OrderBy
if d.OrderDirection == "desc" {
params["desc"] = "1"
}
}
res := make([]File, 0)
for {
params["start"] = strconv.Itoa(start)
params["limit"] = strconv.Itoa(limit)
start += limit
var resp ListResp
_, err := d.get("/xpan/file", params, &resp)
if err != nil {
return nil, err
}
if len(resp.List) == 0 {
break
}
res = append(res, resp.List...)
}
return res, nil
}
func (d *BaiduNetdisk) linkOfficial(file model.Obj, args model.LinkArgs) (*model.Link, error) {
var resp DownloadResp
params := map[string]string{
"method": "filemetas",
"fsids": fmt.Sprintf("[%s]", file.GetID()),
"dlink": "1",
}
_, err := d.get("/xpan/multimedia", params, &resp)
if err != nil {
return nil, err
}
u := fmt.Sprintf("%s&access_token=%s", resp.List[0].Dlink, d.AccessToken)
res, err := base.NoRedirectClient.R().SetHeader("User-Agent", "pan.baidu.com").Head(u)
if err != nil {
return nil, err
}
//if res.StatusCode() == 302 {
u = res.Header().Get("location")
//}
return &model.Link{
URL: u,
Header: http.Header{
"User-Agent": []string{"pan.baidu.com"},
},
}, nil
}
func (d *BaiduNetdisk) linkCrack(file model.Obj, args model.LinkArgs) (*model.Link, error) {
var resp DownloadResp2
param := map[string]string{
"target": fmt.Sprintf("[\"%s\"]", file.GetPath()),
"dlink": "1",
"web": "5",
"origin": "dlna",
}
_, err := d.request("https://pan.baidu.com/api/filemetas", http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(param)
}, &resp)
if err != nil {
return nil, err
}
return &model.Link{
URL: resp.Info[0].Dlink,
Header: http.Header{
"User-Agent": []string{"pan.baidu.com"},
},
}, nil
}
func (d *BaiduNetdisk) manage(opera string, filelist interface{}) ([]byte, error) {
params := map[string]string{
"method": "filemanager",
"opera": opera,
}
marshal, err := utils.Json.Marshal(filelist)
if err != nil {
return nil, err
}
data := fmt.Sprintf("async=0&filelist=%s&ondup=newcopy", string(marshal))
return d.post("/xpan/file", params, data, nil)
}
func (d *BaiduNetdisk) create(path string, size int64, isdir int, uploadid, block_list string) ([]byte, error) {
params := map[string]string{
"method": "create",
}
data := fmt.Sprintf("path=%s&size=%d&isdir=%d", path, size, isdir)
if uploadid != "" {
data += fmt.Sprintf("&uploadid=%s&block_list=%s", uploadid, block_list)
}
return d.post("/xpan/file", params, data, nil)
}
func encodeURIComponent(str string) string {
r := url.QueryEscape(str)
r = strings.ReplaceAll(r, "+", "%20")
return r
}


@ -0,0 +1,282 @@
package baiduphoto
import (
"context"
"crypto/md5"
"encoding/hex"
"fmt"
"io"
"math"
"os"
"regexp"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
)
type BaiduPhoto struct {
model.Storage
Addition
AccessToken string
}
func (d *BaiduPhoto) Config() driver.Config {
return config
}
func (d *BaiduPhoto) GetAddition() driver.Additional {
return d.Addition
}
func (d *BaiduPhoto) Init(ctx context.Context, storage model.Storage) error {
d.Storage = storage
err := utils.Json.UnmarshalFromString(d.Storage.Addition, &d.Addition)
if err != nil {
return err
}
return d.refreshToken()
}
func (d *BaiduPhoto) Drop(ctx context.Context) error {
return nil
}
func (d *BaiduPhoto) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
var objs []model.Obj
var err error
if IsRoot(dir) {
var albums []Album
if d.ShowType != "root_only_file" {
albums, err = d.GetAllAlbum(ctx)
if err != nil {
return nil, err
}
}
var files []File
if d.ShowType != "root_only_album" {
files, err = d.GetAllFile(ctx)
if err != nil {
return nil, err
}
}
albumName := make(map[string]int)
objs, _ = utils.SliceConvert(albums, func(album Album) (model.Obj, error) {
i := albumName[album.GetName()]
if i != 0 {
albumName[album.GetName()]++
album.Title = fmt.Sprintf("%s(%d)", album.Title, i)
}
albumName[album.GetName()]++
return &album, nil
})
for i := 0; i < len(files); i++ {
objs = append(objs, &files[i])
}
} else if IsAlbum(dir) || IsAlbumRoot(dir) {
var files []AlbumFile
files, err = d.GetAllAlbumFile(ctx, splitID(dir.GetID())[0], "")
if err != nil {
return nil, err
}
objs = make([]model.Obj, 0, len(files))
for i := 0; i < len(files); i++ {
objs = append(objs, &files[i])
}
}
return objs, nil
}
func (d *BaiduPhoto) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
if IsAlbumFile(file) {
return d.linkAlbum(ctx, file, args)
} else if IsFile(file) {
return d.linkFile(ctx, file, args)
}
return nil, errs.NotFile
}
func (d *BaiduPhoto) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
if IsRoot(parentDir) {
code := regexp.MustCompile(`(?i)join:([\S]*)`).FindStringSubmatch(dirName)
if len(code) > 1 {
return d.JoinAlbum(ctx, code[1])
}
return d.CreateAlbum(ctx, dirName)
}
return errs.NotSupport
}
func (d *BaiduPhoto) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
if IsFile(srcObj) {
if IsAlbum(dstDir) {
//rootfile -> album
e := splitID(dstDir.GetID())
return d.AddAlbumFile(ctx, e[0], e[1], srcObj.GetID())
}
} else if IsAlbumFile(srcObj) {
if IsRoot(dstDir) {
//albumfile -> root
e := splitID(srcObj.GetID())
_, err := d.CopyAlbumFile(ctx, e[1], e[2], e[3], srcObj.GetID())
return err
} else if IsAlbum(dstDir) {
// albumfile -> root -> album
e := splitID(srcObj.GetID())
file, err := d.CopyAlbumFile(ctx, e[1], e[2], e[3], srcObj.GetID())
if err != nil {
return err
}
e = splitID(dstDir.GetID())
return d.AddAlbumFile(ctx, e[0], e[1], fmt.Sprint(file.Fsid))
}
}
return errs.NotSupport
}
func (d *BaiduPhoto) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
// only moving between albums is supported
if IsAlbumFile(srcObj) && IsAlbum(dstDir) {
err := d.Copy(ctx, srcObj, dstDir)
if err != nil {
return err
}
e := splitID(srcObj.GetID())
return d.DeleteAlbumFile(ctx, e[1], e[2], srcObj.GetID())
}
return errs.NotSupport
}
func (d *BaiduPhoto) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
// only albums can be renamed
if IsAlbum(srcObj) {
e := splitID(srcObj.GetID())
return d.SetAlbumName(ctx, e[0], e[1], newName)
}
return errs.NotSupport
}
func (d *BaiduPhoto) Remove(ctx context.Context, obj model.Obj) error {
e := splitID(obj.GetID())
if IsFile(obj) {
return d.DeleteFile(ctx, e[0])
} else if IsAlbum(obj) {
return d.DeleteAlbum(ctx, e[0], e[1])
} else if IsAlbumFile(obj) {
return d.DeleteAlbumFile(ctx, e[1], e[2], obj.GetID())
}
return errs.NotSupport
}
func (d *BaiduPhoto) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
// the full-file md5 is required, so the data must support io.Seek
tempFile, err := utils.CreateTempFile(stream.GetReadCloser())
if err != nil {
return err
}
defer func() {
_ = tempFile.Close()
_ = os.Remove(tempFile.Name())
}()
// compute the digests the upload API needs
const DEFAULT = 1 << 22   // 4 MiB block size
const SliceSize = 1 << 18 // 256 KiB head slice for slice-md5
count := int(math.Ceil(float64(stream.GetSize()) / float64(DEFAULT)))
sliceMD5List := make([]string, 0, count)
fileMd5 := md5.New()
sliceMd5 := md5.New()
sliceMd52 := md5.New()
slicemd52Write := utils.LimitWriter(sliceMd52, SliceSize)
for i := 1; i <= count; i++ {
select {
case <-ctx.Done():
return ctx.Err()
default:
}
_, err := io.CopyN(io.MultiWriter(fileMd5, sliceMd5, slicemd52Write), tempFile, DEFAULT)
if err != nil && err != io.EOF && err != io.ErrUnexpectedEOF {
return err
}
sliceMD5List = append(sliceMD5List, hex.EncodeToString(sliceMd5.Sum(nil)))
sliceMd5.Reset()
}
if _, err = tempFile.Seek(0, io.SeekStart); err != nil {
return err
}
content_md5 := hex.EncodeToString(fileMd5.Sum(nil))
slice_md5 := hex.EncodeToString(sliceMd52.Sum(nil))
// start the upload
params := map[string]string{
"autoinit": "1",
"isdir": "0",
"rtype": "1",
"ctype": "11",
"path": stream.GetName(),
"size": fmt.Sprint(stream.GetSize()),
"slice-md5": slice_md5,
"content-md5": content_md5,
"block_list": MustString(utils.Json.MarshalToString(sliceMD5List)),
}
// precreate (pre-upload check)
var precreateResp PrecreateResp
_, err = d.Post(FILE_API_URL_V1+"/precreate", func(r *resty.Request) {
r.SetContext(ctx)
r.SetFormData(params)
}, &precreateResp)
if err != nil {
return err
}
switch precreateResp.ReturnType {
case 1: // upload the file data
uploadParams := map[string]string{
"method": "upload",
"path": params["path"],
"uploadid": precreateResp.UploadID,
}
for i := 0; i < count; i++ {
uploadParams["partseq"] = fmt.Sprint(i)
_, err = d.Post("https://c3.pcs.baidu.com/rest/2.0/pcs/superfile2", func(r *resty.Request) {
r.SetContext(ctx)
r.SetQueryParams(uploadParams)
r.SetFileReader("file", stream.GetName(), io.LimitReader(tempFile, DEFAULT))
}, nil)
if err != nil {
return err
}
up(i * 100 / count)
}
fallthrough
case 2: // create the file
params["uploadid"] = precreateResp.UploadID
_, err = d.Post(FILE_API_URL_V1+"/create", func(r *resty.Request) {
r.SetContext(ctx)
r.SetFormData(params)
}, &precreateResp)
if err != nil {
return err
}
fallthrough
case 3: // add the file to the album
if IsAlbum(dstDir) || IsAlbumRoot(dstDir) {
e := splitID(dstDir.GetID())
err = d.AddAlbumFile(ctx, e[0], e[1], fmt.Sprint(precreateResp.Data.FsID))
if err != nil {
return err
}
}
}
return nil
}
var _ driver.Driver = (*BaiduPhoto)(nil)
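Put above derives three digests before calling precreate: the MD5 of the whole file (content-md5), the MD5 of just the first 256 KiB (slice-md5), and one MD5 per 4 MiB block (block_list, JSON-encoded). Below is a minimal standalone sketch of that hashing scheme, assuming the same block sizes; the package and function names are illustrative and not part of the driver.
package example
import (
	"crypto/md5"
	"encoding/hex"
	"io"
)
const (
	blockSize = 1 << 22 // 4 MiB, matches DEFAULT above
	headSize  = 1 << 18 // 256 KiB, matches SliceSize above
)
// hashForPrecreate returns the full-file MD5 ("content-md5"), the MD5 of the
// first 256 KiB ("slice-md5"), and one MD5 per 4 MiB block ("block_list").
func hashForPrecreate(r io.Reader) (contentMD5, sliceMD5 string, blockMD5s []string, err error) {
	fileH := md5.New()
	headH := md5.New()
	headLeft := headSize
	buf := make([]byte, blockSize)
	for {
		n, rerr := io.ReadFull(r, buf)
		if n > 0 {
			chunk := buf[:n]
			fileH.Write(chunk)
			if headLeft > 0 {
				take := headLeft
				if take > len(chunk) {
					take = len(chunk)
				}
				headH.Write(chunk[:take])
				headLeft -= take
			}
			sum := md5.Sum(chunk)
			blockMD5s = append(blockMD5s, hex.EncodeToString(sum[:]))
		}
		if rerr == io.EOF || rerr == io.ErrUnexpectedEOF {
			return hex.EncodeToString(fileH.Sum(nil)), hex.EncodeToString(headH.Sum(nil)), blockMD5s, nil
		}
		if rerr != nil {
			return "", "", nil, rerr
		}
	}
}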

drivers/baidu_photo/help.go Normal file

@ -0,0 +1,107 @@
package baiduphoto
import (
"fmt"
"math"
"math/rand"
"regexp"
"strings"
"time"
"github.com/alist-org/alist/v3/internal/model"
)
// getTid generates a tid
func getTid() string {
return fmt.Sprintf("3%d%.0f", time.Now().Unix(), math.Floor(9000000*rand.Float64()+1000000))
}
// checkName validates an album name
func checkName(name string) bool {
return len(name) <= 20 && regexp.MustCompile("[\u4e00-\u9fa5A-Za-z0-9_-]").MatchString(name)
}
func toTime(t int64) *time.Time {
tm := time.Unix(t, 0)
return &tm
}
func fsidsFormat(ids ...string) string {
var buf []string
for _, id := range ids {
e := splitID(id)
buf = append(buf, fmt.Sprintf(`{"fsid":%s,"uk":%s}`, e[0], e[3]))
}
return fmt.Sprintf("[%s]", strings.Join(buf, ","))
}
func fsidsFormatNotUk(ids ...string) string {
var buf []string
for _, id := range ids {
buf = append(buf, fmt.Sprintf(`{"fsid":%s}`, splitID(id)[0]))
}
return fmt.Sprintf("[%s]", strings.Join(buf, ","))
}
/*
ID layout:
{fsid}                       file
{album_id}|{tid}             album
{fsid}|{album_id}|{tid}|{uk} album file
*/
func splitID(id string) []string {
return strings.SplitN(id, "|", 4)[:4]
}
/*
ID layout:
{fsid}                       file
{album_id}|{tid}             album
{fsid}|{album_id}|{tid}|{uk} album file
*/
func joinID(ids ...interface{}) string {
idsStr := make([]string, 0, len(ids))
for _, id := range ids {
idsStr = append(idsStr, fmt.Sprint(id))
}
return strings.Join(idsStr, "|")
}
func getFileName(path string) string {
return path[strings.LastIndex(path, "/")+1:]
}
// album
func IsAlbum(obj model.Obj) bool {
return obj.IsDir() && obj.GetPath() == "album"
}
// root directory
func IsRoot(obj model.Obj) bool {
return obj.IsDir() && obj.GetPath() == "" && obj.GetID() == ""
}
// an album used as the root directory
func IsAlbumRoot(obj model.Obj) bool {
return obj.IsDir() && obj.GetPath() == "" && obj.GetID() != ""
}
// file in the root
func IsFile(obj model.Obj) bool {
return !obj.IsDir() && obj.GetPath() == "file"
}
// file in an album
func IsAlbumFile(obj model.Obj) bool {
return !obj.IsDir() && obj.GetPath() == "albumfile"
}
// MustString returns str and ignores err.
func MustString(str string, err error) string {
return str
}
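The comments above describe how the driver packs object identity into a single "|"-joined ID with three shapes. A tiny illustrative round trip using the joinID and splitID helpers defined above (the values are made up):
// Illustrative only: composing and decomposing an album-file ID.
func exampleIDRoundTrip() {
	albumFileID := joinID(12345, "album-1", 1663000000000, 67890) // {fsid}|{album_id}|{tid}|{uk}
	parts := splitID(albumFileID)
	// parts[0] == "12345" (fsid), parts[1] == "album-1" (album_id),
	// parts[2] == "1663000000000" (tid), parts[3] == "67890" (uk)
	_ = parts
}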


@ -0,0 +1,30 @@
package baiduphoto
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
RefreshToken string `json:"refresh_token" required:"true"`
ShowType string `json:"show_type" type:"select" options:"root,root_only_album,root_only_file" default:"root"`
AlbumID string `json:"album_id"`
//AlbumPassword string `json:"album_password"`
ClientID string `json:"client_id" required:"true" default:"iYCeC9g08h5vuP9UqvPHKKSVrKFXGa1v"`
ClientSecret string `json:"client_secret" required:"true" default:"jXiFMOPVPCWlO2M5CwWQzffpNPaGTRBG"`
}
func (a Addition) GetRootId() string {
return a.AlbumID
}
var config = driver.Config{
Name: "BaiduPhoto",
LocalSort: true,
}
func init() {
op.RegisterDriver(config, func() driver.Driver {
return &BaiduPhoto{}
})
}


@ -0,0 +1,169 @@
package baiduphoto
import (
"fmt"
"time"
)
type TokenErrResp struct {
ErrorDescription string `json:"error_description"`
ErrorMsg string `json:"error"`
}
func (e *TokenErrResp) Error() string {
return fmt.Sprint(e.ErrorMsg, " : ", e.ErrorDescription)
}
type Erron struct {
Errno int `json:"errno"`
RequestID int `json:"request_id"`
}
type Page struct {
HasMore int `json:"has_more"`
Cursor string `json:"cursor"`
}
func (p Page) HasNextPage() bool {
return p.HasMore == 1
}
type (
FileListResp struct {
Page
List []File `json:"list"`
}
File struct {
Fsid int64 `json:"fsid"` // file ID
Path string `json:"path"` // file path
Size int64 `json:"size"`
Ctime int64 `json:"ctime"` // creation time (seconds)
Mtime int64 `json:"mtime"` // modification time (seconds)
Thumburl []string `json:"thumburl"`
parseTime *time.Time
}
)
func (c *File) GetSize() int64 { return c.Size }
func (c *File) GetName() string { return getFileName(c.Path) }
func (c *File) ModTime() time.Time {
if c.parseTime == nil {
c.parseTime = toTime(c.Mtime)
}
return *c.parseTime
}
func (c *File) IsDir() bool { return false }
func (c *File) GetID() string { return joinID(c.Fsid) }
func (c *File) GetPath() string { return "file" }
func (c *File) Thumb() string {
if len(c.Thumburl) > 0 {
return c.Thumburl[0]
}
return ""
}
/* Albums */
type (
AlbumListResp struct {
Page
List []Album `json:"list"`
Reset int64 `json:"reset"`
TotalCount int64 `json:"total_count"`
}
Album struct {
AlbumID string `json:"album_id"`
Tid int64 `json:"tid"`
Title string `json:"title"`
JoinTime int64 `json:"join_time"`
CreateTime int64 `json:"create_time"`
Mtime int64 `json:"mtime"`
parseTime *time.Time
}
AlbumFileListResp struct {
Page
List []AlbumFile `json:"list"`
Reset int64 `json:"reset"`
TotalCount int64 `json:"total_count"`
}
AlbumFile struct {
File
AlbumID string `json:"album_id"`
Tid int64 `json:"tid"`
Uk int64 `json:"uk"`
}
)
func (a *Album) GetSize() int64 { return 0 }
func (a *Album) GetName() string { return fmt.Sprint(a.Title) }
func (a *Album) ModTime() time.Time {
if a.parseTime == nil {
a.parseTime = toTime(a.Mtime)
}
return *a.parseTime
}
func (a *Album) IsDir() bool { return true }
func (a *Album) GetID() string { return joinID(a.AlbumID, a.Tid) }
func (a *Album) GetPath() string { return "album" }
func (af *AlbumFile) GetID() string { return joinID(af.Fsid, af.AlbumID, af.Tid, af.Uk) }
func (c *AlbumFile) GetPath() string { return "albumfile" }
type (
CopyFileResp struct {
List []CopyFile `json:"list"`
}
CopyFile struct {
FromFsid int64 `json:"from_fsid"` // source fsid
Fsid int64 `json:"fsid"` // destination fsid
Path string `json:"path"`
ShootTime int `json:"shoot_time"`
}
)
/* Upload */
type (
UploadFile struct {
FsID int64 `json:"fs_id"`
Size int64 `json:"size"`
Md5 string `json:"md5"`
ServerFilename string `json:"server_filename"`
Path string `json:"path"`
Ctime int `json:"ctime"`
Mtime int `json:"mtime"`
Isdir int `json:"isdir"`
Category int `json:"category"`
ServerMd5 string `json:"server_md5"`
ShootTime int `json:"shoot_time"`
}
CreateFileResp struct {
Data UploadFile `json:"data"`
}
PrecreateResp struct {
ReturnType int `json:"return_type"` // 1: file does not exist, 2: file already exists, 3: already saved
// fields returned when the file already exists
CreateFileResp
// fields returned when the file does not exist
Path string `json:"path"`
UploadID string `json:"uploadid"`
Blocklist []int64 `json:"block_list"`
}
)
type InviteResp struct {
Pdata struct {
// invite code
InviteCode string `json:"invite_code"`
// expiration time
ExpireTime int `json:"expire_time"`
ShareID string `json:"share_id"`
} `json:"pdata"`
}


@ -0,0 +1,376 @@
package baiduphoto
import (
"context"
"errors"
"fmt"
"net/http"
"strings"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
)
const (
API_URL = "https://photo.baidu.com/youai"
ALBUM_API_URL = API_URL + "/album/v1"
FILE_API_URL_V1 = API_URL + "/file/v1"
FILE_API_URL_V2 = API_URL + "/file/v2"
)
var (
ErrNotSupportName = errors.New("only chinese and english, numbers and underscores are supported, and the length is no more than 20")
)
func (p *BaiduPhoto) Request(furl string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
req := base.RestyClient.R().
SetQueryParam("access_token", p.AccessToken)
if callback != nil {
callback(req)
}
if resp != nil {
req.SetResult(resp)
}
res, err := req.Execute(method, furl)
if err != nil {
return nil, err
}
erron := utils.Json.Get(res.Body(), "errno").ToInt()
switch erron {
case 0:
break
case 50805:
return nil, fmt.Errorf("you have joined album")
case 50820:
return nil, fmt.Errorf("no shared albums found")
case -6:
if err = p.refreshToken(); err != nil {
return nil, err
}
default:
return nil, fmt.Errorf("errno: %d, refer to https://photo.baidu.com/union/doc", erron)
}
return res.Body(), nil
}
func (p *BaiduPhoto) refreshToken() error {
u := "https://openapi.baidu.com/oauth/2.0/token"
var resp base.TokenResp
var e TokenErrResp
_, err := base.RestyClient.R().SetResult(&resp).SetError(&e).SetQueryParams(map[string]string{
"grant_type": "refresh_token",
"refresh_token": p.RefreshToken,
"client_id": p.ClientID,
"client_secret": p.ClientSecret,
}).Get(u)
if err != nil {
return err
}
if e.ErrorMsg != "" {
return &e
}
if resp.RefreshToken == "" {
return errs.EmptyToken
}
p.AccessToken, p.RefreshToken = resp.AccessToken, resp.RefreshToken
op.MustSaveDriverStorage(p)
return nil
}
func (p *BaiduPhoto) Get(furl string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
return p.Request(furl, http.MethodGet, callback, resp)
}
func (p *BaiduPhoto) Post(furl string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
return p.Request(furl, http.MethodPost, callback, resp)
}
// GetAllFile fetches all files (cursor-paged)
func (p *BaiduPhoto) GetAllFile(ctx context.Context) (files []File, err error) {
var cursor string
for {
var resp FileListResp
_, err = p.Get(FILE_API_URL_V1+"/list", func(r *resty.Request) {
r.SetContext(ctx)
r.SetQueryParams(map[string]string{
"need_thumbnail": "1",
"need_filter_hidden": "0",
"cursor": cursor,
})
}, &resp)
if err != nil {
return
}
files = append(files, resp.List...)
if !resp.HasNextPage() {
return
}
cursor = resp.Cursor
}
}
// DeleteFile deletes files from the root
func (p *BaiduPhoto) DeleteFile(ctx context.Context, fileIDs ...string) error {
_, err := p.Get(FILE_API_URL_V1+"/delete", func(req *resty.Request) {
req.SetContext(ctx)
req.SetQueryParams(map[string]string{
"fsid_list": fmt.Sprintf("[%s]", strings.Join(fileIDs, ",")),
})
}, nil)
return err
}
// GetAllAlbum fetches all albums (cursor-paged)
func (p *BaiduPhoto) GetAllAlbum(ctx context.Context) (albums []Album, err error) {
var cursor string
for {
var resp AlbumListResp
_, err = p.Get(ALBUM_API_URL+"/list", func(r *resty.Request) {
r.SetContext(ctx)
r.SetQueryParams(map[string]string{
"need_amount": "1",
"limit": "100",
"cursor": cursor,
})
}, &resp)
if err != nil {
return
}
if albums == nil {
albums = make([]Album, 0, resp.TotalCount)
}
cursor = resp.Cursor
albums = append(albums, resp.List...)
if !resp.HasNextPage() {
return
}
}
}
// GetAllAlbumFile fetches all files in an album (cursor-paged)
func (p *BaiduPhoto) GetAllAlbumFile(ctx context.Context, albumID, passwd string) (files []AlbumFile, err error) {
var cursor string
for {
var resp AlbumFileListResp
_, err = p.Get(ALBUM_API_URL+"/listfile", func(r *resty.Request) {
r.SetContext(ctx)
r.SetQueryParams(map[string]string{
"album_id": albumID,
"need_amount": "1",
"limit": "1000",
"passwd": passwd,
"cursor": cursor,
})
}, &resp)
if err != nil {
return
}
if files == nil {
files = make([]AlbumFile, 0, resp.TotalCount)
}
cursor = resp.Cursor
files = append(files, resp.List...)
if !resp.HasNextPage() {
return
}
}
}
// CreateAlbum creates a new album
func (p *BaiduPhoto) CreateAlbum(ctx context.Context, name string) error {
if !checkName(name) {
return ErrNotSupportName
}
_, err := p.Post(ALBUM_API_URL+"/create", func(r *resty.Request) {
r.SetContext(ctx)
r.SetQueryParams(map[string]string{
"title": name,
"tid": getTid(),
"source": "0",
})
}, nil)
return err
}
// SetAlbumName renames an album
func (p *BaiduPhoto) SetAlbumName(ctx context.Context, albumID, tID, name string) error {
if !checkName(name) {
return ErrNotSupportName
}
_, err := p.Post(ALBUM_API_URL+"/settitle", func(r *resty.Request) {
r.SetContext(ctx)
r.SetFormData(map[string]string{
"title": name,
"album_id": albumID,
"tid": tID,
})
}, nil)
return err
}
// DeleteAlbum deletes an album
func (p *BaiduPhoto) DeleteAlbum(ctx context.Context, albumID, tID string) error {
_, err := p.Post(ALBUM_API_URL+"/delete", func(r *resty.Request) {
r.SetContext(ctx)
r.SetFormData(map[string]string{
"album_id": albumID,
"tid": tID,
"delete_origin_image": "0", // 是否删除原图 0 不删除 1 删除
})
}, nil)
return err
}
// DeleteAlbumFile removes files from an album
func (p *BaiduPhoto) DeleteAlbumFile(ctx context.Context, albumID, tID string, fileIDs ...string) error {
_, err := p.Post(ALBUM_API_URL+"/delfile", func(r *resty.Request) {
r.SetContext(ctx)
r.SetFormData(map[string]string{
"album_id": albumID,
"tid": tID,
"list": fsidsFormat(fileIDs...),
"del_origin": "0", // 是否删除原图 0 不删除 1 删除
})
}, nil)
return err
}
// AddAlbumFile adds files to an album
func (p *BaiduPhoto) AddAlbumFile(ctx context.Context, albumID, tID string, fileIDs ...string) error {
_, err := p.Get(ALBUM_API_URL+"/addfile", func(r *resty.Request) {
r.SetContext(ctx)
r.SetQueryParams(map[string]string{
"album_id": albumID,
"tid": tID,
"list": fsidsFormatNotUk(fileIDs...),
})
}, nil)
return err
}
// CopyAlbumFile saves an album file back as a root file
func (p *BaiduPhoto) CopyAlbumFile(ctx context.Context, albumID, tID, uk string, fileID ...string) (*CopyFile, error) {
var resp CopyFileResp
_, err := p.Post(ALBUM_API_URL+"/copyfile", func(r *resty.Request) {
r.SetContext(ctx)
r.SetFormData(map[string]string{
"album_id": albumID,
"tid": tID,
"uk": uk,
"list": fsidsFormatNotUk(fileID...),
})
r.SetResult(&resp)
}, nil)
if err != nil {
return nil, err
}
return &resp.List[0], nil
}
// JoinAlbum joins a shared album via its invite code
func (p *BaiduPhoto) JoinAlbum(ctx context.Context, code string) error {
var resp InviteResp
_, err := p.Get(ALBUM_API_URL+"/querypcode", func(req *resty.Request) {
req.SetContext(ctx)
req.SetQueryParams(map[string]string{
"pcode": code,
"web": "1",
})
}, &resp)
if err != nil {
return err
}
_, err = p.Get(ALBUM_API_URL+"/join", func(req *resty.Request) {
req.SetContext(ctx)
req.SetQueryParams(map[string]string{
"invite_code": resp.Pdata.InviteCode,
})
}, nil)
return err
}
func (d *BaiduPhoto) linkAlbum(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
headers := map[string]string{
"User-Agent": base.UserAgent,
}
if args.Header.Get("User-Agent") != "" {
headers["User-Agent"] = args.Header.Get("User-Agent")
}
if !utils.IsLocalIPAddr(args.IP) {
headers["X-Forwarded-For"] = args.IP
}
e := splitID(file.GetID())
res, err := base.NoRedirectClient.R().
SetContext(ctx).
SetHeaders(headers).
SetQueryParams(map[string]string{
"access_token": d.AccessToken,
"fsid": e[0],
"album_id": e[1],
"tid": e[2],
"uk": e[3],
}).
Head(ALBUM_API_URL + "/download")
if err != nil {
return nil, err
}
//exp := 8 * time.Hour
link := &model.Link{
URL: res.Header().Get("location"),
Header: http.Header{
"User-Agent": []string{headers["User-Agent"]},
},
//Expiration: &exp,
}
return link, nil
}
func (d *BaiduPhoto) linkFile(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
headers := map[string]string{
"User-Agent": base.UserAgent,
}
if args.Header.Get("User-Agent") != "" {
headers["User-Agent"] = args.Header.Get("User-Agent")
}
if !utils.IsLocalIPAddr(args.IP) {
headers["X-Forwarded-For"] = args.IP
}
var downloadUrl struct {
Dlink string `json:"dlink"`
}
_, err := d.Get(FILE_API_URL_V2+"/download", func(r *resty.Request) {
r.SetContext(ctx)
r.SetHeaders(headers)
r.SetQueryParams(map[string]string{
"fsid": splitID(file.GetID())[0],
})
}, &downloadUrl)
if err != nil {
return nil, err
}
//exp := 8 * time.Hour
link := &model.Link{
URL: downloadUrl.Dlink,
Header: http.Header{
"User-Agent": []string{headers["User-Agent"]},
},
//Expiration: &exp,
}
return link, nil
}
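GetAllFile, GetAllAlbum and GetAllAlbumFile above all follow the same cursor pattern: request a page, append its items, and repeat with the returned cursor until has_more is 0. A generic sketch of that loop, assuming each page response embeds the Page struct from types.go; listAll and fetchPage are illustrative names, not part of the driver:
// Illustrative sketch of the cursor loop shared by the listing helpers above.
// fetchPage is a hypothetical callback: given a cursor it returns one page of
// items plus the embedded Page metadata.
func listAll[T any](fetchPage func(cursor string) ([]T, Page, error)) ([]T, error) {
	var (
		all    []T
		cursor string
	)
	for {
		items, page, err := fetchPage(cursor)
		if err != nil {
			return nil, err
		}
		all = append(all, items...)
		if !page.HasNextPage() {
			return all, nil
		}
		cursor = page.Cursor
	}
}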

drivers/base/client.go Normal file

@ -0,0 +1,30 @@
package base
import (
"net/http"
"time"
"github.com/go-resty/resty/v2"
)
var NoRedirectClient *resty.Client
var RestyClient = NewRestyClient()
var HttpClient = &http.Client{}
var UserAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36"
var DefaultTimeout = time.Second * 10
func init() {
NoRedirectClient = resty.New().SetRedirectPolicy(
resty.RedirectPolicyFunc(func(req *http.Request, via []*http.Request) error {
return http.ErrUseLastResponse
}),
)
NoRedirectClient.SetHeader("user-agent", UserAgent)
}
func NewRestyClient() *resty.Client {
return resty.New().
SetHeader("user-agent", UserAgent).
SetRetryCount(3).
SetTimeout(DefaultTimeout)
}
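NoRedirectClient above returns http.ErrUseLastResponse from its redirect policy, so a request stops at the first 3xx response and the caller can read its Location header; this is how the baidu_photo linkAlbum above obtains its download URL. A minimal usage sketch with an illustrative function name, assuming it sits in this package:
// resolveRedirect reads a redirect target without following it.
func resolveRedirect(url string) (string, error) {
	res, err := NoRedirectClient.R().Head(url)
	if err != nil {
		return "", err
	}
	// res is the 3xx response itself; its Location header holds the target.
	return res.Header().Get("Location"), nil
}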

drivers/base/types.go Normal file

@ -0,0 +1,12 @@
package base
import "github.com/go-resty/resty/v2"
type Json map[string]interface{}
type TokenResp struct {
AccessToken string `json:"access_token"`
RefreshToken string `json:"refresh_token"`
}
type ReqCallback func(req *resty.Request)

drivers/ftp/driver.go Normal file

@ -0,0 +1,130 @@
package ftp
import (
"context"
stdpath "path"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/jlaffaye/ftp"
)
type FTP struct {
model.Storage
Addition
conn *ftp.ServerConn
}
func (d *FTP) Config() driver.Config {
return config
}
func (d *FTP) GetAddition() driver.Additional {
return d.Addition
}
func (d *FTP) Init(ctx context.Context, storage model.Storage) error {
d.Storage = storage
err := utils.Json.UnmarshalFromString(d.Storage.Addition, &d.Addition)
if err != nil {
return err
}
return d.login()
}
func (d *FTP) Drop(ctx context.Context) error {
if d.conn != nil {
_ = d.conn.Logout()
}
return nil
}
func (d *FTP) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
if err := d.login(); err != nil {
return nil, err
}
entries, err := d.conn.List(dir.GetPath())
if err != nil {
return nil, err
}
res := make([]model.Obj, 0)
for i := range entries {
entry := entries[i]
if entry.Name == "." || entry.Name == ".." {
continue
}
f := model.Object{
Name: entry.Name,
Size: int64(entry.Size),
Modified: entry.Time,
IsFolder: entry.Type == ftp.EntryTypeFolder,
}
res = append(res, &f)
}
return res, nil
}
//func (d *FTP) Get(ctx context.Context, path string) (model.Obj, error) {
// // this is optional
// return nil, errs.NotImplement
//}
func (d *FTP) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
if err := d.login(); err != nil {
return nil, err
}
resp, err := d.conn.Retr(file.GetPath())
if err != nil {
return nil, err
}
return &model.Link{
Data: resp,
}, nil
}
func (d *FTP) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
if err := d.login(); err != nil {
return err
}
return d.conn.MakeDir(stdpath.Join(parentDir.GetPath(), dirName))
}
func (d *FTP) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
if err := d.login(); err != nil {
return err
}
return d.conn.Rename(srcObj.GetPath(), stdpath.Join(dstDir.GetPath(), srcObj.GetName()))
}
func (d *FTP) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
if err := d.login(); err != nil {
return err
}
return d.conn.Rename(srcObj.GetPath(), stdpath.Join(stdpath.Dir(srcObj.GetPath()), newName))
}
func (d *FTP) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
return errs.NotSupport
}
func (d *FTP) Remove(ctx context.Context, obj model.Obj) error {
if err := d.login(); err != nil {
return err
}
if obj.IsDir() {
return d.conn.RemoveDirRecur(obj.GetPath())
} else {
return d.conn.Delete(obj.GetPath())
}
}
func (d *FTP) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
if err := d.login(); err != nil {
return err
}
return d.conn.Stor(stdpath.Join(dstDir.GetPath(), stream.GetName()), stream)
}
var _ driver.Driver = (*FTP)(nil)

drivers/ftp/meta.go Normal file

@ -0,0 +1,28 @@
package ftp
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
Address string `json:"address" required:"true"`
Username string `json:"username" required:"true"`
Password string `json:"password" required:"true"`
driver.RootPath
}
var config = driver.Config{
Name: "FTP",
LocalSort: true,
OnlyLocal: true,
DefaultRoot: "/",
}
func New() driver.Driver {
return &FTP{}
}
func init() {
op.RegisterDriver(config, New)
}

drivers/ftp/types.go Normal file

@ -0,0 +1 @@
package ftp

drivers/ftp/util.go Normal file

@ -0,0 +1,23 @@
package ftp
import "github.com/jlaffaye/ftp"
// do others that not defined in Driver interface
func (d *FTP) login() error {
if d.conn != nil {
_, err := d.conn.CurrentDir()
if err == nil {
return nil
}
}
conn, err := ftp.Dial(d.Address)
if err != nil {
return err
}
err = conn.Login(d.Username, d.Password)
if err != nil {
return err
}
// cache the authenticated connection so later calls can reuse it
d.conn = conn
return nil
}
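login above caches the authenticated connection in d.conn and reuses it while a CurrentDir probe still succeeds. The dial itself has no connect timeout; a hedged variant using the DialWithTimeout option from github.com/jlaffaye/ftp (and a "time" import) might look like this sketch, which is not part of the driver:
// dialWithTimeout is an illustrative variant of the dial in login that bounds
// how long the initial connect may take.
func dialWithTimeout(addr, user, pass string) (*ftp.ServerConn, error) {
	conn, err := ftp.Dial(addr, ftp.DialWithTimeout(10*time.Second))
	if err != nil {
		return nil, err
	}
	if err := conn.Login(user, pass); err != nil {
		_ = conn.Quit()
		return nil, err
	}
	return conn, nil
}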


@ -0,0 +1,144 @@
package google_drive
import (
"context"
"fmt"
"net/http"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
)
type GoogleDrive struct {
model.Storage
Addition
AccessToken string
}
func (d *GoogleDrive) Config() driver.Config {
return config
}
func (d *GoogleDrive) GetAddition() driver.Additional {
return d.Addition
}
func (d *GoogleDrive) Init(ctx context.Context, storage model.Storage) error {
d.Storage = storage
err := utils.Json.UnmarshalFromString(d.Storage.Addition, &d.Addition)
if err != nil {
return err
}
return d.refreshToken()
}
func (d *GoogleDrive) Drop(ctx context.Context) error {
return nil
}
func (d *GoogleDrive) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
files, err := d.getFiles(dir.GetID())
if err != nil {
return nil, err
}
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
return fileToObj(src), nil
})
}
//func (d *GoogleDrive) Get(ctx context.Context, path string) (model.Obj, error) {
// // this is optional
// return nil, errs.NotImplement
//}
func (d *GoogleDrive) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
url := fmt.Sprintf("https://www.googleapis.com/drive/v3/files/%s?includeItemsFromAllDrives=true&supportsAllDrives=true", file.GetID())
link := model.Link{
URL: url + "&alt=media",
Header: http.Header{
"Authorization": []string{"Bearer " + d.AccessToken},
},
}
return &link, nil
}
func (d *GoogleDrive) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
data := base.Json{
"name": dirName,
"parents": []string{parentDir.GetID()},
"mimeType": "application/vnd.google-apps.folder",
}
_, err := d.request("https://www.googleapis.com/drive/v3/files", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
func (d *GoogleDrive) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
query := map[string]string{
"addParents": dstDir.GetID(),
"removeParents": "root",
}
url := "https://www.googleapis.com/drive/v3/files/" + srcObj.GetID()
_, err := d.request(url, http.MethodPatch, func(req *resty.Request) {
req.SetQueryParams(query)
}, nil)
return err
}
func (d *GoogleDrive) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
data := base.Json{
"name": newName,
}
url := "https://www.googleapis.com/drive/v3/files/" + srcObj.GetID()
_, err := d.request(url, http.MethodPatch, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
func (d *GoogleDrive) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
return errs.NotSupport
}
func (d *GoogleDrive) Remove(ctx context.Context, obj model.Obj) error {
url := "https://www.googleapis.com/drive/v3/files/" + obj.GetID()
_, err := d.request(url, http.MethodDelete, nil, nil)
return err
}
func (d *GoogleDrive) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
data := base.Json{
"name": stream.GetName(),
"parents": []string{dstDir.GetID()},
}
var e Error
url := "https://www.googleapis.com/upload/drive/v3/files?uploadType=resumable&supportsAllDrives=true"
res, err := base.NoRedirectClient.R().SetHeader("Authorization", "Bearer "+d.AccessToken).
SetError(&e).SetBody(data).
Post(url)
if err != nil {
return err
}
if e.Error.Code != 0 {
if e.Error.Code == 401 {
err = d.refreshToken()
if err != nil {
return err
}
return d.Put(ctx, dstDir, stream, up)
}
return fmt.Errorf("%s: %v", e.Error.Message, e.Error.Errors)
}
putUrl := res.Header().Get("location")
_, err = d.request(putUrl, http.MethodPut, func(req *resty.Request) {
req.SetBody(stream.GetReadCloser())
}, nil)
return err
}
var _ driver.Driver = (*GoogleDrive)(nil)


@ -0,0 +1,29 @@
package google_drive
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
driver.RootID
RefreshToken string `json:"refresh_token" required:"true"`
OrderBy string `json:"order_by" type:"string" help:"such as: folder,name,modifiedTime"`
OrderDirection string `json:"order_direction" type:"select" options:"asc,desc"`
ClientID string `json:"client_id" required:"true" default:"202264815644.apps.googleusercontent.com"`
ClientSecret string `json:"client_secret" required:"true" default:"X4Z3ca8xfWDb1Voo-F9a7ZxJ"`
}
var config = driver.Config{
Name: "GoogleDrive",
OnlyProxy: true,
DefaultRoot: "root",
}
func New() driver.Driver {
return &GoogleDrive{}
}
func init() {
op.RegisterDriver(config, New)
}


@ -0,0 +1,55 @@
package google_drive
import (
"strconv"
"time"
"github.com/alist-org/alist/v3/internal/model"
)
type TokenError struct {
Error string `json:"error"`
ErrorDescription string `json:"error_description"`
}
type Files struct {
NextPageToken string `json:"nextPageToken"`
Files []File `json:"files"`
}
type File struct {
Id string `json:"id"`
Name string `json:"name"`
MimeType string `json:"mimeType"`
ModifiedTime time.Time `json:"modifiedTime"`
Size string `json:"size"`
ThumbnailLink string `json:"thumbnailLink"`
}
func fileToObj(f File) *model.ObjThumb {
size, _ := strconv.ParseInt(f.Size, 10, 64)
return &model.ObjThumb{
Object: model.Object{
ID: f.Id,
Name: f.Name,
Size: size,
Modified: time.Time{},
IsFolder: f.MimeType == "application/vnd.google-apps.folder",
},
Thumbnail: model.Thumbnail{},
}
}
type Error struct {
Error struct {
Errors []struct {
Domain string `json:"domain"`
Reason string `json:"reason"`
Message string `json:"message"`
LocationType string `json:"location_type"`
Location string `json:"location"`
}
Code int `json:"code"`
Message string `json:"message"`
} `json:"error"`
}


@ -0,0 +1,97 @@
package google_drive
import (
"fmt"
"net/http"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
)
// do others that not defined in Driver interface
func (d *GoogleDrive) refreshToken() error {
url := "https://www.googleapis.com/oauth2/v4/token"
var resp base.TokenResp
var e TokenError
res, err := base.RestyClient.R().SetResult(&resp).SetError(&e).
SetFormData(map[string]string{
"client_id": d.ClientID,
"client_secret": d.ClientSecret,
"refresh_token": d.RefreshToken,
"grant_type": "refresh_token",
}).Post(url)
if err != nil {
return err
}
log.Debug(res.String())
if e.Error != "" {
return fmt.Errorf(e.Error)
}
d.AccessToken = resp.AccessToken
return nil
}
func (d *GoogleDrive) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
req := base.RestyClient.R()
req.SetHeader("Authorization", "Bearer "+d.AccessToken)
req.SetQueryParam("includeItemsFromAllDrives", "true")
req.SetQueryParam("supportsAllDrives", "true")
if callback != nil {
callback(req)
}
if resp != nil {
req.SetResult(resp)
}
var e Error
req.SetError(&e)
res, err := req.Execute(method, url)
if err != nil {
return nil, err
}
if e.Error.Code != 0 {
if e.Error.Code == 401 {
err = d.refreshToken()
if err != nil {
return nil, err
}
return d.request(url, method, callback, resp)
}
return nil, fmt.Errorf("%s: %v", e.Error.Message, e.Error.Errors)
}
return res.Body(), nil
}
func (d *GoogleDrive) getFiles(id string) ([]File, error) {
pageToken := "first"
res := make([]File, 0)
for pageToken != "" {
if pageToken == "first" {
pageToken = ""
}
var resp Files
orderBy := "folder,name,modifiedTime desc"
if d.OrderBy != "" {
orderBy = d.OrderBy + " " + d.OrderDirection
}
query := map[string]string{
"orderBy": orderBy,
"fields": "files(id,name,mimeType,size,modifiedTime,thumbnailLink),nextPageToken",
"pageSize": "1000",
"q": fmt.Sprintf("'%s' in parents and trashed = false", id),
//"includeItemsFromAllDrives": "true",
//"supportsAllDrives": "true",
"pageToken": pageToken,
}
_, err := d.request("https://www.googleapis.com/drive/v3/files", http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(query)
}, &resp)
if err != nil {
return nil, err
}
pageToken = resp.NextPageToken
res = append(res, resp.Files...)
}
return res, nil
}
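The request helper above handles a 401 by refreshing the token and calling itself again, which has no upper bound if the API keeps rejecting the refreshed token. Below is a hedged sketch of the same flow reshaped with an explicit retry budget; requestBounded is an illustrative name and this is not the driver's code:
// requestBounded mirrors request, but limits the 401 refresh-and-retry to a
// single retry instead of recursing.
func (d *GoogleDrive) requestBounded(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
	for attempt := 0; ; attempt++ {
		req := base.RestyClient.R()
		req.SetHeader("Authorization", "Bearer "+d.AccessToken)
		req.SetQueryParam("includeItemsFromAllDrives", "true")
		req.SetQueryParam("supportsAllDrives", "true")
		if callback != nil {
			callback(req)
		}
		if resp != nil {
			req.SetResult(resp)
		}
		var e Error
		req.SetError(&e)
		res, err := req.Execute(method, url)
		if err != nil {
			return nil, err
		}
		if e.Error.Code == 401 && attempt == 0 {
			if err := d.refreshToken(); err != nil {
				return nil, err
			}
			continue // retry once with the refreshed token
		}
		if e.Error.Code != 0 {
			return nil, fmt.Errorf("%s: %v", e.Error.Message, e.Error.Errors)
		}
		return res.Body(), nil
	}
}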


@ -1,18 +1,26 @@
package local
import (
"bytes"
"context"
"github.com/alist-org/alist/v3/internal/errs"
"errors"
"fmt"
"io"
"io/ioutil"
"net/http"
"os"
stdpath "path"
"path/filepath"
"strconv"
"strings"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/operations"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/pkg/errors"
"github.com/alist-org/alist/v3/server/common"
"github.com/disintegration/imaging"
)
type Local struct {
@ -28,21 +36,19 @@ func (d *Local) Init(ctx context.Context, storage model.Storage) error {
d.Storage = storage
err := utils.Json.UnmarshalFromString(d.Storage.Addition, &d.Addition)
if err != nil {
return errors.Wrap(err, "error while unmarshal addition")
return err
}
if !utils.Exists(d.RootFolder) {
err = errors.Errorf("root folder %s not exists", d.RootFolder)
d.SetStatus(err.Error())
if !utils.Exists(d.GetRootPath()) {
err = fmt.Errorf("root folder %s not exists", d.GetRootPath())
} else {
if !filepath.IsAbs(d.RootFolder) {
d.RootFolder, err = filepath.Abs(d.RootFolder)
if !filepath.IsAbs(d.GetRootPath()) {
abs, err := filepath.Abs(d.GetRootPath())
if err != nil {
return errors.Wrap(err, "error while get abs path")
return err
}
d.SetRootPath(abs)
}
d.SetStatus("OK")
}
operations.MustSaveDriverStorage(d)
return err
}
@ -54,22 +60,33 @@ func (d *Local) GetAddition() driver.Additional {
return d.Addition
}
func (d *Local) List(ctx context.Context, dir model.Obj) ([]model.Obj, error) {
fullPath := dir.GetID()
func (d *Local) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
fullPath := dir.GetPath()
rawFiles, err := ioutil.ReadDir(fullPath)
if err != nil {
return nil, errors.Wrapf(err, "error while read dir %s", fullPath)
return nil, err
}
var files []model.Obj
for _, f := range rawFiles {
if strings.HasPrefix(f.Name(), ".") {
continue
}
file := model.Object{
Name: f.Name(),
Modified: f.ModTime(),
Size: f.Size(),
IsFolder: f.IsDir(),
thumb := ""
if d.Thumbnail && utils.GetFileType(f.Name()) == conf.IMAGE {
thumb = common.GetApiUrl(nil) + stdpath.Join("/d", args.ReqPath, f.Name())
thumb = utils.EncodePath(thumb, true)
thumb += "?type=thumb"
}
file := model.ObjThumb{
Object: model.Object{
Name: f.Name(),
Modified: f.ModTime(),
Size: f.Size(),
IsFolder: f.IsDir(),
},
Thumbnail: model.Thumbnail{
Thumbnail: thumb,
},
}
files = append(files, &file)
}
@ -79,10 +96,13 @@ func (d *Local) List(ctx context.Context, dir model.Obj) ([]model.Obj, error) {
func (d *Local) Get(ctx context.Context, path string) (model.Obj, error) {
f, err := os.Stat(path)
if err != nil {
return nil, errors.Wrapf(err, "error while stat %s", path)
if strings.Contains(err.Error(), "cannot find the file") {
return nil, errs.ObjectNotFound
}
return nil, err
}
file := model.Object{
ID: path,
Path: path,
Name: f.Name(),
Modified: f.ModTime(),
Size: f.Size(),
@ -92,53 +112,75 @@ func (d *Local) Get(ctx context.Context, path string) (model.Obj, error) {
}
func (d *Local) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
fullPath := file.GetID()
link := model.Link{
FilePath: &fullPath,
fullPath := file.GetPath()
var link model.Link
if args.Type == "thumb" && utils.Ext(file.GetName()) != "svg" {
imgData, err := ioutil.ReadFile(fullPath)
if err != nil {
return nil, err
}
srcBuf := bytes.NewBuffer(imgData)
image, err := imaging.Decode(srcBuf)
if err != nil {
return nil, err
}
thumbImg := imaging.Resize(image, 144, 0, imaging.Lanczos)
var buf bytes.Buffer
err = imaging.Encode(&buf, thumbImg, imaging.PNG)
if err != nil {
return nil, err
}
size := buf.Len()
link.Data = io.NopCloser(&buf)
link.Header = http.Header{
"Content-Length": []string{strconv.Itoa(size)},
}
} else {
link.FilePath = &fullPath
}
return &link, nil
}
func (d *Local) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
fullPath := filepath.Join(parentDir.GetID(), dirName)
fullPath := filepath.Join(parentDir.GetPath(), dirName)
err := os.MkdirAll(fullPath, 0700)
if err != nil {
return errors.Wrapf(err, "error while make dir %s", fullPath)
return err
}
return nil
}
func (d *Local) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
srcPath := srcObj.GetID()
dstPath := filepath.Join(dstDir.GetID(), srcObj.GetName())
srcPath := srcObj.GetPath()
dstPath := filepath.Join(dstDir.GetPath(), srcObj.GetName())
err := os.Rename(srcPath, dstPath)
if err != nil {
return errors.Wrapf(err, "error while move %s to %s", srcPath, dstPath)
return err
}
return nil
}
func (d *Local) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
srcPath := srcObj.GetID()
srcPath := srcObj.GetPath()
dstPath := filepath.Join(filepath.Dir(srcPath), newName)
err := os.Rename(srcPath, dstPath)
if err != nil {
return errors.Wrapf(err, "error while rename %s to %s", srcPath, dstPath)
return err
}
return nil
}
func (d *Local) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
srcPath := srcObj.GetID()
dstPath := filepath.Join(dstDir.GetID(), srcObj.GetName())
srcPath := srcObj.GetPath()
dstPath := filepath.Join(dstDir.GetPath(), srcObj.GetName())
var err error
if srcObj.IsDir() {
err = copyDir(srcPath, dstPath)
err = utils.CopyDir(srcPath, dstPath)
} else {
err = copyFile(srcPath, dstPath)
err = utils.CopyFile(srcPath, dstPath)
}
if err != nil {
return errors.Wrapf(err, "error while copy %s to %s", srcPath, dstPath)
return err
}
return nil
}
@ -146,21 +188,21 @@ func (d *Local) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
func (d *Local) Remove(ctx context.Context, obj model.Obj) error {
var err error
if obj.IsDir() {
err = os.RemoveAll(obj.GetID())
err = os.RemoveAll(obj.GetPath())
} else {
err = os.Remove(obj.GetID())
err = os.Remove(obj.GetPath())
}
if err != nil {
return errors.Wrapf(err, "error while remove %s", obj.GetID())
return err
}
return nil
}
func (d *Local) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
fullPath := filepath.Join(dstDir.GetID(), stream.GetName())
fullPath := filepath.Join(dstDir.GetPath(), stream.GetName())
out, err := os.Create(fullPath)
if err != nil {
return errors.Wrapf(err, "error while create file %s", fullPath)
return err
}
defer func() {
_ = out.Close()
@ -168,15 +210,11 @@ func (d *Local) Put(ctx context.Context, dstDir model.Obj, stream model.FileStre
_ = os.Remove(fullPath)
}
}()
err = utils.CopyWithCtx(ctx, out, stream)
err = utils.CopyWithCtx(ctx, out, stream, stream.GetSize(), up)
if err != nil {
return errors.Wrapf(err, "error while copy file %s", fullPath)
return err
}
return nil
}
func (d *Local) Other(ctx context.Context, data interface{}) (interface{}, error) {
return nil, errs.NotSupport
}
var _ driver.Driver = (*Local)(nil)


@ -2,11 +2,12 @@ package local
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/operations"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
driver.RootFolderPath
driver.RootPath
Thumbnail bool `json:"thumbnail" required:"true" help:"enable thumbnail"`
}
var config = driver.Config{
@ -22,5 +23,5 @@ func New() driver.Driver {
}
func init() {
operations.RegisterDriver(config, New)
op.RegisterDriver(config, New)
}


@ -1,67 +1 @@
package local
import (
"fmt"
"io"
"io/ioutil"
"os"
"path"
)
// copyFile File copies a single file from src to dst
func copyFile(src, dst string) error {
var err error
var srcfd *os.File
var dstfd *os.File
var srcinfo os.FileInfo
if srcfd, err = os.Open(src); err != nil {
return err
}
defer srcfd.Close()
if dstfd, err = os.Create(dst); err != nil {
return err
}
defer dstfd.Close()
if _, err = io.Copy(dstfd, srcfd); err != nil {
return err
}
if srcinfo, err = os.Stat(src); err != nil {
return err
}
return os.Chmod(dst, srcinfo.Mode())
}
// copyDir Dir copies a whole directory recursively
func copyDir(src string, dst string) error {
var err error
var fds []os.FileInfo
var srcinfo os.FileInfo
if srcinfo, err = os.Stat(src); err != nil {
return err
}
if err = os.MkdirAll(dst, srcinfo.Mode()); err != nil {
return err
}
if fds, err = ioutil.ReadDir(src); err != nil {
return err
}
for _, fd := range fds {
srcfp := path.Join(src, fd.Name())
dstfp := path.Join(dst, fd.Name())
if fd.IsDir() {
if err = copyDir(srcfp, dstfp); err != nil {
fmt.Println(err)
}
} else {
if err = copyFile(srcfp, dstfp); err != nil {
fmt.Println(err)
}
}
}
return nil
}


@ -0,0 +1,223 @@
package mediatrack
import (
"context"
"crypto/md5"
"encoding/hex"
"fmt"
"io"
"net/http"
"os"
"path"
"strconv"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/credentials"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/s3/s3manager"
"github.com/go-resty/resty/v2"
"github.com/google/uuid"
log "github.com/sirupsen/logrus"
)
type MediaTrack struct {
model.Storage
Addition
}
func (d *MediaTrack) Config() driver.Config {
return config
}
func (d *MediaTrack) GetAddition() driver.Additional {
return d.Addition
}
func (d *MediaTrack) Init(ctx context.Context, storage model.Storage) error {
d.Storage = storage
err := utils.Json.UnmarshalFromString(d.Storage.Addition, &d.Addition)
if err != nil {
return err
}
_, err = d.request("https://kayle.api.mediatrack.cn/users", http.MethodGet, nil, nil)
return err
}
func (d *MediaTrack) Drop(ctx context.Context) error {
return nil
}
func (d *MediaTrack) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
files, err := d.getFiles(dir.GetID())
if err != nil {
return nil, err
}
return utils.SliceConvert(files, func(f File) (model.Obj, error) {
size, _ := strconv.ParseInt(f.Size, 10, 64)
thumb := ""
if f.File != nil && f.File.Cover != "" {
thumb = "https://nano.mtres.cn/" + f.File.Cover
}
return &model.ObjThumb{
Object: model.Object{
ID: f.ID,
Name: f.Title,
Modified: f.UpdatedAt,
IsFolder: f.File == nil,
Size: size,
},
Thumbnail: model.Thumbnail{Thumbnail: thumb},
}, nil
})
}
//func (d *MediaTrack) Get(ctx context.Context, path string) (model.Obj, error) {
// // this is optional
// return nil, errs.NotImplement
//}
func (d *MediaTrack) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
url := fmt.Sprintf("https://kayn.api.mediatrack.cn/v1/download_token/asset?asset_id=%s&source_type=project&password=&source_id=%s",
file.GetID(), d.ProjectID)
log.Debugf("media track url: %s", url)
body, err := d.request(url, http.MethodGet, nil, nil)
if err != nil {
return nil, err
}
token := utils.Json.Get(body, "data", "token").ToString()
url = "https://kayn.api.mediatrack.cn/v1/download/redirect?token=" + token
return &model.Link{URL: url}, nil
}
func (d *MediaTrack) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
url := fmt.Sprintf("https://jayce.api.mediatrack.cn/v3/assets/%s/children", parentDir.GetID())
_, err := d.request(url, http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"type": 1,
"title": dirName,
})
}, nil)
return err
}
func (d *MediaTrack) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
data := base.Json{
"parent_id": dstDir.GetID(),
"ids": []string{srcObj.GetID()},
}
url := "https://jayce.api.mediatrack.cn/v4/assets/batch/move"
_, err := d.request(url, http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
func (d *MediaTrack) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
url := "https://jayce.api.mediatrack.cn/v3/assets/" + srcObj.GetID()
data := base.Json{
"title": newName,
}
_, err := d.request(url, http.MethodPut, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
func (d *MediaTrack) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
data := base.Json{
"parent_id": dstDir.GetID(),
"ids": []string{srcObj.GetID()},
}
url := "https://jayce.api.mediatrack.cn/v4/assets/batch/clone"
_, err := d.request(url, http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
func (d *MediaTrack) Remove(ctx context.Context, obj model.Obj) error {
dir, err := op.Get(ctx, d, path.Dir(obj.GetPath()))
if err != nil {
return err
}
data := base.Json{
"origin_id": dir.GetID(),
"ids": []string{obj.GetID()},
}
url := "https://jayce.api.mediatrack.cn/v4/assets/batch/delete"
_, err = d.request(url, http.MethodDelete, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
func (d *MediaTrack) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
src := "assets/" + uuid.New().String()
var resp UploadResp
_, err := d.request("https://jayce.api.mediatrack.cn/v3/storage/tokens/asset", http.MethodGet, func(req *resty.Request) {
req.SetQueryParam("src", src)
}, &resp)
if err != nil {
return err
}
credential := resp.Data.Credentials
cfg := &aws.Config{
Credentials: credentials.NewStaticCredentials(credential.TmpSecretID, credential.TmpSecretKey, credential.Token),
Region: &resp.Data.Region,
Endpoint: aws.String("cos.accelerate.myqcloud.com"),
}
s, err := session.NewSession(cfg)
if err != nil {
return err
}
tempFile, err := utils.CreateTempFile(stream.GetReadCloser())
if err != nil {
return err
}
defer func() {
_ = tempFile.Close()
_ = os.Remove(tempFile.Name())
}()
uploader := s3manager.NewUploader(s)
input := &s3manager.UploadInput{
Bucket: &resp.Data.Bucket,
Key: &resp.Data.Object,
Body: tempFile,
}
_, err = uploader.Upload(input)
if err != nil {
return err
}
url := fmt.Sprintf("https://jayce.api.mediatrack.cn/v3/assets/%s/children", dstDir.GetID())
_, err = tempFile.Seek(0, io.SeekStart)
if err != nil {
return err
}
h := md5.New()
_, err = io.Copy(h, tempFile)
if err != nil {
return err
}
hash := hex.EncodeToString(h.Sum(nil))
data := base.Json{
"category": 0,
"description": stream.GetName(),
"hash": hash,
"mime": stream.GetMimetype(),
"size": stream.GetSize(),
"src": src,
"title": stream.GetName(),
"type": 0,
}
_, err = d.request(url, http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
var _ driver.Driver = (*MediaTrack)(nil)


@ -0,0 +1,24 @@
package mediatrack
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
AccessToken string `json:"access_token" required:"true"`
ProjectID string `json:"project_id"`
driver.RootID
OrderBy string `json:"order_by" type:"select" options:"updated_at,title,size" default:"title"`
OrderDesc bool `json:"order_desc"`
}
var config = driver.Config{
Name: "MediaTrack",
}
func init() {
op.RegisterDriver(config, func() driver.Driver {
return &MediaTrack{}
})
}


@ -0,0 +1,62 @@
package mediatrack
import "time"
type BaseResp struct {
Status string `json:"status"`
Message string `json:"message"`
}
type File struct {
Category int `json:"category"`
ChildAssets []interface{} `json:"childAssets"`
CommentCount int `json:"comment_count"`
CoverAsset interface{} `json:"cover_asset"`
CoverAssetID string `json:"cover_asset_id"`
CreatedAt time.Time `json:"created_at"`
DeletedAt string `json:"deleted_at"`
Description string `json:"description"`
File *struct {
Cover string `json:"cover"`
Src string `json:"src"`
} `json:"file"`
//FileID string `json:"file_id"`
ID string `json:"id"`
Size string `json:"size"`
Thumbnails []interface{} `json:"thumbnails"`
Title string `json:"title"`
UpdatedAt time.Time `json:"updated_at"`
}
type ChildrenResp struct {
Status string `json:"status"`
Data struct {
Total int `json:"total"`
Assets []File `json:"assets"`
} `json:"data"`
Path string `json:"path"`
TraceID string `json:"trace_id"`
RequestID string `json:"requestId"`
}
type UploadResp struct {
Status string `json:"status"`
Data struct {
Credentials struct {
TmpSecretID string `json:"TmpSecretId"`
TmpSecretKey string `json:"TmpSecretKey"`
Token string `json:"Token"`
ExpiredTime int `json:"ExpiredTime"`
Expiration time.Time `json:"Expiration"`
StartTime int `json:"StartTime"`
} `json:"credentials"`
Object string `json:"object"`
Bucket string `json:"bucket"`
Region string `json:"region"`
URL string `json:"url"`
Size string `json:"size"`
} `json:"data"`
Path string `json:"path"`
TraceID string `json:"trace_id"`
RequestID string `json:"requestId"`
}


@ -0,0 +1,69 @@
package mediatrack
import (
"errors"
"fmt"
"net/http"
"strconv"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
)
// do others that not defined in Driver interface
func (d *MediaTrack) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
req := base.RestyClient.R()
req.SetHeader("Authorization", "Bearer "+d.AccessToken)
if callback != nil {
callback(req)
}
var e BaseResp
req.SetResult(&e)
res, err := req.Execute(method, url)
if err != nil {
return nil, err
}
log.Debugln(res.String())
if e.Status != "SUCCESS" {
return nil, errors.New(e.Message)
}
if resp != nil {
err = utils.Json.Unmarshal(res.Body(), resp)
}
return res.Body(), err
}
func (d *MediaTrack) getFiles(parentId string) ([]File, error) {
files := make([]File, 0)
url := fmt.Sprintf("https://jayce.api.mediatrack.cn/v4/assets/%s/children", parentId)
sort := ""
if d.OrderBy != "" {
if d.OrderDesc {
sort = "-"
}
sort += d.OrderBy
}
page := 1
for {
var resp ChildrenResp
_, err := d.request(url, http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(map[string]string{
"page": strconv.Itoa(page),
"size": "50",
"sort": sort,
})
}, &resp)
if err != nil {
return nil, err
}
if len(resp.Data.Assets) == 0 {
break
}
page++
files = append(files, resp.Data.Assets...)
}
return files, nil
}

drivers/onedrive/driver.go Normal file

@ -0,0 +1,147 @@
package onedrive
import (
"context"
"net/http"
stdpath "path"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
)
type Onedrive struct {
model.Storage
Addition
AccessToken string
}
func (d *Onedrive) Config() driver.Config {
return config
}
func (d *Onedrive) GetAddition() driver.Additional {
return d.Addition
}
func (d *Onedrive) Init(ctx context.Context, storage model.Storage) error {
d.Storage = storage
err := utils.Json.UnmarshalFromString(d.Storage.Addition, &d.Addition)
if err != nil {
return err
}
return d.refreshToken()
}
func (d *Onedrive) Drop(ctx context.Context) error {
return nil
}
func (d *Onedrive) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
files, err := d.getFiles(dir.GetPath())
if err != nil {
return nil, err
}
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
return fileToObj(src), nil
})
}
func (d *Onedrive) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
f, err := d.GetFile(file.GetPath())
if err != nil {
return nil, err
}
if f.File == nil {
return nil, errs.NotFile
}
return &model.Link{
URL: f.Url,
}, nil
}
func (d *Onedrive) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
url := d.GetMetaUrl(false, parentDir.GetPath()) + "/children"
data := base.Json{
"name": dirName,
"folder": base.Json{},
"@microsoft.graph.conflictBehavior": "rename",
}
_, err := d.Request(url, http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
func (d *Onedrive) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
data := base.Json{
"parentReference": base.Json{
"id": dstDir.GetID(),
},
"name": srcObj.GetName(),
}
url := d.GetMetaUrl(false, srcObj.GetPath())
_, err := d.Request(url, http.MethodPatch, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
func (d *Onedrive) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
dstDir, err := op.Get(ctx, d, stdpath.Dir(srcObj.GetPath()))
if err != nil {
return err
}
data := base.Json{
"parentReference": base.Json{
"id": dstDir.GetID(),
},
"name": newName,
}
url := d.GetMetaUrl(false, srcObj.GetPath())
_, err = d.Request(url, http.MethodPatch, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
func (d *Onedrive) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
dst, err := d.GetFile(dstDir.GetPath())
if err != nil {
return err
}
data := base.Json{
"parentReference": base.Json{
"driveId": dst.ParentReference.DriveId,
"id": dst.Id,
},
"name": srcObj.GetName(),
}
url := d.GetMetaUrl(false, srcObj.GetPath()) + "/copy"
_, err = d.Request(url, http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
func (d *Onedrive) Remove(ctx context.Context, obj model.Obj) error {
url := d.GetMetaUrl(false, obj.GetPath())
_, err := d.Request(url, http.MethodDelete, nil, nil)
return err
}
func (d *Onedrive) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
var err error
if stream.GetSize() <= 4*1024*1024 {
err = d.upSmall(dstDir, stream)
} else {
err = d.upBig(ctx, dstDir, stream, up)
}
return err
}
var _ driver.Driver = (*Onedrive)(nil)

drivers/onedrive/meta.go Normal file

@ -0,0 +1,31 @@
package onedrive
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
driver.RootPath
Region string `json:"region" type:"select" required:"true" options:"global,cn,us,de"`
IsSharepoint bool `json:"is_sharepoint"`
ClientID string `json:"client_id" required:"true"`
ClientSecret string `json:"client_secret" required:"true"`
RedirectUri string `json:"redirect_uri" required:"true" default:"https://tool.nn.ci/onedrive/callback"`
RefreshToken string `json:"refresh_token" required:"true"`
SiteId string `json:"site_id"`
}
var config = driver.Config{
Name: "Onedrive",
LocalSort: true,
DefaultRoot: "/",
}
func New() driver.Driver {
return &Onedrive{}
}
func init() {
op.RegisterDriver(config, New)
}

drivers/onedrive/types.go Normal file

@ -0,0 +1,66 @@
package onedrive
import (
"time"
"github.com/alist-org/alist/v3/internal/model"
)
type Host struct {
Oauth string
Api string
}
type TokenErr struct {
Error string `json:"error"`
ErrorDescription string `json:"error_description"`
}
type RespErr struct {
Error struct {
Code string `json:"code"`
Message string `json:"message"`
} `json:"error"`
}
type File struct {
Id string `json:"id"`
Name string `json:"name"`
Size int64 `json:"size"`
LastModifiedDateTime time.Time `json:"lastModifiedDateTime"`
Url string `json:"@microsoft.graph.downloadUrl"`
File *struct {
MimeType string `json:"mimeType"`
} `json:"file"`
Thumbnails []struct {
Medium struct {
Url string `json:"url"`
} `json:"medium"`
} `json:"thumbnails"`
ParentReference struct {
DriveId string `json:"driveId"`
} `json:"parentReference"`
}
func fileToObj(f File) *model.ObjThumbURL {
thumb := ""
if len(f.Thumbnails) > 0 {
thumb = f.Thumbnails[0].Medium.Url
}
return &model.ObjThumbURL{
Object: model.Object{
ID: f.Id,
Name: f.Name,
Size: f.Size,
Modified: f.LastModifiedDateTime,
IsFolder: f.File == nil,
},
Thumbnail: model.Thumbnail{Thumbnail: thumb},
Url: model.Url{Url: f.Url},
}
}
type Files struct {
Value []File `json:"value"`
NextLink string `json:"@odata.nextLink"`
}

drivers/onedrive/util.go Normal file

@ -0,0 +1,200 @@
package onedrive
import (
"bytes"
"context"
"errors"
"fmt"
"io"
"net/http"
stdpath "path"
"strconv"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
jsoniter "github.com/json-iterator/go"
log "github.com/sirupsen/logrus"
)
var onedriveHostMap = map[string]Host{
"global": {
Oauth: "https://login.microsoftonline.com",
Api: "https://graph.microsoft.com",
},
"cn": {
Oauth: "https://login.chinacloudapi.cn",
Api: "https://microsoftgraph.chinacloudapi.cn",
},
"us": {
Oauth: "https://login.microsoftonline.us",
Api: "https://graph.microsoft.us",
},
"de": {
Oauth: "https://login.microsoftonline.de",
Api: "https://graph.microsoft.de",
},
}
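// GetMetaUrl returns the OAuth host when auth is true; otherwise it builds the Graph drive-item URL for path, addressing the configured SharePoint site when IsSharepoint is set.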
func (d *Onedrive) GetMetaUrl(auth bool, path string) string {
host := onedriveHostMap[d.Region]
if auth {
return host.Oauth
}
if d.IsSharepoint {
if path == "/" || path == "\\" {
return fmt.Sprintf("%s/v1.0/sites/%s/drive/root", host.Api, d.SiteId)
} else {
return fmt.Sprintf("%s/v1.0/sites/%s/drive/root:%s:", host.Api, d.SiteId, path)
}
} else {
if path == "/" || path == "\\" {
return fmt.Sprintf("%s/v1.0/me/drive/root", host.Api)
} else {
return fmt.Sprintf("%s/v1.0/me/drive/root:%s:", host.Api, path)
}
}
}
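// refreshToken retries the token refresh up to three times and returns the last error if all attempts fail.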
func (d *Onedrive) refreshToken() error {
var err error
for i := 0; i < 3; i++ {
err = d._refreshToken()
if err == nil {
break
}
}
return err
}
func (d *Onedrive) _refreshToken() error {
url := d.GetMetaUrl(true, "") + "/common/oauth2/v2.0/token"
var resp base.TokenResp
var e TokenErr
_, err := base.RestyClient.R().SetResult(&resp).SetError(&e).SetFormData(map[string]string{
"grant_type": "refresh_token",
"client_id": d.ClientID,
"client_secret": d.ClientSecret,
"redirect_uri": d.RedirectUri,
"refresh_token": d.RefreshToken,
}).Post(url)
if err != nil {
return err
}
if e.Error != "" {
return fmt.Errorf("%s", e.ErrorDescription)
}
if resp.RefreshToken == "" {
return errs.EmptyToken
}
d.RefreshToken, d.AccessToken = resp.RefreshToken, resp.AccessToken
op.MustSaveDriverStorage(d)
return nil
}
func (d *Onedrive) Request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
req := base.RestyClient.R()
req.SetHeader("Authorization", "Bearer "+d.AccessToken)
if callback != nil {
callback(req)
}
if resp != nil {
req.SetResult(resp)
}
var e RespErr
req.SetError(&e)
res, err := req.Execute(method, url)
if err != nil {
return nil, err
}
if e.Error.Code != "" {
if e.Error.Code == "InvalidAuthenticationToken" {
err = d.refreshToken()
if err != nil {
return nil, err
}
return d.Request(url, method, callback, resp)
}
return nil, errors.New(e.Error.Message)
}
return res.Body(), nil
}
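// getFiles lists a folder's children (with thumbnails expanded) and follows @odata.nextLink until every page has been fetched.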
func (d *Onedrive) getFiles(path string) ([]File, error) {
var res []File
nextLink := d.GetMetaUrl(false, path) + "/children?$expand=thumbnails"
for nextLink != "" {
var files Files
_, err := d.Request(nextLink, http.MethodGet, nil, &files)
if err != nil {
return nil, err
}
res = append(res, files.Value...)
nextLink = files.NextLink
}
return res, nil
}
func (d *Onedrive) GetFile(path string) (*File, error) {
var file File
u := d.GetMetaUrl(false, path)
_, err := d.Request(u, http.MethodGet, nil, &file)
return &file, err
}
func (d *Onedrive) upSmall(dstDir model.Obj, stream model.FileStreamer) error {
url := d.GetMetaUrl(false, stdpath.Join(dstDir.GetPath(), stream.GetName())) + "/content"
data, err := io.ReadAll(stream)
if err != nil {
return err
}
_, err = d.Request(url, http.MethodPut, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
func (d *Onedrive) upBig(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
url := d.GetMetaUrl(false, stdpath.Join(dstDir.GetPath(), stream.GetName())) + "/createUploadSession"
res, err := d.Request(url, http.MethodPost, nil, nil)
if err != nil {
return err
}
uploadUrl := jsoniter.Get(res, "uploadUrl").ToString()
var finish int64 = 0
const DEFAULT = 4 * 1024 * 1024
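// upload the stream to the session URL in 4 MiB ranges; each PUT sets Content-Range, and a 201 or 202 response is treated as success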
for finish < stream.GetSize() {
if utils.IsCanceled(ctx) {
return ctx.Err()
}
log.Debugf("upload: %d", finish)
var byteSize int64 = DEFAULT
left := stream.GetSize() - finish
if left < DEFAULT {
byteSize = left
}
byteData := make([]byte, byteSize)
n, err := io.ReadFull(stream, byteData)
log.Debug(err, n)
if err != nil {
return err
}
req, err := http.NewRequest("PUT", uploadUrl, bytes.NewBuffer(byteData))
if err != nil {
return err
}
req.Header.Set("Content-Length", strconv.Itoa(int(byteSize)))
req.Header.Set("Content-Range", fmt.Sprintf("bytes %d-%d/%d", finish, finish+byteSize-1, stream.GetSize()))
finish += byteSize
res, err := base.HttpClient.Do(req)
if err != nil {
return err
}
if res.StatusCode != 201 && res.StatusCode != 202 {
data, _ := io.ReadAll(res.Body)
res.Body.Close()
return errors.New(string(data))
}
res.Body.Close()
up(int(finish * 100 / stream.GetSize()))
}
return nil
}

201
drivers/pikpak/driver.go Normal file

@@ -0,0 +1,201 @@
package pikpak
import (
"context"
"crypto/sha1"
"encoding/hex"
"fmt"
"io"
"net/http"
"os"
"strings"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/credentials"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/s3/s3manager"
"github.com/go-resty/resty/v2"
jsoniter "github.com/json-iterator/go"
log "github.com/sirupsen/logrus"
)
type PikPak struct {
model.Storage
Addition
RefreshToken string
AccessToken string
}
func (d *PikPak) Config() driver.Config {
return config
}
func (d *PikPak) GetAddition() driver.Additional {
return d.Addition
}
func (d *PikPak) Init(ctx context.Context, storage model.Storage) error {
d.Storage = storage
err := utils.Json.UnmarshalFromString(d.Storage.Addition, &d.Addition)
if err != nil {
return err
}
return d.login()
}
func (d *PikPak) Drop(ctx context.Context) error {
return nil
}
func (d *PikPak) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
files, err := d.getFiles(dir.GetID())
if err != nil {
return nil, err
}
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
return fileToObj(src), nil
})
}
func (d *PikPak) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
var resp File
_, err := d.request(fmt.Sprintf("https://api-drive.mypikpak.com/drive/v1/files/%s?_magic=2021&thumbnail_size=SIZE_LARGE", file.GetID()),
http.MethodGet, nil, &resp)
if err != nil {
return nil, err
}
link := model.Link{
URL: resp.WebContentLink,
}
if len(resp.Medias) > 0 && resp.Medias[0].Link.Url != "" {
log.Debugln("use media link")
link.URL = resp.Medias[0].Link.Url
}
return &link, nil
}
func (d *PikPak) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
_, err := d.request("https://api-drive.mypikpak.com/drive/v1/files", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"kind": "drive#folder",
"parent_id": parentDir.GetID(),
"name": dirName,
})
}, nil)
return err
}
func (d *PikPak) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
_, err := d.request("https://api-drive.mypikpak.com/drive/v1/files:batchMove", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"ids": []string{srcObj.GetID()},
"to": base.Json{
"parent_id": dstDir.GetID(),
},
})
}, nil)
return err
}
func (d *PikPak) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
_, err := d.request("https://api-drive.mypikpak.com/drive/v1/files/"+srcObj.GetID(), http.MethodPatch, func(req *resty.Request) {
req.SetBody(base.Json{
"name": newName,
})
}, nil)
return err
}
func (d *PikPak) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
_, err := d.request("https://api-drive.mypikpak.com/drive/v1/files:batchCopy", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"ids": []string{srcObj.GetID()},
"to": base.Json{
"parent_id": dstDir.GetID(),
},
})
}, nil)
return err
}
func (d *PikPak) Remove(ctx context.Context, obj model.Obj) error {
_, err := d.request("https://api-drive.mypikpak.com/drive/v1/files:batchTrash", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"ids": []string{obj.GetID()},
})
}, nil)
return err
}
func (d *PikPak) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
tempFile, err := utils.CreateTempFile(stream.GetReadCloser())
if err != nil {
return err
}
defer func() {
_ = tempFile.Close()
_ = os.Remove(tempFile.Name())
}()
// cal sha1
s := sha1.New()
_, err = io.Copy(s, tempFile)
if err != nil {
return err
}
_, err = tempFile.Seek(0, io.SeekStart)
if err != nil {
return err
}
sha1Str := hex.EncodeToString(s.Sum(nil))
data := base.Json{
"kind": "drive#file",
"name": stream.GetName(),
"size": stream.GetSize(),
"hash": strings.ToUpper(sha1Str),
"upload_type": "UPLOAD_TYPE_RESUMABLE",
"objProvider": base.Json{"provider": "UPLOAD_TYPE_UNKNOWN"},
"parent_id": dstDir.GetID(),
}
res, err := d.request("https://api-drive.mypikpak.com/drive/v1/files", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
if err != nil {
return err
}
if stream.GetSize() == 0 {
log.Debugln(string(res))
return nil
}
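// non-empty files are uploaded directly to the S3-compatible endpoint described by the "resumable" params, using the temporary credentials they contain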
params := jsoniter.Get(res, "resumable").Get("params")
endpoint := params.Get("endpoint").ToString()
endpointS := strings.Split(endpoint, ".")
endpoint = strings.Join(endpointS[1:], ".")
accessKeyId := params.Get("access_key_id").ToString()
accessKeySecret := params.Get("access_key_secret").ToString()
securityToken := params.Get("security_token").ToString()
key := params.Get("key").ToString()
bucket := params.Get("bucket").ToString()
cfg := &aws.Config{
Credentials: credentials.NewStaticCredentials(accessKeyId, accessKeySecret, securityToken),
Region: aws.String("pikpak"),
Endpoint: &endpoint,
}
ss, err := session.NewSession(cfg)
if err != nil {
return err
}
uploader := s3manager.NewUploader(ss)
input := &s3manager.UploadInput{
Bucket: &bucket,
Key: &key,
Body: tempFile,
}
_, err = uploader.Upload(input)
return err
}
var _ driver.Driver = (*PikPak)(nil)

26
drivers/pikpak/meta.go Normal file

@@ -0,0 +1,26 @@
package pikpak
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
driver.RootID
Username string `json:"username" required:"true"`
Password string `json:"password" required:"true"`
}
var config = driver.Config{
Name: "PikPak",
LocalSort: true,
DefaultRoot: "",
}
func New() driver.Driver {
return &PikPak{}
}
func init() {
op.RegisterDriver(config, New)
}

75
drivers/pikpak/types.go Normal file

@@ -0,0 +1,75 @@
package pikpak
import (
"strconv"
"time"
"github.com/alist-org/alist/v3/internal/model"
)
type RespErr struct {
ErrorCode int `json:"error_code"`
Error string `json:"error"`
}
type Files struct {
Files []File `json:"files"`
NextPageToken string `json:"next_page_token"`
}
type File struct {
Id string `json:"id"`
Kind string `json:"kind"`
Name string `json:"name"`
ModifiedTime time.Time `json:"modified_time"`
Size string `json:"size"`
ThumbnailLink string `json:"thumbnail_link"`
WebContentLink string `json:"web_content_link"`
Medias []Media `json:"medias"`
}
func fileToObj(f File) *model.ObjThumb {
size, _ := strconv.ParseInt(f.Size, 10, 64)
return &model.ObjThumb{
Object: model.Object{
ID: f.Id,
Name: f.Name,
Size: size,
Modified: f.ModifiedTime,
IsFolder: f.Kind == "drive#folder",
},
Thumbnail: model.Thumbnail{
Thumbnail: f.ThumbnailLink,
},
}
}
type Media struct {
MediaId string `json:"media_id"`
MediaName string `json:"media_name"`
Video struct {
Height int `json:"height"`
Width int `json:"width"`
Duration int `json:"duration"`
BitRate int `json:"bit_rate"`
FrameRate int `json:"frame_rate"`
VideoCodec string `json:"video_codec"`
AudioCodec string `json:"audio_codec"`
VideoType string `json:"video_type"`
} `json:"video"`
Link struct {
Url string `json:"url"`
Token string `json:"token"`
Expire time.Time `json:"expire"`
} `json:"link"`
NeedMoreQuota bool `json:"need_more_quota"`
VipTypes []interface{} `json:"vip_types"`
RedirectLink string `json:"redirect_link"`
IconLink string `json:"icon_link"`
IsDefault bool `json:"is_default"`
Priority int `json:"priority"`
IsOrigin bool `json:"is_origin"`
ResolutionName string `json:"resolution_name"`
IsVisible bool `json:"is_visible"`
Category string `json:"category"`
}

125
drivers/pikpak/util.go Normal file

@@ -0,0 +1,125 @@
package pikpak
import (
"errors"
"net/http"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/op"
"github.com/go-resty/resty/v2"
jsoniter "github.com/json-iterator/go"
)
// do others that not defined in Driver interface
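// login signs in with the hard-coded client credentials and keeps the returned access and refresh tokens on the driver instance.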
func (d *PikPak) login() error {
url := "https://user.mypikpak.com/v1/auth/signin"
var e RespErr
res, err := base.RestyClient.R().SetError(&e).SetBody(base.Json{
"captcha_token": "",
"client_id": "YNxT9w7GMdWvEOKa",
"client_secret": "dbw2OtmVEeuUvIptb1Coyg",
"username": d.Username,
"password": d.Password,
}).Post(url)
if err != nil {
return err
}
if e.ErrorCode != 0 {
return errors.New(e.Error)
}
data := res.Body()
d.RefreshToken = jsoniter.Get(data, "refresh_token").ToString()
d.AccessToken = jsoniter.Get(data, "access_token").ToString()
return nil
}
func (d *PikPak) refreshToken() error {
url := "https://user.mypikpak.com/v1/auth/token"
var e RespErr
res, err := base.RestyClient.R().SetError(&e).
SetHeader("user-agent", "").SetBody(base.Json{
"client_id": "YNxT9w7GMdWvEOKa",
"client_secret": "dbw2OtmVEeuUvIptb1Coyg",
"grant_type": "refresh_token",
"refresh_token": d.RefreshToken,
}).Post(url)
if err != nil {
d.Status = err.Error()
op.MustSaveDriverStorage(d)
return err
}
if e.ErrorCode != 0 {
if e.ErrorCode == 4126 {
// refresh_token invalid, re-login
return d.login()
}
d.Status = e.Error
op.MustSaveDriverStorage(d)
return errors.New(e.Error)
}
data := res.Body()
d.Status = "work"
d.RefreshToken = jsoniter.Get(data, "refresh_token").ToString()
d.AccessToken = jsoniter.Get(data, "access_token").ToString()
op.MustSaveDriverStorage(d)
return nil
}
func (d *PikPak) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
req := base.RestyClient.R()
req.SetHeader("Authorization", "Bearer "+d.AccessToken)
if callback != nil {
callback(req)
}
if resp != nil {
req.SetResult(resp)
}
var e RespErr
req.SetError(&e)
res, err := req.Execute(method, url)
if err != nil {
return nil, err
}
if e.ErrorCode != 0 {
if e.ErrorCode == 16 {
// login / refresh token
err = d.refreshToken()
if err != nil {
return nil, err
}
return d.request(url, method, callback, resp)
} else {
return nil, errors.New(e.Error)
}
}
return res.Body(), nil
}
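// getFiles pages through a folder 100 entries at a time; "first" is only a sentinel so the loop runs at least once, and paging stops when next_page_token comes back empty.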
func (d *PikPak) getFiles(id string) ([]File, error) {
res := make([]File, 0)
pageToken := "first"
for pageToken != "" {
if pageToken == "first" {
pageToken = ""
}
query := map[string]string{
"parent_id": id,
"thumbnail_size": "SIZE_LARGE",
"with_audit": "true",
"limit": "100",
"filters": `{"phase":{"eq":"PHASE_TYPE_COMPLETE"},"trashed":{"eq":false}}`,
"page_token": pageToken,
}
var resp Files
_, err := d.request("https://api-drive.mypikpak.com/drive/v1/files", http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(query)
}, &resp)
if err != nil {
return nil, err
}
pageToken = resp.NextPageToken
res = append(res, resp.Files...)
}
return res, nil
}

222
drivers/quark/driver.go Normal file

@@ -0,0 +1,222 @@
package quark
import (
"context"
"crypto/md5"
"crypto/sha1"
"encoding/hex"
"io"
"net/http"
"os"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
)
type Quark struct {
model.Storage
Addition
}
func (d *Quark) Config() driver.Config {
return config
}
func (d *Quark) GetAddition() driver.Additional {
return d.Addition
}
func (d *Quark) Init(ctx context.Context, storage model.Storage) error {
d.Storage = storage
err := utils.Json.UnmarshalFromString(d.Storage.Addition, &d.Addition)
if err != nil {
return err
}
_, err = d.request("/config", http.MethodGet, nil, nil)
return err
}
func (d *Quark) Drop(ctx context.Context) error {
return nil
}
func (d *Quark) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
files, err := d.GetFiles(dir.GetID())
if err != nil {
return nil, err
}
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
return fileToObj(src), nil
})
}
//func (d *Quark) Get(ctx context.Context, path string) (model.Obj, error) {
// // TODO this is optional
// return nil, errs.NotImplement
//}
func (d *Quark) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
data := base.Json{
"fids": []string{file.GetID()},
}
var resp DownResp
_, err := d.request("/file/download", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, &resp)
if err != nil {
return nil, err
}
return &model.Link{
URL: resp.Data[0].DownloadUrl,
Header: http.Header{
"Cookie": []string{d.Cookie},
"Referer": []string{"https://pan.quark.cn"},
},
}, nil
}
func (d *Quark) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
data := base.Json{
"dir_init_lock": false,
"dir_path": "",
"file_name": dirName,
"pdir_fid": parentDir.GetID(),
}
_, err := d.request("/file", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
if err == nil {
time.Sleep(time.Second)
}
return err
}
func (d *Quark) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
data := base.Json{
"action_type": 1,
"exclude_fids": []string{},
"filelist": []string{srcObj.GetID()},
"to_pdir_fid": dstDir.GetID(),
}
_, err := d.request("/file/move", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
func (d *Quark) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
data := base.Json{
"fid": srcObj.GetID(),
"file_name": newName,
}
_, err := d.request("/file/rename", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
func (d *Quark) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
return errs.NotSupport
}
func (d *Quark) Remove(ctx context.Context, obj model.Obj) error {
data := base.Json{
"action_type": 1,
"exclude_fids": []string{},
"filelist": []string{obj.GetID()},
}
_, err := d.request("/file/delete", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
func (d *Quark) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
tempFile, err := utils.CreateTempFile(stream.GetReadCloser())
if err != nil {
return err
}
defer func() {
_ = tempFile.Close()
_ = os.Remove(tempFile.Name())
}()
m := md5.New()
_, err = io.Copy(m, tempFile)
if err != nil {
return err
}
_, err = tempFile.Seek(0, io.SeekStart)
if err != nil {
return err
}
md5Str := hex.EncodeToString(m.Sum(nil))
s := sha1.New()
_, err = io.Copy(s, tempFile)
if err != nil {
return err
}
_, err = tempFile.Seek(0, io.SeekStart)
if err != nil {
return err
}
sha1Str := hex.EncodeToString(s.Sum(nil))
// pre
pre, err := d.upPre(stream, dstDir.GetID())
if err != nil {
return err
}
log.Debugln("hash: ", md5Str, sha1Str)
// hash
finish, err := d.upHash(md5Str, sha1Str, pre.Data.TaskId)
if err != nil {
return err
}
if finish {
return nil
}
// part up
partSize := pre.Metadata.PartSize
var bytes []byte
md5s := make([]string, 0)
defaultBytes := make([]byte, partSize)
left := stream.GetSize()
partNumber := 1
sizeDivide100 := stream.GetSize() / 100
for left > 0 {
if left > int64(partSize) {
bytes = defaultBytes
} else {
bytes = make([]byte, left)
}
_, err := io.ReadFull(tempFile, bytes)
if err != nil {
return err
}
left -= int64(partSize)
log.Debugf("left: %d", left)
m, err := d.upPart(pre, stream.GetMimetype(), partNumber, bytes)
//m, err := driver.UpPart(pre, file.GetMIMEType(), partNumber, bytes, account, md5Str, sha1Str)
if err != nil {
return err
}
if m == "finish" {
return nil
}
md5s = append(md5s, m)
partNumber++
if sizeDivide100 > 0 { // avoid division by zero for files smaller than 100 bytes
up(100 - int(left/sizeDivide100))
}
}
err = d.upCommit(pre, md5s)
if err != nil {
return err
}
return d.upFinish(pre)
}
var _ driver.Driver = (*Quark)(nil)

27
drivers/quark/meta.go Normal file

@@ -0,0 +1,27 @@
package quark
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
Cookie string `json:"cookie" required:"true"`
driver.RootID
OrderBy string `json:"order_by" type:"select" options:"file_type,file_name,updated_at" default:"file_name"`
OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
}
var config = driver.Config{
Name: "Quark",
OnlyProxy: true,
DefaultRoot: "0",
}
func New() driver.Driver {
return &Quark{}
}
func init() {
op.RegisterDriver(config, New)
}

150
drivers/quark/types.go Normal file

@@ -0,0 +1,150 @@
package quark
import (
"time"
"github.com/alist-org/alist/v3/internal/model"
)
type Resp struct {
Status int `json:"status"`
Code int `json:"code"`
Message string `json:"message"`
//ReqId string `json:"req_id"`
//Timestamp int `json:"timestamp"`
}
type File struct {
Fid string `json:"fid"`
FileName string `json:"file_name"`
//PdirFid string `json:"pdir_fid"`
//Category int `json:"category"`
//FileType int `json:"file_type"`
Size int64 `json:"size"`
//FormatType string `json:"format_type"`
//Status int `json:"status"`
//Tags string `json:"tags,omitempty"`
//LCreatedAt int64 `json:"l_created_at"`
LUpdatedAt int64 `json:"l_updated_at"`
//NameSpace int `json:"name_space"`
//IncludeItems int `json:"include_items,omitempty"`
//RiskType int `json:"risk_type"`
//BackupSign int `json:"backup_sign"`
//Duration int `json:"duration"`
//FileSource string `json:"file_source"`
File bool `json:"file"`
//CreatedAt int64 `json:"created_at"`
UpdatedAt int64 `json:"updated_at"`
//PrivateExtra struct {} `json:"_private_extra"`
//ObjCategory string `json:"obj_category,omitempty"`
//Thumbnail string `json:"thumbnail,omitempty"`
}
func fileToObj(f File) *model.Object {
return &model.Object{
ID: f.Fid,
Name: f.FileName,
Size: f.Size,
Modified: time.UnixMilli(f.UpdatedAt),
IsFolder: !f.File,
}
}
type SortResp struct {
Resp
Data struct {
List []File `json:"list"`
} `json:"data"`
Metadata struct {
Size int `json:"_size"`
Page int `json:"_page"`
Count int `json:"_count"`
Total int `json:"_total"`
Way string `json:"way"`
} `json:"metadata"`
}
type DownResp struct {
Resp
Data []struct {
//Fid string `json:"fid"`
//FileName string `json:"file_name"`
//PdirFid string `json:"pdir_fid"`
//Category int `json:"category"`
//FileType int `json:"file_type"`
//Size int `json:"size"`
//FormatType string `json:"format_type"`
//Status int `json:"status"`
//Tags string `json:"tags"`
//LCreatedAt int64 `json:"l_created_at"`
//LUpdatedAt int64 `json:"l_updated_at"`
//NameSpace int `json:"name_space"`
//Thumbnail string `json:"thumbnail"`
DownloadUrl string `json:"download_url"`
//Md5 string `json:"md5"`
//RiskType int `json:"risk_type"`
//RangeSize int `json:"range_size"`
//BackupSign int `json:"backup_sign"`
//ObjCategory string `json:"obj_category"`
//Duration int `json:"duration"`
//FileSource string `json:"file_source"`
//File bool `json:"file"`
//CreatedAt int64 `json:"created_at"`
//UpdatedAt int64 `json:"updated_at"`
//PrivateExtra struct {
//} `json:"_private_extra"`
} `json:"data"`
//Metadata struct {
// Acc2 string `json:"acc2"`
// Acc1 string `json:"acc1"`
//} `json:"metadata"`
}
type UpPreResp struct {
Resp
Data struct {
TaskId string `json:"task_id"`
Finish bool `json:"finish"`
UploadId string `json:"upload_id"`
ObjKey string `json:"obj_key"`
UploadUrl string `json:"upload_url"`
Fid string `json:"fid"`
Bucket string `json:"bucket"`
Callback struct {
CallbackUrl string `json:"callbackUrl"`
CallbackBody string `json:"callbackBody"`
} `json:"callback"`
FormatType string `json:"format_type"`
Size int `json:"size"`
AuthInfo string `json:"auth_info"`
} `json:"data"`
Metadata struct {
PartThread int `json:"part_thread"`
Acc2 string `json:"acc2"`
Acc1 string `json:"acc1"`
PartSize int `json:"part_size"` // 分片大小
} `json:"metadata"`
}
type HashResp struct {
Resp
Data struct {
Finish bool `json:"finish"`
Fid string `json:"fid"`
Thumbnail string `json:"thumbnail"`
FormatType string `json:"format_type"`
} `json:"data"`
Metadata struct {
} `json:"metadata"`
}
type UpAuthResp struct {
Resp
Data struct {
AuthKey string `json:"auth_key"`
Speed int `json:"speed"`
Headers []interface{} `json:"headers"`
} `json:"data"`
Metadata struct {
} `json:"metadata"`
}

249
drivers/quark/util.go Normal file

@@ -0,0 +1,249 @@
package quark
import (
"crypto/md5"
"encoding/base64"
"errors"
"fmt"
"net/http"
"strconv"
"strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/pkg/cookie"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
)
// do others that not defined in Driver interface
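// request wraps the clouddrive API; if the response carries a refreshed __puus cookie, it is written back into the stored cookie before errors are checked.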
func (d *Quark) request(pathname string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
u := "https://drive.quark.cn/1/clouddrive" + pathname
req := base.RestyClient.R()
req.SetHeaders(map[string]string{
"Cookie": d.Cookie,
"Accept": "application/json, text/plain, */*",
"Referer": "https://pan.quark.cn/",
})
req.SetQueryParam("pr", "ucpro")
req.SetQueryParam("fr", "pc")
if callback != nil {
callback(req)
}
if resp != nil {
req.SetResult(resp)
}
var e Resp
req.SetError(&e)
res, err := req.Execute(method, u)
if err != nil {
return nil, err
}
__puus := cookie.GetCookie(res.Cookies(), "__puus")
if __puus != nil {
d.Cookie = cookie.SetStr(d.Cookie, "__puus", __puus.Value)
op.MustSaveDriverStorage(d)
}
if e.Status >= 400 || e.Code != 0 {
return nil, errors.New(e.Message)
}
return res.Body(), nil
}
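// GetFiles pages /file/sort 100 entries at a time until _page*_size reaches the reported total.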
func (d *Quark) GetFiles(parent string) ([]File, error) {
files := make([]File, 0)
page := 1
size := 100
query := map[string]string{
"pdir_fid": parent,
"_size": strconv.Itoa(size),
"_fetch_total": "1",
"_sort": "file_type:asc," + d.OrderBy + ":" + d.OrderDirection,
}
for {
query["_page"] = strconv.Itoa(page)
var resp SortResp
_, err := d.request("/file/sort", http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(query)
}, &resp)
if err != nil {
return nil, err
}
files = append(files, resp.Data.List...)
if page*size >= resp.Metadata.Total {
break
}
page++
}
return files, nil
}
func (d *Quark) upPre(file model.FileStreamer, parentId string) (UpPreResp, error) {
now := time.Now()
data := base.Json{
"ccp_hash_update": true,
"dir_name": "",
"file_name": file.GetName(),
"format_type": file.GetMimetype(),
"l_created_at": now.UnixMilli(),
"l_updated_at": now.UnixMilli(),
"pdir_fid": parentId,
"size": file.GetSize(),
//"same_path_reuse": true,
}
var resp UpPreResp
_, err := d.request("/file/upload/pre", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, &resp)
return resp, err
}
func (d *Quark) upHash(md5, sha1, taskId string) (bool, error) {
data := base.Json{
"md5": md5,
"sha1": sha1,
"task_id": taskId,
}
log.Debugf("hash: %+v", data)
var resp HashResp
_, err := d.request("/file/update/hash", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, &resp)
return resp.Data.Finish, err
}
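// upPart asks the Quark API to sign a direct OSS-style PUT of the part (auth_meta follows the OSS string-to-sign layout), uploads the bytes to the bucket and returns the ETag used later by upCommit.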
func (d *Quark) upPart(pre UpPreResp, mineType string, partNumber int, bytes []byte) (string, error) {
//func (driver Quark) UpPart(pre UpPreResp, mineType string, partNumber int, bytes []byte, account *model.Account, md5Str, sha1Str string) (string, error) {
timeStr := time.Now().UTC().Format(http.TimeFormat)
data := base.Json{
"auth_info": pre.Data.AuthInfo,
"auth_meta": fmt.Sprintf(`PUT
%s
%s
x-oss-date:%s
x-oss-user-agent:aliyun-sdk-js/6.6.1 Chrome 98.0.4758.80 on Windows 10 64-bit
/%s/%s?partNumber=%d&uploadId=%s`,
mineType, timeStr, timeStr, pre.Data.Bucket, pre.Data.ObjKey, partNumber, pre.Data.UploadId),
"task_id": pre.Data.TaskId,
}
var resp UpAuthResp
_, err := d.request("/file/upload/auth", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, &resp)
if err != nil {
return "", err
}
//if partNumber == 1 {
// finish, err := driver.UpHash(md5Str, sha1Str, pre.Data.TaskId, account)
// if err != nil {
// return "", err
// }
// if finish {
// return "finish", nil
// }
//}
u := fmt.Sprintf("https://%s.%s/%s", pre.Data.Bucket, pre.Data.UploadUrl[7:], pre.Data.ObjKey)
res, err := base.RestyClient.R().
SetHeaders(map[string]string{
"Authorization": resp.Data.AuthKey,
"Content-Type": mineType,
"Referer": "https://pan.quark.cn/",
"x-oss-date": timeStr,
"x-oss-user-agent": "aliyun-sdk-js/6.6.1 Chrome 98.0.4758.80 on Windows 10 64-bit",
}).
SetQueryParams(map[string]string{
"partNumber": strconv.Itoa(partNumber),
"uploadId": pre.Data.UploadId,
}).SetBody(bytes).Put(u)
if err != nil {
return "", err
}
if res.StatusCode() != 200 {
return "", fmt.Errorf("up status: %d, error: %s", res.StatusCode(), res.String())
}
return res.Header().Get("ETag"), nil
}
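// upCommit builds the CompleteMultipartUpload XML from the collected ETags, has it signed via /file/upload/auth, and posts it to the bucket.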
func (d *Quark) upCommit(pre UpPreResp, md5s []string) error {
timeStr := time.Now().UTC().Format(http.TimeFormat)
log.Debugf("md5s: %+v", md5s)
bodyBuilder := strings.Builder{}
bodyBuilder.WriteString(`<?xml version="1.0" encoding="UTF-8"?>
<CompleteMultipartUpload>
`)
for i, m := range md5s {
bodyBuilder.WriteString(fmt.Sprintf(`<Part>
<PartNumber>%d</PartNumber>
<ETag>%s</ETag>
</Part>
`, i+1, m))
}
bodyBuilder.WriteString("</CompleteMultipartUpload>")
body := bodyBuilder.String()
m := md5.New()
m.Write([]byte(body))
contentMd5 := base64.StdEncoding.EncodeToString(m.Sum(nil))
callbackBytes, err := utils.Json.Marshal(pre.Data.Callback)
if err != nil {
return err
}
callbackBase64 := base64.StdEncoding.EncodeToString(callbackBytes)
data := base.Json{
"auth_info": pre.Data.AuthInfo,
"auth_meta": fmt.Sprintf(`POST
%s
application/xml
%s
x-oss-callback:%s
x-oss-date:%s
x-oss-user-agent:aliyun-sdk-js/6.6.1 Chrome 98.0.4758.80 on Windows 10 64-bit
/%s/%s?uploadId=%s`,
contentMd5, timeStr, callbackBase64, timeStr,
pre.Data.Bucket, pre.Data.ObjKey, pre.Data.UploadId),
"task_id": pre.Data.TaskId,
}
log.Debugf("xml: %s", body)
log.Debugf("auth data: %+v", data)
var resp UpAuthResp
_, err = d.request("/file/upload/auth", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, &resp)
if err != nil {
return err
}
u := fmt.Sprintf("https://%s.%s/%s", pre.Data.Bucket, pre.Data.UploadUrl[7:], pre.Data.ObjKey)
res, err := base.RestyClient.R().
SetHeaders(map[string]string{
"Authorization": resp.Data.AuthKey,
"Content-MD5": contentMd5,
"Content-Type": "application/xml",
"Referer": "https://pan.quark.cn/",
"x-oss-callback": callbackBase64,
"x-oss-date": timeStr,
"x-oss-user-agent": "aliyun-sdk-js/6.6.1 Chrome 98.0.4758.80 on Windows 10 64-bit",
}).
SetQueryParams(map[string]string{
"uploadId": pre.Data.UploadId,
}).SetBody(body).Post(u)
if err != nil {
return err
}
if res.StatusCode() != 200 {
return fmt.Errorf("up status: %d, error: %s", res.StatusCode(), res.String())
}
return nil
}
func (d *Quark) upFinish(pre UpPreResp) error {
data := base.Json{
"obj_key": pre.Data.ObjKey,
"task_id": pre.Data.TaskId,
}
_, err := d.request("/file/upload/finish", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
if err != nil {
return err
}
time.Sleep(time.Second)
return nil
}

155
drivers/s3/driver.go Normal file

@@ -0,0 +1,155 @@
package s3
import (
"bytes"
"context"
"fmt"
"io"
"net/url"
stdpath "path"
"time"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/aws/aws-sdk-go/service/s3/s3manager"
log "github.com/sirupsen/logrus"
)
type S3 struct {
model.Storage
Addition
Session *session.Session
client *s3.S3
linkClient *s3.S3
}
func (d *S3) Config() driver.Config {
return config
}
func (d *S3) GetAddition() driver.Additional {
return d.Addition
}
func (d *S3) Init(ctx context.Context, storage model.Storage) error {
d.Storage = storage
err := utils.Json.UnmarshalFromString(d.Storage.Addition, &d.Addition)
if err != nil {
return err
}
if d.Region == "" {
d.Region = "alist"
}
err = d.initSession()
if err != nil {
return err
}
d.client = d.getClient(false)
d.linkClient = d.getClient(true)
return nil
}
func (d *S3) Drop(ctx context.Context) error {
return nil
}
func (d *S3) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
if d.ListObjectVersion == "v2" {
return d.listV2(dir.GetPath())
}
return d.listV1(dir.GetPath())
}
//func (d *S3) Get(ctx context.Context, path string) (model.Obj, error) {
// // this is optional
// return nil, errs.NotImplement
//}
func (d *S3) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
path := getKey(file.GetPath(), false)
disposition := fmt.Sprintf(`attachment;filename="%s"`, url.QueryEscape(stdpath.Base(path)))
input := &s3.GetObjectInput{
Bucket: &d.Bucket,
Key: &path,
//ResponseContentDisposition: &disposition,
}
if d.CustomHost == "" {
input.ResponseContentDisposition = &disposition
}
req, _ := d.linkClient.GetObjectRequest(input)
var link string
var err error
if d.CustomHost != "" {
err = req.Build()
link = req.HTTPRequest.URL.String()
} else {
link, err = req.Presign(time.Hour * time.Duration(d.SignURLExpire))
}
if err != nil {
return nil, err
}
return &model.Link{
URL: link,
}, nil
}
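// MakeDir writes an empty placeholder object under the new prefix, since S3 has no real directories.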
func (d *S3) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
return d.Put(ctx, &model.Object{
Path: stdpath.Join(parentDir.GetPath(), dirName),
}, &model.FileStream{
Obj: &model.Object{
Name: getPlaceholderName(d.Placeholder),
Modified: time.Now(),
},
ReadCloser: io.NopCloser(bytes.NewReader([]byte{})),
Mimetype: "application/octet-stream",
}, func(int) {})
}
func (d *S3) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
err := d.Copy(ctx, srcObj, dstDir)
if err != nil {
return err
}
return d.Remove(ctx, srcObj)
}
func (d *S3) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
err := d.copy(ctx, srcObj.GetPath(), stdpath.Join(stdpath.Dir(srcObj.GetPath()), newName), srcObj.IsDir())
if err != nil {
return err
}
return d.Remove(ctx, srcObj)
}
func (d *S3) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
return d.copy(ctx, srcObj.GetPath(), stdpath.Join(dstDir.GetPath(), srcObj.GetName()), srcObj.IsDir())
}
func (d *S3) Remove(ctx context.Context, obj model.Obj) error {
key := getKey(obj.GetPath(), obj.IsDir())
input := &s3.DeleteObjectInput{
Bucket: &d.Bucket,
Key: &key,
}
_, err := d.client.DeleteObject(input)
return err
}
func (d *S3) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
uploader := s3manager.NewUploader(d.Session)
key := getKey(stdpath.Join(dstDir.GetPath(), stream.GetName()), false)
log.Debugln("key:", key)
input := &s3manager.UploadInput{
Bucket: &d.Bucket,
Key: &key,
Body: stream,
}
_, err := uploader.Upload(input)
return err
}
var _ driver.Driver = (*S3)(nil)

34
drivers/s3/meta.go Normal file

@@ -0,0 +1,34 @@
package s3
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
driver.RootPath
Bucket string `json:"bucket" required:"true"`
Endpoint string `json:"endpoint" required:"true"`
Region string `json:"region"`
AccessKeyID string `json:"access_key_id" required:"true"`
SecretAccessKey string `json:"secret_access_key" required:"true"`
CustomHost string `json:"custom_host"`
SignURLExpire int `json:"sign_url_expire" type:"number" default:"4"`
Placeholder string `json:"placeholder"`
ForcePathStyle bool `json:"force_path_style"`
ListObjectVersion string `json:"list_object_version" type:"select" options:"v1,v2" default:"v1"`
}
var config = driver.Config{
Name: "S3",
LocalSort: true,
CheckStatus: true,
}
func New() driver.Driver {
return &S3{}
}
func init() {
op.RegisterDriver(config, New)
}

1
drivers/s3/types.go Normal file

@@ -0,0 +1 @@
package s3

207
drivers/s3/util.go Normal file

@@ -0,0 +1,207 @@
package s3
import (
"context"
"errors"
"net/http"
"path"
"strings"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/op"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/credentials"
"github.com/aws/aws-sdk-go/aws/request"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/s3"
log "github.com/sirupsen/logrus"
)
// do others that not defined in Driver interface
func (d *S3) initSession() error {
cfg := &aws.Config{
Credentials: credentials.NewStaticCredentials(d.AccessKeyID, d.SecretAccessKey, ""),
Region: &d.Region,
Endpoint: &d.Endpoint,
S3ForcePathStyle: aws.Bool(d.ForcePathStyle),
}
var err error
d.Session, err = session.NewSession(cfg)
return err
}
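// getClient returns an S3 client; when link is true and a custom host is configured, a build handler rewrites GET request URLs to that host.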
func (d *S3) getClient(link bool) *s3.S3 {
client := s3.New(d.Session)
if link && d.CustomHost != "" {
client.Handlers.Build.PushBack(func(r *request.Request) {
if r.HTTPRequest.Method != http.MethodGet {
return
}
r.HTTPRequest.URL.Host = d.CustomHost
})
}
return client
}
func getKey(path string, dir bool) string {
path = strings.TrimPrefix(path, "/")
if path != "" && dir {
path += "/"
}
return path
}
var defaultPlaceholderName = ".placeholder"
func getPlaceholderName(placeholder string) string {
if placeholder == "" {
return defaultPlaceholderName
}
return placeholder
}
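// listV1 lists one directory level with ListObjects: CommonPrefixes become folders, the placeholder object is skipped, and paging follows Marker while IsTruncated is true.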
func (d *S3) listV1(prefix string) ([]model.Obj, error) {
prefix = getKey(prefix, true)
log.Debugf("list: %s", prefix)
files := make([]model.Obj, 0)
marker := ""
for {
input := &s3.ListObjectsInput{
Bucket: &d.Bucket,
Marker: &marker,
Prefix: &prefix,
Delimiter: aws.String("/"),
}
listObjectsResult, err := d.client.ListObjects(input)
if err != nil {
return nil, err
}
for _, object := range listObjectsResult.CommonPrefixes {
name := path.Base(strings.Trim(*object.Prefix, "/"))
file := model.Object{
//Id: *object.Key,
Name: name,
Modified: d.Modified,
IsFolder: true,
}
files = append(files, &file)
}
for _, object := range listObjectsResult.Contents {
name := path.Base(*object.Key)
if name == getPlaceholderName(d.Placeholder) {
continue
}
file := model.Object{
//Id: *object.Key,
Name: name,
Size: *object.Size,
Modified: *object.LastModified,
}
files = append(files, &file)
}
if listObjectsResult.IsTruncated == nil {
return nil, errors.New("IsTruncated nil")
}
if *listObjectsResult.IsTruncated {
if listObjectsResult.NextMarker == nil {
return nil, errors.New("IsTruncated but NextMarker is nil")
}
marker = *listObjectsResult.NextMarker
} else {
break
}
}
return files, nil
}
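// listV2 is the ListObjectsV2 variant, paging with ContinuationToken and falling back to StartAfter on the last returned key when no token is present.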
func (d *S3) listV2(prefix string) ([]model.Obj, error) {
prefix = getKey(prefix, true)
files := make([]model.Obj, 0)
var continuationToken, startAfter *string
for {
input := &s3.ListObjectsV2Input{
Bucket: &d.Bucket,
ContinuationToken: continuationToken,
Prefix: &prefix,
Delimiter: aws.String("/"),
StartAfter: startAfter,
}
listObjectsResult, err := d.client.ListObjectsV2(input)
if err != nil {
return nil, err
}
log.Debugf("resp: %+v", listObjectsResult)
for _, object := range listObjectsResult.CommonPrefixes {
name := path.Base(strings.Trim(*object.Prefix, "/"))
file := model.Object{
//Id: *object.Key,
Name: name,
Modified: d.Modified,
IsFolder: true,
}
files = append(files, &file)
}
for _, object := range listObjectsResult.Contents {
name := path.Base(*object.Key)
if name == getPlaceholderName(d.Placeholder) {
continue
}
file := model.Object{
//Id: *object.Key,
Name: name,
Size: *object.Size,
Modified: *object.LastModified,
}
files = append(files, &file)
}
if !aws.BoolValue(listObjectsResult.IsTruncated) {
break
}
if listObjectsResult.NextContinuationToken != nil {
continuationToken = listObjectsResult.NextContinuationToken
continue
}
if len(listObjectsResult.Contents) == 0 {
break
}
startAfter = listObjectsResult.Contents[len(listObjectsResult.Contents)-1].Key
}
return files, nil
}
func (d *S3) copy(ctx context.Context, src string, dst string, isDir bool) error {
if isDir {
return d.copyDir(ctx, src, dst)
}
return d.copyFile(ctx, src, dst)
}
func (d *S3) copyFile(ctx context.Context, src string, dst string) error {
srcKey := getKey(src, false)
dstKey := getKey(dst, false)
input := &s3.CopyObjectInput{
Bucket: &d.Bucket,
CopySource: aws.String("/" + d.Bucket + "/" + srcKey),
Key: &dstKey,
}
_, err := d.client.CopyObject(input)
return err
}
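// copyDir lists the source via op.List and copies every entry recursively, because S3 has no server-side folder copy.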
func (d *S3) copyDir(ctx context.Context, src string, dst string) error {
objs, err := op.List(ctx, d, src, model.ListArgs{})
if err != nil {
return err
}
for _, obj := range objs {
cSrc := path.Join(src, obj.GetName())
cDst := path.Join(dst, obj.GetName())
if obj.IsDir() {
err = d.copyDir(ctx, cSrc, cDst)
} else {
err = d.copyFile(ctx, cSrc, cDst)
}
if err != nil {
return err
}
}
return nil
}

102
drivers/sftp/driver.go Normal file

@@ -0,0 +1,102 @@
package sftp
import (
"context"
"os"
"path"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/pkg/sftp"
)
type SFTP struct {
model.Storage
Addition
client *sftp.Client
}
func (d *SFTP) Config() driver.Config {
return config
}
func (d *SFTP) GetAddition() driver.Additional {
return d.Addition
}
func (d *SFTP) Init(ctx context.Context, storage model.Storage) error {
d.Storage = storage
err := utils.Json.UnmarshalFromString(d.Storage.Addition, &d.Addition)
if err != nil {
return err
}
return d.initClient()
}
func (d *SFTP) Drop(ctx context.Context) error {
if d.client != nil {
_ = d.client.Close()
}
return nil
}
func (d *SFTP) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
files, err := d.client.ReadDir(dir.GetPath())
if err != nil {
return nil, err
}
return utils.SliceConvert(files, func(src os.FileInfo) (model.Obj, error) {
return fileToObj(src), nil
})
}
//func (d *SFTP) Get(ctx context.Context, path string) (model.Obj, error) {
// // this is optional
// return nil, errs.NotImplement
//}
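// Link returns the opened remote file handle, so the content is streamed through alist rather than served from a direct URL.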
func (d *SFTP) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
remoteFile, err := d.client.Open(file.GetPath())
if err != nil {
return nil, err
}
return &model.Link{
Data: remoteFile,
}, nil
}
func (d *SFTP) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
return d.client.MkdirAll(path.Join(parentDir.GetPath(), dirName))
}
func (d *SFTP) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
return d.client.Rename(srcObj.GetPath(), path.Join(dstDir.GetPath(), srcObj.GetName()))
}
func (d *SFTP) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
return d.client.Rename(srcObj.GetPath(), path.Join(path.Dir(srcObj.GetPath()), newName))
}
func (d *SFTP) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
return errs.NotSupport
}
func (d *SFTP) Remove(ctx context.Context, obj model.Obj) error {
return d.remove(obj.GetPath())
}
func (d *SFTP) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
dstFile, err := d.client.Create(path.Join(dstDir.GetPath(), stream.GetName()))
if err != nil {
return err
}
defer func() {
_ = dstFile.Close()
}()
err = utils.CopyWithCtx(ctx, dstFile, stream, stream.GetSize(), up)
return err
}
var _ driver.Driver = (*SFTP)(nil)

30
drivers/sftp/meta.go Normal file

@@ -0,0 +1,30 @@
package sftp
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
Address string `json:"address" required:"true"`
Username string `json:"username" required:"true"`
PrivateKey string `json:"private_key" type:"text"`
Password string `json:"password"`
driver.RootPath
}
var config = driver.Config{
Name: "SFTP",
LocalSort: true,
OnlyLocal: true,
DefaultRoot: "/",
CheckStatus: true,
}
func New() driver.Driver {
return &SFTP{}
}
func init() {
op.RegisterDriver(config, New)
}

16
drivers/sftp/types.go Normal file

@@ -0,0 +1,16 @@
package sftp
import (
"os"
"github.com/alist-org/alist/v3/internal/model"
)
func fileToObj(f os.FileInfo) model.Obj {
return &model.Object{
Name: f.Name(),
Size: f.Size(),
Modified: f.ModTime(),
IsFolder: f.IsDir(),
}
}

Some files were not shown because too many files have changed in this diff.