Compare commits
153 Commits
refactor/o
...
v3.33.0
Author | SHA1 | Date | |
---|---|---|---|
cf08aa3668 | |||
9c84b6596f | |||
022e0ca292 | |||
88947f6676 | |||
b07ddfbc13 | |||
9a0a63d34c | |||
195c869272 | |||
bdfc1591bd | |||
82222840fe | |||
45e009a22c | |||
ac68079a76 | |||
2a17d0c2cd | |||
6f6a8e6dfc | |||
7d9ecba99c | |||
ae6984714d | |||
d0f88bd1cb | |||
f8b1f87a5f | |||
71e4e1ab6e | |||
7e6522c81e | |||
94a80bccfe | |||
e66abb3f58 | |||
742335f80e | |||
f1979a8bbc | |||
1f835502ba | |||
424ab2d0c0 | |||
858ba19670 | |||
0c7e47a76c | |||
53926d5cd0 | |||
47f4b05517 | |||
6d85f1b0c0 | |||
e49fda3e2a | |||
da5e35578a | |||
812f58ae6d | |||
9bd3c87bcc | |||
c82866975e | |||
aef952ae68 | |||
9222510d8d | |||
d88b54d98a | |||
85a28d9822 | |||
4f7761fe2c | |||
a8c900d09e | |||
8bccb69e8d | |||
0f29a811bf | |||
442c2f77ea | |||
ce06f394f1 | |||
e3e790f461 | |||
f0e8c0e886 | |||
86b35ae5cf | |||
4930f85b90 | |||
85fe65951d | |||
1381e8fb27 | |||
292bbe94ee | |||
bb6747de4e | |||
555ef0eb1a | |||
bff56ffd0f | |||
34b73b94f7 | |||
434892f135 | |||
e6e2d03ba1 | |||
28bb3f6310 | |||
fb729c1846 | |||
4448e08f5b | |||
8020d42b10 | |||
9d5fb7f595 | |||
126cfe9f93 | |||
fd96a7ccf4 | |||
03b9b9a119 | |||
03dbdfc0dd | |||
2683621ed7 | |||
be537aa49b | |||
6f742a68cf | |||
97a4b8321d | |||
8c432d3339 | |||
ff25e51f80 | |||
88831b5d5a | |||
b97c9173af | |||
207c7e05fe | |||
7db27e6da8 | |||
b5cc90cb5a | |||
8a427ddc49 | |||
c36644a172 | |||
45b1ff4a24 | |||
a4a9675616 | |||
8531b23382 | |||
2c15349ce4 | |||
5afd65b65c | |||
e2434029f9 | |||
bdf7abe717 | |||
2c8d003c2e | |||
a006f57637 | |||
be5d94cd11 | |||
977b3cf9ab | |||
182aacd309 | |||
57bac9e0d2 | |||
478470f609 | |||
6b8f35e7fa | |||
697a0ed2d3 | |||
299bfb4d7b | |||
3eca38e599 | |||
ab216ed170 | |||
e91c42c9dc | |||
54f7b21a73 | |||
de56f926cf | |||
6d4ab57a0e | |||
734d4b0354 | |||
74b20dedc3 | |||
83c2269330 | |||
296be88b5f | |||
026e944cbb | |||
8bdfc7ac8e | |||
e4a6b758dc | |||
66b7fe1e1b | |||
f475eb4401 | |||
b99e709bdb | |||
f4dcf4599c | |||
54e75d7287 | |||
d142fc3449 | |||
f23567199b | |||
1420492d81 | |||
b88067ea2f | |||
d5f381ef6f | |||
68af284dad | |||
d26887d211 | |||
3f405de6a9 | |||
6100647310 | |||
34746e951c | |||
b6134dc515 | |||
d455a232ef | |||
fe34d30d17 | |||
0fbb986ba9 | |||
1280070438 | |||
d7f66138eb | |||
b2890f05ab | |||
7583c4d734 | |||
11a30c5044 | |||
de9647a5fa | |||
8d5283604c | |||
867accafd1 | |||
6fc6751463 | |||
f904596cbc | |||
3d51845f57 | |||
a7421d8fc2 | |||
55a14bc271 | |||
91f51f17d0 | |||
4355dae491 | |||
da1c7a4c23 | |||
769281bd40 | |||
3bbdd4fa89 | |||
68f440abdb | |||
65c5ec0c34 | |||
a6325967d0 | |||
4dff49470a | |||
cc86d6f3d1 | |||
c0f9c8ebaf |
44
.air.toml
Normal file
44
.air.toml
Normal file
@ -0,0 +1,44 @@
|
|||||||
|
root = "."
|
||||||
|
testdata_dir = "testdata"
|
||||||
|
tmp_dir = "tmp"
|
||||||
|
|
||||||
|
[build]
|
||||||
|
args_bin = ["server"]
|
||||||
|
bin = "./tmp/main"
|
||||||
|
cmd = "go build -o ./tmp/main ."
|
||||||
|
delay = 0
|
||||||
|
exclude_dir = ["assets", "tmp", "vendor", "testdata"]
|
||||||
|
exclude_file = []
|
||||||
|
exclude_regex = ["_test.go"]
|
||||||
|
exclude_unchanged = false
|
||||||
|
follow_symlink = false
|
||||||
|
full_bin = ""
|
||||||
|
include_dir = []
|
||||||
|
include_ext = ["go", "tpl", "tmpl", "html"]
|
||||||
|
include_file = []
|
||||||
|
kill_delay = "0s"
|
||||||
|
log = "build-errors.log"
|
||||||
|
poll = false
|
||||||
|
poll_interval = 0
|
||||||
|
rerun = false
|
||||||
|
rerun_delay = 500
|
||||||
|
send_interrupt = false
|
||||||
|
stop_on_error = false
|
||||||
|
|
||||||
|
[color]
|
||||||
|
app = ""
|
||||||
|
build = "yellow"
|
||||||
|
main = "magenta"
|
||||||
|
runner = "green"
|
||||||
|
watcher = "cyan"
|
||||||
|
|
||||||
|
[log]
|
||||||
|
main_only = false
|
||||||
|
time = false
|
||||||
|
|
||||||
|
[misc]
|
||||||
|
clean_on_exit = false
|
||||||
|
|
||||||
|
[screen]
|
||||||
|
clear_on_rebuild = false
|
||||||
|
keep_scroll = true
|
8
.github/workflows/auto_lang.yml
vendored
8
.github/workflows/auto_lang.yml
vendored
@ -20,22 +20,22 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
platform: [ ubuntu-latest ]
|
platform: [ ubuntu-latest ]
|
||||||
go-version: [ '1.20' ]
|
go-version: [ '1.21' ]
|
||||||
name: auto generate lang.json
|
name: auto generate lang.json
|
||||||
runs-on: ${{ matrix.platform }}
|
runs-on: ${{ matrix.platform }}
|
||||||
steps:
|
steps:
|
||||||
- name: Setup go
|
- name: Setup go
|
||||||
uses: actions/setup-go@v4
|
uses: actions/setup-go@v5
|
||||||
with:
|
with:
|
||||||
go-version: ${{ matrix.go-version }}
|
go-version: ${{ matrix.go-version }}
|
||||||
|
|
||||||
- name: Checkout alist
|
- name: Checkout alist
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
path: alist
|
path: alist
|
||||||
|
|
||||||
- name: Checkout alist-web
|
- name: Checkout alist-web
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
repository: 'alist-org/alist-web'
|
repository: 'alist-org/alist-web'
|
||||||
ref: main
|
ref: main
|
||||||
|
13
.github/workflows/build.yml
vendored
13
.github/workflows/build.yml
vendored
@ -15,17 +15,20 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
platform: [ubuntu-latest]
|
platform: [ubuntu-latest]
|
||||||
go-version: [ '1.20' ]
|
go-version: [ '1.21' ]
|
||||||
name: Build
|
name: Build
|
||||||
runs-on: ${{ matrix.platform }}
|
runs-on: ${{ matrix.platform }}
|
||||||
steps:
|
steps:
|
||||||
- name: Setup Go
|
- name: Setup Go
|
||||||
uses: actions/setup-go@v4
|
uses: actions/setup-go@v5
|
||||||
with:
|
with:
|
||||||
go-version: ${{ matrix.go-version }}
|
go-version: ${{ matrix.go-version }}
|
||||||
|
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- uses: benjlevesque/short-sha@v2.2
|
||||||
|
id: short-sha
|
||||||
|
|
||||||
- name: Install dependencies
|
- name: Install dependencies
|
||||||
run: |
|
run: |
|
||||||
@ -39,7 +42,7 @@ jobs:
|
|||||||
bash build.sh dev
|
bash build.sh dev
|
||||||
|
|
||||||
- name: Upload artifact
|
- name: Upload artifact
|
||||||
uses: actions/upload-artifact@v3
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: alist
|
name: alist_${{ env.SHA }}
|
||||||
path: dist
|
path: dist
|
65
.github/workflows/build_docker.yml
vendored
65
.github/workflows/build_docker.yml
vendored
@ -3,6 +3,8 @@ name: build_docker
|
|||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches: [ main ]
|
branches: [ main ]
|
||||||
|
pull_request:
|
||||||
|
branches: [ main ]
|
||||||
|
|
||||||
concurrency:
|
concurrency:
|
||||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||||
@ -10,45 +12,80 @@ concurrency:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build_docker:
|
build_docker:
|
||||||
name: Build docker
|
name: Build Docker
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Docker meta
|
- name: Docker meta
|
||||||
id: meta
|
id: meta
|
||||||
uses: docker/metadata-action@v4
|
uses: docker/metadata-action@v5
|
||||||
with:
|
with:
|
||||||
images: xhofe/alist
|
images: xhofe/alist
|
||||||
- name: Replace release with dev
|
|
||||||
run: |
|
- name: Docker meta with ffmpeg
|
||||||
sed -i 's/release/dev/g' Dockerfile
|
id: meta-ffmpeg
|
||||||
|
uses: docker/metadata-action@v5
|
||||||
|
with:
|
||||||
|
images: xhofe/alist
|
||||||
|
flavor: |
|
||||||
|
suffix=-ffmpeg,onlatest=true
|
||||||
|
|
||||||
|
- uses: actions/setup-go@v4
|
||||||
|
with:
|
||||||
|
go-version: 'stable'
|
||||||
|
|
||||||
|
- name: Build go binary
|
||||||
|
run: bash build.sh dev docker-multiplatform
|
||||||
|
|
||||||
- name: Set up QEMU
|
- name: Set up QEMU
|
||||||
uses: docker/setup-qemu-action@v2
|
uses: docker/setup-qemu-action@v3
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
uses: docker/setup-buildx-action@v2
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
- name: Login to DockerHub
|
- name: Login to DockerHub
|
||||||
uses: docker/login-action@v2
|
if: github.event_name == 'push'
|
||||||
|
uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
username: xhofe
|
username: xhofe
|
||||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
|
|
||||||
- name: Build and push
|
- name: Build and push
|
||||||
id: docker_build
|
id: docker_build
|
||||||
uses: docker/build-push-action@v4
|
uses: docker/build-push-action@v5
|
||||||
with:
|
with:
|
||||||
context: .
|
context: .
|
||||||
push: true
|
file: Dockerfile.ci
|
||||||
|
push: ${{ github.event_name == 'push' }}
|
||||||
tags: ${{ steps.meta.outputs.tags }}
|
tags: ${{ steps.meta.outputs.tags }}
|
||||||
labels: ${{ steps.meta.outputs.labels }}
|
labels: ${{ steps.meta.outputs.labels }}
|
||||||
platforms: linux/amd64,linux/arm64
|
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x
|
||||||
|
|
||||||
|
- name: Replace dockerfile tag
|
||||||
|
run: |
|
||||||
|
sed -i -e "s/latest/main/g" Dockerfile.ffmpeg
|
||||||
|
|
||||||
|
- name: Build and push with ffmpeg
|
||||||
|
id: docker_build_ffmpeg
|
||||||
|
uses: docker/build-push-action@v5
|
||||||
|
with:
|
||||||
|
context: .
|
||||||
|
file: Dockerfile.ffmpeg
|
||||||
|
push: ${{ github.event_name == 'push' }}
|
||||||
|
tags: ${{ steps.meta-ffmpeg.outputs.tags }}
|
||||||
|
labels: ${{ steps.meta-ffmpeg.outputs.labels }}
|
||||||
|
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x
|
||||||
|
|
||||||
build_docker_with_aria2:
|
build_docker_with_aria2:
|
||||||
needs: build_docker
|
needs: build_docker
|
||||||
name: Build docker with aria2
|
name: Build docker with aria2
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
if: github.event_name == 'push'
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repo
|
- name: Checkout repo
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
repository: alist-org/with_aria2
|
repository: alist-org/with_aria2
|
||||||
ref: main
|
ref: main
|
||||||
@ -66,4 +103,4 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
github_token: ${{ secrets.MY_TOKEN }}
|
github_token: ${{ secrets.MY_TOKEN }}
|
||||||
branch: main
|
branch: main
|
||||||
repository: alist-org/with_aria2
|
repository: alist-org/with_aria2
|
||||||
|
2
.github/workflows/changelog.yml
vendored
2
.github/workflows/changelog.yml
vendored
@ -11,7 +11,7 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
- run: npx changelogithub # or changelogithub@0.12 if ensure the stable result
|
- run: npx changelogithub # or changelogithub@0.12 if ensure the stable result
|
||||||
|
8
.github/workflows/release.yml
vendored
8
.github/workflows/release.yml
vendored
@ -9,7 +9,7 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
platform: [ ubuntu-latest ]
|
platform: [ ubuntu-latest ]
|
||||||
go-version: [ '1.20' ]
|
go-version: [ '1.21' ]
|
||||||
name: Release
|
name: Release
|
||||||
runs-on: ${{ matrix.platform }}
|
runs-on: ${{ matrix.platform }}
|
||||||
steps:
|
steps:
|
||||||
@ -21,12 +21,12 @@ jobs:
|
|||||||
prerelease: true
|
prerelease: true
|
||||||
|
|
||||||
- name: Setup Go
|
- name: Setup Go
|
||||||
uses: actions/setup-go@v4
|
uses: actions/setup-go@v5
|
||||||
with:
|
with:
|
||||||
go-version: ${{ matrix.go-version }}
|
go-version: ${{ matrix.go-version }}
|
||||||
|
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
@ -53,7 +53,7 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repo
|
- name: Checkout repo
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
repository: alist-org/desktop-release
|
repository: alist-org/desktop-release
|
||||||
ref: main
|
ref: main
|
||||||
|
34
.github/workflows/release_android.yml
vendored
Normal file
34
.github/workflows/release_android.yml
vendored
Normal file
@ -0,0 +1,34 @@
|
|||||||
|
name: release_android
|
||||||
|
|
||||||
|
on:
|
||||||
|
release:
|
||||||
|
types: [ published ]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
release_android:
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
platform: [ ubuntu-latest ]
|
||||||
|
go-version: [ '1.21' ]
|
||||||
|
name: Release
|
||||||
|
runs-on: ${{ matrix.platform }}
|
||||||
|
steps:
|
||||||
|
|
||||||
|
- name: Setup Go
|
||||||
|
uses: actions/setup-go@v5
|
||||||
|
with:
|
||||||
|
go-version: ${{ matrix.go-version }}
|
||||||
|
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
run: |
|
||||||
|
bash build.sh release android
|
||||||
|
|
||||||
|
- name: Upload assets
|
||||||
|
uses: softprops/action-gh-release@v1
|
||||||
|
with:
|
||||||
|
files: build/compress/*
|
42
.github/workflows/release_docker.yml
vendored
42
.github/workflows/release_docker.yml
vendored
@ -11,43 +11,71 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- uses: actions/setup-go@v4
|
||||||
|
with:
|
||||||
|
go-version: 'stable'
|
||||||
|
|
||||||
|
- name: Build go binary
|
||||||
|
run: bash build.sh release docker-multiplatform
|
||||||
|
|
||||||
- name: Docker meta
|
- name: Docker meta
|
||||||
id: meta
|
id: meta
|
||||||
uses: docker/metadata-action@v4
|
uses: docker/metadata-action@v5
|
||||||
with:
|
with:
|
||||||
images: xhofe/alist
|
images: xhofe/alist
|
||||||
|
|
||||||
- name: Set up QEMU
|
- name: Set up QEMU
|
||||||
uses: docker/setup-qemu-action@v2
|
uses: docker/setup-qemu-action@v3
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
uses: docker/setup-buildx-action@v2
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
- name: Login to DockerHub
|
- name: Login to DockerHub
|
||||||
uses: docker/login-action@v2
|
uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
username: xhofe
|
username: xhofe
|
||||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
|
|
||||||
- name: Build and push
|
- name: Build and push
|
||||||
id: docker_build
|
id: docker_build
|
||||||
uses: docker/build-push-action@v4
|
uses: docker/build-push-action@v5
|
||||||
with:
|
with:
|
||||||
context: .
|
context: .
|
||||||
|
file: Dockerfile.ci
|
||||||
push: true
|
push: true
|
||||||
tags: ${{ steps.meta.outputs.tags }}
|
tags: ${{ steps.meta.outputs.tags }}
|
||||||
labels: ${{ steps.meta.outputs.labels }}
|
labels: ${{ steps.meta.outputs.labels }}
|
||||||
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x
|
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x
|
||||||
|
|
||||||
|
- name: Docker meta with ffmpeg
|
||||||
|
id: meta-ffmpeg
|
||||||
|
uses: docker/metadata-action@v5
|
||||||
|
with:
|
||||||
|
images: xhofe/alist
|
||||||
|
flavor: |
|
||||||
|
latest=true
|
||||||
|
suffix=-ffmpeg,onlatest=true
|
||||||
|
|
||||||
|
- name: Build and push with ffmpeg
|
||||||
|
id: docker_build_ffmpeg
|
||||||
|
uses: docker/build-push-action@v5
|
||||||
|
with:
|
||||||
|
context: .
|
||||||
|
file: Dockerfile.ffmpeg
|
||||||
|
push: true
|
||||||
|
tags: ${{ steps.meta-ffmpeg.outputs.tags }}
|
||||||
|
labels: ${{ steps.meta-ffmpeg.outputs.labels }}
|
||||||
|
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x
|
||||||
|
|
||||||
release_docker_with_aria2:
|
release_docker_with_aria2:
|
||||||
needs: release_docker
|
needs: release_docker
|
||||||
name: Release docker with aria2
|
name: Release docker with aria2
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repo
|
- name: Checkout repo
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
repository: alist-org/with_aria2
|
repository: alist-org/with_aria2
|
||||||
ref: main
|
ref: main
|
||||||
|
6
.github/workflows/release_linux_musl.yml
vendored
6
.github/workflows/release_linux_musl.yml
vendored
@ -9,18 +9,18 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
platform: [ ubuntu-latest ]
|
platform: [ ubuntu-latest ]
|
||||||
go-version: [ '1.20' ]
|
go-version: [ '1.21' ]
|
||||||
name: Release
|
name: Release
|
||||||
runs-on: ${{ matrix.platform }}
|
runs-on: ${{ matrix.platform }}
|
||||||
steps:
|
steps:
|
||||||
|
|
||||||
- name: Setup Go
|
- name: Setup Go
|
||||||
uses: actions/setup-go@v4
|
uses: actions/setup-go@v5
|
||||||
with:
|
with:
|
||||||
go-version: ${{ matrix.go-version }}
|
go-version: ${{ matrix.go-version }}
|
||||||
|
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
|
6
.github/workflows/release_linux_musl_arm.yml
vendored
6
.github/workflows/release_linux_musl_arm.yml
vendored
@ -9,18 +9,18 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
platform: [ ubuntu-latest ]
|
platform: [ ubuntu-latest ]
|
||||||
go-version: [ '1.20' ]
|
go-version: [ '1.21' ]
|
||||||
name: Release
|
name: Release
|
||||||
runs-on: ${{ matrix.platform }}
|
runs-on: ${{ matrix.platform }}
|
||||||
steps:
|
steps:
|
||||||
|
|
||||||
- name: Setup Go
|
- name: Setup Go
|
||||||
uses: actions/setup-go@v4
|
uses: actions/setup-go@v5
|
||||||
with:
|
with:
|
||||||
go-version: ${{ matrix.go-version }}
|
go-version: ${{ matrix.go-version }}
|
||||||
|
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
|
1
.gitignore
vendored
1
.gitignore
vendored
@ -24,6 +24,7 @@ output/
|
|||||||
*.json
|
*.json
|
||||||
/build
|
/build
|
||||||
/data/
|
/data/
|
||||||
|
/tmp/
|
||||||
/log/
|
/log/
|
||||||
/lang/
|
/lang/
|
||||||
/daemon/
|
/daemon/
|
||||||
|
19
Dockerfile
19
Dockerfile
@ -1,18 +1,23 @@
|
|||||||
FROM alpine:3.18 as builder
|
FROM alpine:edge as builder
|
||||||
LABEL stage=go-builder
|
LABEL stage=go-builder
|
||||||
WORKDIR /app/
|
WORKDIR /app/
|
||||||
|
RUN apk add --no-cache bash curl gcc git go musl-dev
|
||||||
|
COPY go.mod go.sum ./
|
||||||
|
RUN go mod download
|
||||||
COPY ./ ./
|
COPY ./ ./
|
||||||
RUN apk add --no-cache bash curl gcc git go musl-dev; \
|
RUN bash build.sh release docker
|
||||||
bash build.sh release docker
|
|
||||||
|
|
||||||
FROM alpine:3.18
|
FROM alpine:edge
|
||||||
LABEL MAINTAINER="i@nn.ci"
|
LABEL MAINTAINER="i@nn.ci"
|
||||||
VOLUME /opt/alist/data/
|
VOLUME /opt/alist/data/
|
||||||
WORKDIR /opt/alist/
|
WORKDIR /opt/alist/
|
||||||
COPY --from=builder /app/bin/alist ./
|
COPY --from=builder /app/bin/alist ./
|
||||||
COPY entrypoint.sh /entrypoint.sh
|
COPY entrypoint.sh /entrypoint.sh
|
||||||
RUN apk add --no-cache bash ca-certificates su-exec tzdata; \
|
RUN apk update && \
|
||||||
chmod +x /entrypoint.sh
|
apk upgrade --no-cache && \
|
||||||
|
apk add --no-cache bash ca-certificates su-exec tzdata; \
|
||||||
|
chmod +x /entrypoint.sh && \
|
||||||
|
rm -rf /var/cache/apk/*
|
||||||
ENV PUID=0 PGID=0 UMASK=022
|
ENV PUID=0 PGID=0 UMASK=022
|
||||||
EXPOSE 5244 5245
|
EXPOSE 5244 5245
|
||||||
CMD [ "/entrypoint.sh" ]
|
CMD [ "/entrypoint.sh" ]
|
16
Dockerfile.ci
Normal file
16
Dockerfile.ci
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
FROM alpine:edge
|
||||||
|
ARG TARGETPLATFORM
|
||||||
|
LABEL MAINTAINER="i@nn.ci"
|
||||||
|
VOLUME /opt/alist/data/
|
||||||
|
WORKDIR /opt/alist/
|
||||||
|
COPY /${TARGETPLATFORM}/alist ./
|
||||||
|
COPY entrypoint.sh /entrypoint.sh
|
||||||
|
RUN apk update && \
|
||||||
|
apk upgrade --no-cache && \
|
||||||
|
apk add --no-cache bash ca-certificates su-exec tzdata; \
|
||||||
|
chmod +x /entrypoint.sh && \
|
||||||
|
rm -rf /var/cache/apk/* && \
|
||||||
|
/entrypoint.sh version
|
||||||
|
ENV PUID=0 PGID=0 UMASK=022
|
||||||
|
EXPOSE 5244 5245
|
||||||
|
CMD [ "/entrypoint.sh" ]
|
4
Dockerfile.ffmpeg
Normal file
4
Dockerfile.ffmpeg
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
FROM xhofe/alist:latest
|
||||||
|
RUN apk update && \
|
||||||
|
apk add --no-cache ffmpeg \
|
||||||
|
rm -rf /var/cache/apk/*
|
13
README.md
13
README.md
@ -45,7 +45,7 @@ English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing]
|
|||||||
|
|
||||||
- [x] Multiple storages
|
- [x] Multiple storages
|
||||||
- [x] Local storage
|
- [x] Local storage
|
||||||
- [x] [Aliyundrive](https://www.aliyundrive.com/)
|
- [x] [Aliyundrive](https://www.alipan.com/)
|
||||||
- [x] OneDrive / Sharepoint ([global](https://www.office.com/), [cn](https://portal.partner.microsoftonline.cn),de,us)
|
- [x] OneDrive / Sharepoint ([global](https://www.office.com/), [cn](https://portal.partner.microsoftonline.cn),de,us)
|
||||||
- [x] [189cloud](https://cloud.189.cn) (Personal, Family)
|
- [x] [189cloud](https://cloud.189.cn) (Personal, Family)
|
||||||
- [x] [GoogleDrive](https://drive.google.com/)
|
- [x] [GoogleDrive](https://drive.google.com/)
|
||||||
@ -66,7 +66,8 @@ English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing]
|
|||||||
- [x] [Quark](https://pan.quark.cn)
|
- [x] [Quark](https://pan.quark.cn)
|
||||||
- [x] [Thunder](https://pan.xunlei.com)
|
- [x] [Thunder](https://pan.xunlei.com)
|
||||||
- [x] [Lanzou](https://www.lanzou.com/)
|
- [x] [Lanzou](https://www.lanzou.com/)
|
||||||
- [x] [Aliyundrive share](https://www.aliyundrive.com/)
|
- [x] [ILanzou](https://www.ilanzou.com/)
|
||||||
|
- [x] [Aliyundrive share](https://www.alipan.com/)
|
||||||
- [x] [Google photo](https://photos.google.com/)
|
- [x] [Google photo](https://photos.google.com/)
|
||||||
- [x] [Mega.nz](https://mega.nz)
|
- [x] [Mega.nz](https://mega.nz)
|
||||||
- [x] [Baidu photo](https://photo.baidu.com/)
|
- [x] [Baidu photo](https://photo.baidu.com/)
|
||||||
@ -74,6 +75,8 @@ English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing]
|
|||||||
- [x] [115](https://115.com/)
|
- [x] [115](https://115.com/)
|
||||||
- [X] Cloudreve
|
- [X] Cloudreve
|
||||||
- [x] [Dropbox](https://www.dropbox.com/)
|
- [x] [Dropbox](https://www.dropbox.com/)
|
||||||
|
- [x] [FeijiPan](https://www.feijipan.com/)
|
||||||
|
- [x] [dogecloud](https://www.dogecloud.com/product/oss)
|
||||||
- [x] Easy to deploy and out-of-the-box
|
- [x] Easy to deploy and out-of-the-box
|
||||||
- [x] File preview (PDF, markdown, code, plain text, ...)
|
- [x] File preview (PDF, markdown, code, plain text, ...)
|
||||||
- [x] Image preview in gallery mode
|
- [x] Image preview in gallery mode
|
||||||
@ -112,9 +115,9 @@ https://alist.nn.ci/guide/sponsor.html
|
|||||||
|
|
||||||
### Special sponsors
|
### Special sponsors
|
||||||
|
|
||||||
- [亚洲云 - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商](https://www.asiayun.com/aff/QQCOOQKZ) (sponsored Chinese API server)
|
- [VidHub](https://okaapps.com/product/1659622164?ref=alist) - An elegant cloud video player within the Apple ecosystem. Support for iPhone, iPad, Mac, and Apple TV.
|
||||||
- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.pw/)
|
- [亚洲云](https://www.asiayun.com/aff/QQCOOQKZ) - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商 (sponsored Chinese API server)
|
||||||
- [JetBrains: Essential tools for software developers and teams](https://www.jetbrains.com/)
|
- [找资源](https://zhaoziyuan.pw/) - 阿里云盘资源搜索引擎
|
||||||
|
|
||||||
## Contributors
|
## Contributors
|
||||||
|
|
||||||
|
13
README_cn.md
13
README_cn.md
@ -45,7 +45,7 @@
|
|||||||
|
|
||||||
- [x] 多种存储
|
- [x] 多种存储
|
||||||
- [x] 本地存储
|
- [x] 本地存储
|
||||||
- [x] [阿里云盘](https://www.aliyundrive.com/)
|
- [x] [阿里云盘](https://www.alipan.com/)
|
||||||
- [x] OneDrive / Sharepoint([国际版](https://www.office.com/), [世纪互联](https://portal.partner.microsoftonline.cn),de,us)
|
- [x] OneDrive / Sharepoint([国际版](https://www.office.com/), [世纪互联](https://portal.partner.microsoftonline.cn),de,us)
|
||||||
- [x] [天翼云盘](https://cloud.189.cn) (个人云, 家庭云)
|
- [x] [天翼云盘](https://cloud.189.cn) (个人云, 家庭云)
|
||||||
- [x] [GoogleDrive](https://drive.google.com/)
|
- [x] [GoogleDrive](https://drive.google.com/)
|
||||||
@ -65,7 +65,8 @@
|
|||||||
- [x] [夸克网盘](https://pan.quark.cn)
|
- [x] [夸克网盘](https://pan.quark.cn)
|
||||||
- [x] [迅雷网盘](https://pan.xunlei.com)
|
- [x] [迅雷网盘](https://pan.xunlei.com)
|
||||||
- [x] [蓝奏云](https://www.lanzou.com/)
|
- [x] [蓝奏云](https://www.lanzou.com/)
|
||||||
- [x] [阿里云盘分享](https://www.aliyundrive.com/)
|
- [x] [蓝奏云优享版](https://www.ilanzou.com/)
|
||||||
|
- [x] [阿里云盘分享](https://www.alipan.com/)
|
||||||
- [x] [谷歌相册](https://photos.google.com/)
|
- [x] [谷歌相册](https://photos.google.com/)
|
||||||
- [x] [Mega.nz](https://mega.nz)
|
- [x] [Mega.nz](https://mega.nz)
|
||||||
- [x] [一刻相册](https://photo.baidu.com/)
|
- [x] [一刻相册](https://photo.baidu.com/)
|
||||||
@ -73,6 +74,8 @@
|
|||||||
- [x] [115](https://115.com/)
|
- [x] [115](https://115.com/)
|
||||||
- [X] Cloudreve
|
- [X] Cloudreve
|
||||||
- [x] [Dropbox](https://www.dropbox.com/)
|
- [x] [Dropbox](https://www.dropbox.com/)
|
||||||
|
- [x] [飞机盘](https://www.feijipan.com/)
|
||||||
|
- [x] [多吉云](https://www.dogecloud.com/product/oss)
|
||||||
- [x] 部署方便,开箱即用
|
- [x] 部署方便,开箱即用
|
||||||
- [x] 文件预览(PDF、markdown、代码、纯文本……)
|
- [x] 文件预览(PDF、markdown、代码、纯文本……)
|
||||||
- [x] 画廊模式下的图像预览
|
- [x] 画廊模式下的图像预览
|
||||||
@ -110,9 +113,9 @@ AList 是一个开源软件,如果你碰巧喜欢这个项目,并希望我
|
|||||||
|
|
||||||
### 特别赞助
|
### 特别赞助
|
||||||
|
|
||||||
- [亚洲云 - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商](https://www.asiayun.com/aff/QQCOOQKZ) (国内API服务器赞助)
|
- [VidHub](https://zh.okaapps.com/product/1659622164?ref=alist) - 苹果生态下优雅的网盘视频播放器,iPhone,iPad,Mac,Apple TV全平台支持。
|
||||||
- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.pw/)
|
- [亚洲云](https://www.asiayun.com/aff/QQCOOQKZ) - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商 (国内API服务器赞助)
|
||||||
- [JetBrains: Essential tools for software developers and teams](https://www.jetbrains.com/)
|
- [找资源](https://zhaoziyuan.pw/) - 阿里云盘资源搜索引擎
|
||||||
|
|
||||||
## 贡献者
|
## 贡献者
|
||||||
|
|
||||||
|
13
README_ja.md
13
README_ja.md
@ -45,7 +45,7 @@
|
|||||||
|
|
||||||
- [x] マルチストレージ
|
- [x] マルチストレージ
|
||||||
- [x] ローカルストレージ
|
- [x] ローカルストレージ
|
||||||
- [x] [Aliyundrive](https://www.aliyundrive.com/)
|
- [x] [Aliyundrive](https://www.alipan.com/)
|
||||||
- [x] OneDrive / Sharepoint ([グローバル](https://www.office.com/), [cn](https://portal.partner.microsoftonline.cn),de,us)
|
- [x] OneDrive / Sharepoint ([グローバル](https://www.office.com/), [cn](https://portal.partner.microsoftonline.cn),de,us)
|
||||||
- [x] [189cloud](https://cloud.189.cn) (Personal, Family)
|
- [x] [189cloud](https://cloud.189.cn) (Personal, Family)
|
||||||
- [x] [GoogleDrive](https://drive.google.com/)
|
- [x] [GoogleDrive](https://drive.google.com/)
|
||||||
@ -66,7 +66,8 @@
|
|||||||
- [x] [Quark](https://pan.quark.cn)
|
- [x] [Quark](https://pan.quark.cn)
|
||||||
- [x] [Thunder](https://pan.xunlei.com)
|
- [x] [Thunder](https://pan.xunlei.com)
|
||||||
- [x] [Lanzou](https://www.lanzou.com/)
|
- [x] [Lanzou](https://www.lanzou.com/)
|
||||||
- [x] [Aliyundrive share](https://www.aliyundrive.com/)
|
- [x] [ILanzou](https://www.ilanzou.com/)
|
||||||
|
- [x] [Aliyundrive share](https://www.alipan.com/)
|
||||||
- [x] [Google photo](https://photos.google.com/)
|
- [x] [Google photo](https://photos.google.com/)
|
||||||
- [x] [Mega.nz](https://mega.nz)
|
- [x] [Mega.nz](https://mega.nz)
|
||||||
- [x] [Baidu photo](https://photo.baidu.com/)
|
- [x] [Baidu photo](https://photo.baidu.com/)
|
||||||
@ -74,6 +75,8 @@
|
|||||||
- [x] [115](https://115.com/)
|
- [x] [115](https://115.com/)
|
||||||
- [X] Cloudreve
|
- [X] Cloudreve
|
||||||
- [x] [Dropbox](https://www.dropbox.com/)
|
- [x] [Dropbox](https://www.dropbox.com/)
|
||||||
|
- [x] [FeijiPan](https://www.feijipan.com/)
|
||||||
|
- [x] [dogecloud](https://www.dogecloud.com/product/oss)
|
||||||
- [x] デプロイが簡単で、すぐに使える
|
- [x] デプロイが簡単で、すぐに使える
|
||||||
- [x] ファイルプレビュー (PDF, マークダウン, コード, プレーンテキスト, ...)
|
- [x] ファイルプレビュー (PDF, マークダウン, コード, プレーンテキスト, ...)
|
||||||
- [x] ギャラリーモードでの画像プレビュー
|
- [x] ギャラリーモードでの画像プレビュー
|
||||||
@ -112,9 +115,9 @@ https://alist.nn.ci/guide/sponsor.html
|
|||||||
|
|
||||||
### スペシャルスポンサー
|
### スペシャルスポンサー
|
||||||
|
|
||||||
- [亚洲云 - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商](https://www.asiayun.com/aff/QQCOOQKZ) (sponsored Chinese API server)
|
- [VidHub](https://okaapps.com/product/1659622164?ref=alist) - An elegant cloud video player within the Apple ecosystem. Support for iPhone, iPad, Mac, and Apple TV.
|
||||||
- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.pw/)
|
- [亚洲云](https://www.asiayun.com/aff/QQCOOQKZ) - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商 (sponsored Chinese API server)
|
||||||
- [JetBrains: Essential tools for software developers and teams](https://www.jetbrains.com/)
|
- [找资源](https://zhaoziyuan.pw/) - 阿里云盘资源搜索引擎
|
||||||
|
|
||||||
## コントリビューター
|
## コントリビューター
|
||||||
|
|
||||||
|
87
build.sh
87
build.sh
@ -49,6 +49,7 @@ BuildWinArm64() {
|
|||||||
export GOARCH=arm64
|
export GOARCH=arm64
|
||||||
export CC=$(pwd)/wrapper/zcc-arm64
|
export CC=$(pwd)/wrapper/zcc-arm64
|
||||||
export CXX=$(pwd)/wrapper/zcxx-arm64
|
export CXX=$(pwd)/wrapper/zcxx-arm64
|
||||||
|
export CGO_ENABLED=1
|
||||||
go build -o "$1" -ldflags="$ldflags" -tags=jsoniter .
|
go build -o "$1" -ldflags="$ldflags" -tags=jsoniter .
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -75,7 +76,7 @@ BuildDev() {
|
|||||||
export CGO_ENABLED=1
|
export CGO_ENABLED=1
|
||||||
go build -o ./dist/$appName-$os_arch -ldflags="$muslflags" -tags=jsoniter .
|
go build -o ./dist/$appName-$os_arch -ldflags="$muslflags" -tags=jsoniter .
|
||||||
done
|
done
|
||||||
xgo -targets=windows/amd64,darwin/amd64 -out "$appName" -ldflags="$ldflags" -tags=jsoniter .
|
xgo -targets=windows/amd64,darwin/amd64,darwin/arm64 -out "$appName" -ldflags="$ldflags" -tags=jsoniter .
|
||||||
mv alist-* dist
|
mv alist-* dist
|
||||||
cd dist
|
cd dist
|
||||||
cp ./alist-windows-amd64.exe ./alist-windows-amd64-upx.exe
|
cp ./alist-windows-amd64.exe ./alist-windows-amd64-upx.exe
|
||||||
@ -84,10 +85,61 @@ BuildDev() {
|
|||||||
cat md5.txt
|
cat md5.txt
|
||||||
}
|
}
|
||||||
|
|
||||||
|
PrepareBuildDocker() {
|
||||||
|
echo "replace github.com/mattn/go-sqlite3 => github.com/leso-kn/go-sqlite3 v0.0.0-20230710125852-03158dc838ed" >>go.mod
|
||||||
|
go get gorm.io/driver/sqlite@v1.4.4
|
||||||
|
go mod download
|
||||||
|
}
|
||||||
|
|
||||||
BuildDocker() {
|
BuildDocker() {
|
||||||
|
PrepareBuildDocker
|
||||||
go build -o ./bin/alist -ldflags="$ldflags" -tags=jsoniter .
|
go build -o ./bin/alist -ldflags="$ldflags" -tags=jsoniter .
|
||||||
}
|
}
|
||||||
|
|
||||||
|
BuildDockerMultiplatform() {
|
||||||
|
PrepareBuildDocker
|
||||||
|
|
||||||
|
BASE="https://musl.cc/"
|
||||||
|
FILES=(x86_64-linux-musl-cross aarch64-linux-musl-cross i486-linux-musl-cross s390x-linux-musl-cross armv6-linux-musleabihf-cross armv7l-linux-musleabihf-cross)
|
||||||
|
for i in "${FILES[@]}"; do
|
||||||
|
url="${BASE}${i}.tgz"
|
||||||
|
curl -L -o "${i}.tgz" "${url}"
|
||||||
|
sudo tar xf "${i}.tgz" --strip-components 1 -C /usr/local
|
||||||
|
rm -f "${i}.tgz"
|
||||||
|
done
|
||||||
|
|
||||||
|
docker_lflags="--extldflags '-static -fpic' $ldflags"
|
||||||
|
export CGO_ENABLED=1
|
||||||
|
|
||||||
|
OS_ARCHES=(linux-amd64 linux-arm64 linux-386 linux-s390x)
|
||||||
|
CGO_ARGS=(x86_64-linux-musl-gcc aarch64-linux-musl-gcc i486-linux-musl-gcc s390x-linux-musl-gcc)
|
||||||
|
for i in "${!OS_ARCHES[@]}"; do
|
||||||
|
os_arch=${OS_ARCHES[$i]}
|
||||||
|
cgo_cc=${CGO_ARGS[$i]}
|
||||||
|
os=${os_arch%%-*}
|
||||||
|
arch=${os_arch##*-}
|
||||||
|
export GOOS=$os
|
||||||
|
export GOARCH=$arch
|
||||||
|
export CC=${cgo_cc}
|
||||||
|
echo "building for $os_arch"
|
||||||
|
go build -o ./$os/$arch/alist -ldflags="$docker_lflags" -tags=jsoniter .
|
||||||
|
done
|
||||||
|
|
||||||
|
DOCKER_ARM_ARCHES=(linux-arm/v6 linux-arm/v7)
|
||||||
|
CGO_ARGS=(armv6-linux-musleabihf-gcc armv7l-linux-musleabihf-gcc)
|
||||||
|
GO_ARM=(6 7)
|
||||||
|
export GOOS=linux
|
||||||
|
export GOARCH=arm
|
||||||
|
for i in "${!DOCKER_ARM_ARCHES[@]}"; do
|
||||||
|
docker_arch=${DOCKER_ARM_ARCHES[$i]}
|
||||||
|
cgo_cc=${CGO_ARGS[$i]}
|
||||||
|
export GOARM=${GO_ARM[$i]}
|
||||||
|
export CC=${cgo_cc}
|
||||||
|
echo "building for $docker_arch"
|
||||||
|
go build -o ./${docker_arch%%-*}/${docker_arch##*-}/alist -ldflags="$docker_lflags" -tags=jsoniter .
|
||||||
|
done
|
||||||
|
}
|
||||||
|
|
||||||
BuildRelease() {
|
BuildRelease() {
|
||||||
rm -rf .git/
|
rm -rf .git/
|
||||||
mkdir -p "build"
|
mkdir -p "build"
|
||||||
@ -159,6 +211,27 @@ BuildReleaseLinuxMuslArm() {
|
|||||||
done
|
done
|
||||||
}
|
}
|
||||||
|
|
||||||
|
BuildReleaseAndroid() {
|
||||||
|
rm -rf .git/
|
||||||
|
mkdir -p "build"
|
||||||
|
wget https://dl.google.com/android/repository/android-ndk-r26b-linux.zip
|
||||||
|
unzip android-ndk-r26b-linux.zip
|
||||||
|
rm android-ndk-r26b-linux.zip
|
||||||
|
OS_ARCHES=(amd64 arm64 386 arm)
|
||||||
|
CGO_ARGS=(x86_64-linux-android24-clang aarch64-linux-android24-clang i686-linux-android24-clang armv7a-linux-androideabi24-clang)
|
||||||
|
for i in "${!OS_ARCHES[@]}"; do
|
||||||
|
os_arch=${OS_ARCHES[$i]}
|
||||||
|
cgo_cc=$(realpath android-ndk-r26b/toolchains/llvm/prebuilt/linux-x86_64/bin/${CGO_ARGS[$i]})
|
||||||
|
echo building for android-${os_arch}
|
||||||
|
export GOOS=android
|
||||||
|
export GOARCH=${os_arch##*-}
|
||||||
|
export CC=${cgo_cc}
|
||||||
|
export CGO_ENABLED=1
|
||||||
|
go build -o ./build/$appName-android-$os_arch -ldflags="$ldflags" -tags=jsoniter .
|
||||||
|
android-ndk-r26b/toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-strip ./build/$appName-android-$os_arch
|
||||||
|
done
|
||||||
|
}
|
||||||
|
|
||||||
MakeRelease() {
|
MakeRelease() {
|
||||||
cd build
|
cd build
|
||||||
mkdir compress
|
mkdir compress
|
||||||
@ -166,6 +239,11 @@ MakeRelease() {
|
|||||||
cp "$i" alist
|
cp "$i" alist
|
||||||
tar -czvf compress/"$i".tar.gz alist
|
tar -czvf compress/"$i".tar.gz alist
|
||||||
rm -f alist
|
rm -f alist
|
||||||
|
done
|
||||||
|
for i in $(find . -type f -name "$appName-android-*"); do
|
||||||
|
cp "$i" alist
|
||||||
|
tar -czvf compress/"$i".tar.gz alist
|
||||||
|
rm -f alist
|
||||||
done
|
done
|
||||||
for i in $(find . -type f -name "$appName-darwin-*"); do
|
for i in $(find . -type f -name "$appName-darwin-*"); do
|
||||||
cp "$i" alist
|
cp "$i" alist
|
||||||
@ -187,6 +265,8 @@ if [ "$1" = "dev" ]; then
|
|||||||
FetchWebDev
|
FetchWebDev
|
||||||
if [ "$2" = "docker" ]; then
|
if [ "$2" = "docker" ]; then
|
||||||
BuildDocker
|
BuildDocker
|
||||||
|
elif [ "$2" = "docker-multiplatform" ]; then
|
||||||
|
BuildDockerMultiplatform
|
||||||
else
|
else
|
||||||
BuildDev
|
BuildDev
|
||||||
fi
|
fi
|
||||||
@ -194,12 +274,17 @@ elif [ "$1" = "release" ]; then
|
|||||||
FetchWebRelease
|
FetchWebRelease
|
||||||
if [ "$2" = "docker" ]; then
|
if [ "$2" = "docker" ]; then
|
||||||
BuildDocker
|
BuildDocker
|
||||||
|
elif [ "$2" = "docker-multiplatform" ]; then
|
||||||
|
BuildDockerMultiplatform
|
||||||
elif [ "$2" = "linux_musl_arm" ]; then
|
elif [ "$2" = "linux_musl_arm" ]; then
|
||||||
BuildReleaseLinuxMuslArm
|
BuildReleaseLinuxMuslArm
|
||||||
MakeRelease "md5-linux-musl-arm.txt"
|
MakeRelease "md5-linux-musl-arm.txt"
|
||||||
elif [ "$2" = "linux_musl" ]; then
|
elif [ "$2" = "linux_musl" ]; then
|
||||||
BuildReleaseLinuxMusl
|
BuildReleaseLinuxMusl
|
||||||
MakeRelease "md5-linux-musl.txt"
|
MakeRelease "md5-linux-musl.txt"
|
||||||
|
elif [ "$2" = "android" ]; then
|
||||||
|
BuildReleaseAndroid
|
||||||
|
MakeRelease "md5-android.txt"
|
||||||
else
|
else
|
||||||
BuildRelease
|
BuildRelease
|
||||||
MakeRelease "md5.txt"
|
MakeRelease "md5.txt"
|
||||||
|
@ -5,6 +5,8 @@ import (
|
|||||||
"os"
|
"os"
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/cmd/flags"
|
"github.com/alist-org/alist/v3/cmd/flags"
|
||||||
|
_ "github.com/alist-org/alist/v3/drivers"
|
||||||
|
_ "github.com/alist-org/alist/v3/internal/offline_download"
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -2,6 +2,7 @@ package cmd
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"net"
|
"net"
|
||||||
"net/http"
|
"net/http"
|
||||||
@ -13,7 +14,6 @@ import (
|
|||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/cmd/flags"
|
"github.com/alist-org/alist/v3/cmd/flags"
|
||||||
_ "github.com/alist-org/alist/v3/drivers"
|
|
||||||
"github.com/alist-org/alist/v3/internal/bootstrap"
|
"github.com/alist-org/alist/v3/internal/bootstrap"
|
||||||
"github.com/alist-org/alist/v3/internal/conf"
|
"github.com/alist-org/alist/v3/internal/conf"
|
||||||
"github.com/alist-org/alist/v3/pkg/utils"
|
"github.com/alist-org/alist/v3/pkg/utils"
|
||||||
@ -35,9 +35,9 @@ the address is defined in config file`,
|
|||||||
utils.Log.Infof("delayed start for %d seconds", conf.Conf.DelayedStart)
|
utils.Log.Infof("delayed start for %d seconds", conf.Conf.DelayedStart)
|
||||||
time.Sleep(time.Duration(conf.Conf.DelayedStart) * time.Second)
|
time.Sleep(time.Duration(conf.Conf.DelayedStart) * time.Second)
|
||||||
}
|
}
|
||||||
bootstrap.InitAria2()
|
bootstrap.InitOfflineDownloadTools()
|
||||||
bootstrap.InitQbittorrent()
|
|
||||||
bootstrap.LoadStorages()
|
bootstrap.LoadStorages()
|
||||||
|
bootstrap.InitTaskManager()
|
||||||
if !flags.Debug && !flags.Dev {
|
if !flags.Debug && !flags.Dev {
|
||||||
gin.SetMode(gin.ReleaseMode)
|
gin.SetMode(gin.ReleaseMode)
|
||||||
}
|
}
|
||||||
@ -51,7 +51,7 @@ the address is defined in config file`,
|
|||||||
httpSrv = &http.Server{Addr: httpBase, Handler: r}
|
httpSrv = &http.Server{Addr: httpBase, Handler: r}
|
||||||
go func() {
|
go func() {
|
||||||
err := httpSrv.ListenAndServe()
|
err := httpSrv.ListenAndServe()
|
||||||
if err != nil && err != http.ErrServerClosed {
|
if err != nil && !errors.Is(err, http.ErrServerClosed) {
|
||||||
utils.Log.Fatalf("failed to start http: %s", err.Error())
|
utils.Log.Fatalf("failed to start http: %s", err.Error())
|
||||||
}
|
}
|
||||||
}()
|
}()
|
||||||
@ -62,7 +62,7 @@ the address is defined in config file`,
|
|||||||
httpsSrv = &http.Server{Addr: httpsBase, Handler: r}
|
httpsSrv = &http.Server{Addr: httpsBase, Handler: r}
|
||||||
go func() {
|
go func() {
|
||||||
err := httpsSrv.ListenAndServeTLS(conf.Conf.Scheme.CertFile, conf.Conf.Scheme.KeyFile)
|
err := httpsSrv.ListenAndServeTLS(conf.Conf.Scheme.CertFile, conf.Conf.Scheme.KeyFile)
|
||||||
if err != nil && err != http.ErrServerClosed {
|
if err != nil && !errors.Is(err, http.ErrServerClosed) {
|
||||||
utils.Log.Fatalf("failed to start https: %s", err.Error())
|
utils.Log.Fatalf("failed to start https: %s", err.Error())
|
||||||
}
|
}
|
||||||
}()
|
}()
|
||||||
@ -86,11 +86,32 @@ the address is defined in config file`,
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
err = unixSrv.Serve(listener)
|
err = unixSrv.Serve(listener)
|
||||||
if err != nil && err != http.ErrServerClosed {
|
if err != nil && !errors.Is(err, http.ErrServerClosed) {
|
||||||
utils.Log.Fatalf("failed to start unix: %s", err.Error())
|
utils.Log.Fatalf("failed to start unix: %s", err.Error())
|
||||||
}
|
}
|
||||||
}()
|
}()
|
||||||
}
|
}
|
||||||
|
s3r := gin.New()
|
||||||
|
s3r.Use(gin.LoggerWithWriter(log.StandardLogger().Out), gin.RecoveryWithWriter(log.StandardLogger().Out))
|
||||||
|
server.InitS3(s3r)
|
||||||
|
if conf.Conf.S3.Port != -1 {
|
||||||
|
s3Base := fmt.Sprintf("%s:%d", conf.Conf.Scheme.Address, conf.Conf.S3.Port)
|
||||||
|
utils.Log.Infof("start S3 server @ %s", s3Base)
|
||||||
|
go func() {
|
||||||
|
var err error
|
||||||
|
if conf.Conf.S3.SSL {
|
||||||
|
httpsSrv = &http.Server{Addr: s3Base, Handler: s3r}
|
||||||
|
err = httpsSrv.ListenAndServeTLS(conf.Conf.Scheme.CertFile, conf.Conf.Scheme.KeyFile)
|
||||||
|
}
|
||||||
|
if !conf.Conf.S3.SSL {
|
||||||
|
httpSrv = &http.Server{Addr: s3Base, Handler: s3r}
|
||||||
|
err = httpSrv.ListenAndServe()
|
||||||
|
}
|
||||||
|
if err != nil && !errors.Is(err, http.ErrServerClosed) {
|
||||||
|
utils.Log.Fatalf("failed to start s3 server: %s", err.Error())
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
}
|
||||||
// Wait for interrupt signal to gracefully shutdown the server with
|
// Wait for interrupt signal to gracefully shutdown the server with
|
||||||
// a timeout of 1 second.
|
// a timeout of 1 second.
|
||||||
quit := make(chan os.Signal, 1)
|
quit := make(chan os.Signal, 1)
|
||||||
|
@ -63,8 +63,9 @@ func (d *Pan115) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
|
|||||||
if err := d.WaitLimit(ctx); err != nil {
|
if err := d.WaitLimit(ctx); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
downloadInfo, err := d.client.
|
var userAgent = args.Header.Get("User-Agent")
|
||||||
DownloadWithUA(file.(*FileObj).PickCode, driver115.UA115Browser)
|
downloadInfo, err := d.
|
||||||
|
DownloadWithUA(file.(*FileObj).PickCode, userAgent)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
@ -6,17 +6,18 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type Addition struct {
|
type Addition struct {
|
||||||
Cookie string `json:"cookie" type:"text" help:"one of QR code token and cookie required"`
|
Cookie string `json:"cookie" type:"text" help:"one of QR code token and cookie required"`
|
||||||
QRCodeToken string `json:"qrcode_token" type:"text" help:"one of QR code token and cookie required"`
|
QRCodeToken string `json:"qrcode_token" type:"text" help:"one of QR code token and cookie required"`
|
||||||
PageSize int64 `json:"page_size" type:"number" default:"56" help:"list api per page size of 115 driver"`
|
QRCodeSource string `json:"qrcode_source" type:"select" options:"web,android,ios,linux,mac,windows,tv" default:"linux" help:"select the QR code device, default linux"`
|
||||||
LimitRate float64 `json:"limit_rate" type:"number" default:"2" help:"limit all api request rate (1r/[limit_rate]s)"`
|
PageSize int64 `json:"page_size" type:"number" default:"56" help:"list api per page size of 115 driver"`
|
||||||
|
LimitRate float64 `json:"limit_rate" type:"number" default:"2" help:"limit all api request rate (1r/[limit_rate]s)"`
|
||||||
driver.RootID
|
driver.RootID
|
||||||
}
|
}
|
||||||
|
|
||||||
var config = driver.Config{
|
var config = driver.Config{
|
||||||
Name: "115 Cloud",
|
Name: "115 Cloud",
|
||||||
DefaultRoot: "0",
|
DefaultRoot: "0",
|
||||||
OnlyProxy: true,
|
//OnlyProxy: true,
|
||||||
//OnlyLocal: true,
|
//OnlyLocal: true,
|
||||||
NoOverwriteUpload: true,
|
NoOverwriteUpload: true,
|
||||||
}
|
}
|
||||||
|
@ -5,12 +5,8 @@ import (
|
|||||||
"crypto/tls"
|
"crypto/tls"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
"fmt"
|
||||||
"github.com/alist-org/alist/v3/internal/model"
|
|
||||||
"github.com/alist-org/alist/v3/pkg/http_range"
|
|
||||||
"github.com/alist-org/alist/v3/pkg/utils"
|
|
||||||
"github.com/aliyun/aliyun-oss-go-sdk/oss"
|
|
||||||
"github.com/orzogc/fake115uploader/cipher"
|
|
||||||
"io"
|
"io"
|
||||||
|
"net/http"
|
||||||
"net/url"
|
"net/url"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"strconv"
|
"strconv"
|
||||||
@ -18,29 +14,35 @@ import (
|
|||||||
"sync"
|
"sync"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/SheltonZhu/115driver/pkg/driver"
|
|
||||||
driver115 "github.com/SheltonZhu/115driver/pkg/driver"
|
|
||||||
"github.com/alist-org/alist/v3/internal/conf"
|
"github.com/alist-org/alist/v3/internal/conf"
|
||||||
|
"github.com/alist-org/alist/v3/internal/model"
|
||||||
|
"github.com/alist-org/alist/v3/pkg/http_range"
|
||||||
|
"github.com/alist-org/alist/v3/pkg/utils"
|
||||||
|
"github.com/aliyun/aliyun-oss-go-sdk/oss"
|
||||||
|
|
||||||
|
driver115 "github.com/SheltonZhu/115driver/pkg/driver"
|
||||||
|
crypto "github.com/gaoyb7/115drive-webdav/115"
|
||||||
|
"github.com/orzogc/fake115uploader/cipher"
|
||||||
"github.com/pkg/errors"
|
"github.com/pkg/errors"
|
||||||
)
|
)
|
||||||
|
|
||||||
var UserAgent = driver.UA115Desktop
|
var UserAgent = driver115.UA115Desktop
|
||||||
|
|
||||||
func (d *Pan115) login() error {
|
func (d *Pan115) login() error {
|
||||||
var err error
|
var err error
|
||||||
opts := []driver.Option{
|
opts := []driver115.Option{
|
||||||
driver.UA(UserAgent),
|
driver115.UA(UserAgent),
|
||||||
func(c *driver.Pan115Client) {
|
func(c *driver115.Pan115Client) {
|
||||||
c.Client.SetTLSClientConfig(&tls.Config{InsecureSkipVerify: conf.Conf.TlsInsecureSkipVerify})
|
c.Client.SetTLSClientConfig(&tls.Config{InsecureSkipVerify: conf.Conf.TlsInsecureSkipVerify})
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
d.client = driver.New(opts...)
|
d.client = driver115.New(opts...)
|
||||||
cr := &driver.Credential{}
|
cr := &driver115.Credential{}
|
||||||
if d.Addition.QRCodeToken != "" {
|
if d.Addition.QRCodeToken != "" {
|
||||||
s := &driver.QRCodeSession{
|
s := &driver115.QRCodeSession{
|
||||||
UID: d.Addition.QRCodeToken,
|
UID: d.Addition.QRCodeToken,
|
||||||
}
|
}
|
||||||
if cr, err = d.client.QRCodeLogin(s); err != nil {
|
if cr, err = d.client.QRCodeLoginWithApp(s, driver115.LoginApp(d.QRCodeSource)); err != nil {
|
||||||
return errors.Wrap(err, "failed to login by qrcode")
|
return errors.Wrap(err, "failed to login by qrcode")
|
||||||
}
|
}
|
||||||
d.Addition.Cookie = fmt.Sprintf("UID=%s;CID=%s;SEID=%s", cr.UID, cr.CID, cr.SEID)
|
d.Addition.Cookie = fmt.Sprintf("UID=%s;CID=%s;SEID=%s", cr.UID, cr.CID, cr.SEID)
|
||||||
@ -59,7 +61,7 @@ func (d *Pan115) login() error {
|
|||||||
func (d *Pan115) getFiles(fileId string) ([]FileObj, error) {
|
func (d *Pan115) getFiles(fileId string) ([]FileObj, error) {
|
||||||
res := make([]FileObj, 0)
|
res := make([]FileObj, 0)
|
||||||
if d.PageSize <= 0 {
|
if d.PageSize <= 0 {
|
||||||
d.PageSize = driver.FileListLimit
|
d.PageSize = driver115.FileListLimit
|
||||||
}
|
}
|
||||||
files, err := d.client.ListWithLimit(fileId, d.PageSize)
|
files, err := d.client.ListWithLimit(fileId, d.PageSize)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@ -75,6 +77,61 @@ const (
|
|||||||
appVer = "2.0.3.6"
|
appVer = "2.0.3.6"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
func (c *Pan115) DownloadWithUA(pickCode, ua string) (*driver115.DownloadInfo, error) {
|
||||||
|
key := crypto.GenerateKey()
|
||||||
|
result := driver115.DownloadResp{}
|
||||||
|
params, err := utils.Json.Marshal(map[string]string{"pickcode": pickCode})
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
data := crypto.Encode(params, key)
|
||||||
|
|
||||||
|
bodyReader := strings.NewReader(url.Values{"data": []string{data}}.Encode())
|
||||||
|
reqUrl := fmt.Sprintf("%s?t=%s", driver115.ApiDownloadGetUrl, driver115.Now().String())
|
||||||
|
req, _ := http.NewRequest(http.MethodPost, reqUrl, bodyReader)
|
||||||
|
req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
|
||||||
|
req.Header.Set("Cookie", c.Cookie)
|
||||||
|
req.Header.Set("User-Agent", ua)
|
||||||
|
|
||||||
|
resp, err := c.client.Client.GetClient().Do(req)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
|
||||||
|
body, err := io.ReadAll(resp.Body)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if err := utils.Json.Unmarshal(body, &result); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if err = result.Err(string(body)); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
bytes, err := crypto.Decode(string(result.EncodedData), key)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
downloadInfo := driver115.DownloadData{}
|
||||||
|
if err := utils.Json.Unmarshal(bytes, &downloadInfo); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, info := range downloadInfo {
|
||||||
|
if info.FileSize < 0 {
|
||||||
|
return nil, driver115.ErrDownloadEmpty
|
||||||
|
}
|
||||||
|
info.Header = resp.Request.Header
|
||||||
|
return info, nil
|
||||||
|
}
|
||||||
|
return nil, driver115.ErrUnexpected
|
||||||
|
}
|
||||||
|
|
||||||
func (d *Pan115) rapidUpload(fileSize int64, fileName, dirID, preID, fileID string, stream model.FileStreamer) (*driver115.UploadInitResp, error) {
|
func (d *Pan115) rapidUpload(fileSize int64, fileName, dirID, preID, fileID string, stream model.FileStreamer) (*driver115.UploadInitResp, error) {
|
||||||
var (
|
var (
|
||||||
ecdhCipher *cipher.EcdhCipher
|
ecdhCipher *cipher.EcdhCipher
|
||||||
@ -249,7 +306,7 @@ func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize i
|
|||||||
go func(threadId int) {
|
go func(threadId int) {
|
||||||
defer func() {
|
defer func() {
|
||||||
if r := recover(); r != nil {
|
if r := recover(); r != nil {
|
||||||
errCh <- fmt.Errorf("Recovered in %v", r)
|
errCh <- fmt.Errorf("recovered in %v", r)
|
||||||
}
|
}
|
||||||
}()
|
}()
|
||||||
for chunk := range chunksCh {
|
for chunk := range chunksCh {
|
||||||
|
112
drivers/115_share/driver.go
Normal file
112
drivers/115_share/driver.go
Normal file
@ -0,0 +1,112 @@
|
|||||||
|
package _115_share
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
|
||||||
|
driver115 "github.com/SheltonZhu/115driver/pkg/driver"
|
||||||
|
"github.com/alist-org/alist/v3/internal/driver"
|
||||||
|
"github.com/alist-org/alist/v3/internal/errs"
|
||||||
|
"github.com/alist-org/alist/v3/internal/model"
|
||||||
|
"github.com/alist-org/alist/v3/pkg/utils"
|
||||||
|
"golang.org/x/time/rate"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Pan115Share struct {
|
||||||
|
model.Storage
|
||||||
|
Addition
|
||||||
|
client *driver115.Pan115Client
|
||||||
|
limiter *rate.Limiter
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Pan115Share) Config() driver.Config {
|
||||||
|
return config
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Pan115Share) GetAddition() driver.Additional {
|
||||||
|
return &d.Addition
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Pan115Share) Init(ctx context.Context) error {
|
||||||
|
if d.LimitRate > 0 {
|
||||||
|
d.limiter = rate.NewLimiter(rate.Limit(d.LimitRate), 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
return d.login()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Pan115Share) WaitLimit(ctx context.Context) error {
|
||||||
|
if d.limiter != nil {
|
||||||
|
return d.limiter.Wait(ctx)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Pan115Share) Drop(ctx context.Context) error {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Pan115Share) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||||
|
if err := d.WaitLimit(ctx); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
files := make([]driver115.ShareFile, 0)
|
||||||
|
fileResp, err := d.client.GetShareSnap(d.ShareCode, d.ReceiveCode, dir.GetID(), driver115.QueryLimit(int(d.PageSize)))
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
files = append(files, fileResp.Data.List...)
|
||||||
|
total := fileResp.Data.Count
|
||||||
|
count := len(fileResp.Data.List)
|
||||||
|
for total > count {
|
||||||
|
fileResp, err := d.client.GetShareSnap(
|
||||||
|
d.ShareCode, d.ReceiveCode, dir.GetID(),
|
||||||
|
driver115.QueryLimit(int(d.PageSize)), driver115.QueryOffset(count),
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
files = append(files, fileResp.Data.List...)
|
||||||
|
count += len(fileResp.Data.List)
|
||||||
|
}
|
||||||
|
|
||||||
|
return utils.SliceConvert(files, transFunc)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Pan115Share) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||||
|
if err := d.WaitLimit(ctx); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
downloadInfo, err := d.client.DownloadByShareCode(d.ShareCode, d.ReceiveCode, file.GetID())
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &model.Link{URL: downloadInfo.URL.URL}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Pan115Share) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||||
|
return errs.NotSupport
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Pan115Share) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||||
|
return errs.NotSupport
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Pan115Share) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||||
|
return errs.NotSupport
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Pan115Share) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||||
|
return errs.NotSupport
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Pan115Share) Remove(ctx context.Context, obj model.Obj) error {
|
||||||
|
return errs.NotSupport
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Pan115Share) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||||
|
return errs.NotSupport
|
||||||
|
}
|
||||||
|
|
||||||
|
var _ driver.Driver = (*Pan115Share)(nil)
|
34
drivers/115_share/meta.go
Normal file
34
drivers/115_share/meta.go
Normal file
@ -0,0 +1,34 @@
|
|||||||
|
package _115_share
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/alist-org/alist/v3/internal/driver"
|
||||||
|
"github.com/alist-org/alist/v3/internal/op"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Addition struct {
|
||||||
|
Cookie string `json:"cookie" type:"text" help:"one of QR code token and cookie required"`
|
||||||
|
QRCodeToken string `json:"qrcode_token" type:"text" help:"one of QR code token and cookie required"`
|
||||||
|
QRCodeSource string `json:"qrcode_source" type:"select" options:"web,android,ios,linux,mac,windows,tv" default:"linux" help:"select the QR code device, default linux"`
|
||||||
|
PageSize int64 `json:"page_size" type:"number" default:"20" help:"list api per page size of 115 driver"`
|
||||||
|
LimitRate float64 `json:"limit_rate" type:"number" default:"2" help:"limit all api request rate (1r/[limit_rate]s)"`
|
||||||
|
ShareCode string `json:"share_code" type:"text" required:"true" help:"share code of 115 share link"`
|
||||||
|
ReceiveCode string `json:"receive_code" type:"text" required:"true" help:"receive code of 115 share link"`
|
||||||
|
driver.RootID
|
||||||
|
}
|
||||||
|
|
||||||
|
var config = driver.Config{
|
||||||
|
Name: "115 Share",
|
||||||
|
DefaultRoot: "",
|
||||||
|
// OnlyProxy: true,
|
||||||
|
// OnlyLocal: true,
|
||||||
|
CheckStatus: false,
|
||||||
|
Alert: "",
|
||||||
|
NoOverwriteUpload: true,
|
||||||
|
NoUpload: true,
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
op.RegisterDriver(func() driver.Driver {
|
||||||
|
return &Pan115Share{}
|
||||||
|
})
|
||||||
|
}
|
111
drivers/115_share/utils.go
Normal file
111
drivers/115_share/utils.go
Normal file
@ -0,0 +1,111 @@
|
|||||||
|
package _115_share
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strconv"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
driver115 "github.com/SheltonZhu/115driver/pkg/driver"
|
||||||
|
"github.com/alist-org/alist/v3/internal/model"
|
||||||
|
"github.com/alist-org/alist/v3/pkg/utils"
|
||||||
|
"github.com/pkg/errors"
|
||||||
|
)
|
||||||
|
|
||||||
|
var _ model.Obj = (*FileObj)(nil)
|
||||||
|
|
||||||
|
type FileObj struct {
|
||||||
|
Size int64
|
||||||
|
Sha1 string
|
||||||
|
Utm time.Time
|
||||||
|
FileName string
|
||||||
|
isDir bool
|
||||||
|
FileID string
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f *FileObj) CreateTime() time.Time {
|
||||||
|
return f.Utm
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f *FileObj) GetHash() utils.HashInfo {
|
||||||
|
return utils.NewHashInfo(utils.SHA1, f.Sha1)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f *FileObj) GetSize() int64 {
|
||||||
|
return f.Size
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f *FileObj) GetName() string {
|
||||||
|
return f.FileName
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f *FileObj) ModTime() time.Time {
|
||||||
|
return f.Utm
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f *FileObj) IsDir() bool {
|
||||||
|
return f.isDir
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f *FileObj) GetID() string {
|
||||||
|
return f.FileID
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f *FileObj) GetPath() string {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
func transFunc(sf driver115.ShareFile) (model.Obj, error) {
|
||||||
|
timeInt, err := strconv.ParseInt(sf.UpdateTime, 10, 64)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
var (
|
||||||
|
utm = time.Unix(timeInt, 0)
|
||||||
|
isDir = (sf.IsFile == 0)
|
||||||
|
fileID = string(sf.FileID)
|
||||||
|
)
|
||||||
|
if isDir {
|
||||||
|
fileID = string(sf.CategoryID)
|
||||||
|
}
|
||||||
|
return &FileObj{
|
||||||
|
Size: int64(sf.Size),
|
||||||
|
Sha1: sf.Sha1,
|
||||||
|
Utm: utm,
|
||||||
|
FileName: string(sf.FileName),
|
||||||
|
isDir: isDir,
|
||||||
|
FileID: fileID,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var UserAgent = driver115.UA115Browser
|
||||||
|
|
||||||
|
func (d *Pan115Share) login() error {
|
||||||
|
var err error
|
||||||
|
opts := []driver115.Option{
|
||||||
|
driver115.UA(UserAgent),
|
||||||
|
}
|
||||||
|
d.client = driver115.New(opts...)
|
||||||
|
if _, err := d.client.GetShareSnap(d.ShareCode, d.ReceiveCode, ""); err != nil {
|
||||||
|
return errors.Wrap(err, "failed to get share snap")
|
||||||
|
}
|
||||||
|
cr := &driver115.Credential{}
|
||||||
|
if d.QRCodeToken != "" {
|
||||||
|
s := &driver115.QRCodeSession{
|
||||||
|
UID: d.QRCodeToken,
|
||||||
|
}
|
||||||
|
if cr, err = d.client.QRCodeLoginWithApp(s, driver115.LoginApp(d.QRCodeSource)); err != nil {
|
||||||
|
return errors.Wrap(err, "failed to login by qrcode")
|
||||||
|
}
|
||||||
|
d.Cookie = fmt.Sprintf("UID=%s;CID=%s;SEID=%s", cr.UID, cr.CID, cr.SEID)
|
||||||
|
d.QRCodeToken = ""
|
||||||
|
} else if d.Cookie != "" {
|
||||||
|
if err = cr.FromCookie(d.Cookie); err != nil {
|
||||||
|
return errors.Wrap(err, "failed to login by cookies")
|
||||||
|
}
|
||||||
|
d.client.ImportCredential(cr)
|
||||||
|
} else {
|
||||||
|
return errors.New("missing cookie or qrcode account")
|
||||||
|
}
|
||||||
|
|
||||||
|
return d.client.LoginCheck()
|
||||||
|
}
|
@ -6,6 +6,13 @@ import (
|
|||||||
"encoding/base64"
|
"encoding/base64"
|
||||||
"encoding/hex"
|
"encoding/hex"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"golang.org/x/time/rate"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"net/url"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/drivers/base"
|
"github.com/alist-org/alist/v3/drivers/base"
|
||||||
"github.com/alist-org/alist/v3/internal/driver"
|
"github.com/alist-org/alist/v3/internal/driver"
|
||||||
"github.com/alist-org/alist/v3/internal/errs"
|
"github.com/alist-org/alist/v3/internal/errs"
|
||||||
@ -17,14 +24,12 @@ import (
|
|||||||
"github.com/aws/aws-sdk-go/service/s3/s3manager"
|
"github.com/aws/aws-sdk-go/service/s3/s3manager"
|
||||||
"github.com/go-resty/resty/v2"
|
"github.com/go-resty/resty/v2"
|
||||||
log "github.com/sirupsen/logrus"
|
log "github.com/sirupsen/logrus"
|
||||||
"io"
|
|
||||||
"net/http"
|
|
||||||
"net/url"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type Pan123 struct {
|
type Pan123 struct {
|
||||||
model.Storage
|
model.Storage
|
||||||
Addition
|
Addition
|
||||||
|
apiRateLimit sync.Map
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Pan123) Config() driver.Config {
|
func (d *Pan123) Config() driver.Config {
|
||||||
@ -232,6 +237,9 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
uploader := s3manager.NewUploader(s)
|
uploader := s3manager.NewUploader(s)
|
||||||
|
if stream.GetSize() > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
|
||||||
|
uploader.PartSize = stream.GetSize() / (s3manager.MaxUploadParts - 1)
|
||||||
|
}
|
||||||
input := &s3manager.UploadInput{
|
input := &s3manager.UploadInput{
|
||||||
Bucket: &resp.Data.Bucket,
|
Bucket: &resp.Data.Bucket,
|
||||||
Key: &resp.Data.Key,
|
Key: &resp.Data.Key,
|
||||||
@ -250,4 +258,11 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (d *Pan123) APIRateLimit(api string) bool {
|
||||||
|
limiter, _ := d.apiRateLimit.LoadOrStore(api,
|
||||||
|
rate.NewLimiter(rate.Every(time.Millisecond*700), 1))
|
||||||
|
ins := limiter.(*rate.Limiter)
|
||||||
|
return ins.Allow()
|
||||||
|
}
|
||||||
|
|
||||||
var _ driver.Driver = (*Pan123)(nil)
|
var _ driver.Driver = (*Pan123)(nil)
|
||||||
|
@ -107,7 +107,7 @@ func (d *Pan123) newUpload(ctx context.Context, upReq *UploadResp, file model.Fi
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
up(j * 100 / chunkCount)
|
up(float64(j) * 100 / float64(chunkCount))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// complete s3 upload
|
// complete s3 upload
|
||||||
|
@ -3,12 +3,18 @@ package _123
|
|||||||
import (
|
import (
|
||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"hash/crc32"
|
||||||
|
"math"
|
||||||
|
"math/rand"
|
||||||
"net/http"
|
"net/http"
|
||||||
|
"net/url"
|
||||||
"strconv"
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/drivers/base"
|
"github.com/alist-org/alist/v3/drivers/base"
|
||||||
"github.com/alist-org/alist/v3/pkg/utils"
|
"github.com/alist-org/alist/v3/pkg/utils"
|
||||||
"github.com/go-resty/resty/v2"
|
resty "github.com/go-resty/resty/v2"
|
||||||
jsoniter "github.com/json-iterator/go"
|
jsoniter "github.com/json-iterator/go"
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -18,7 +24,7 @@ const (
|
|||||||
Api = "https://www.123pan.com/api"
|
Api = "https://www.123pan.com/api"
|
||||||
AApi = "https://www.123pan.com/a/api"
|
AApi = "https://www.123pan.com/a/api"
|
||||||
BApi = "https://www.123pan.com/b/api"
|
BApi = "https://www.123pan.com/b/api"
|
||||||
MainApi = Api
|
MainApi = BApi
|
||||||
SignIn = MainApi + "/user/sign_in"
|
SignIn = MainApi + "/user/sign_in"
|
||||||
Logout = MainApi + "/user/logout"
|
Logout = MainApi + "/user/logout"
|
||||||
UserInfo = MainApi + "/user/info"
|
UserInfo = MainApi + "/user/info"
|
||||||
@ -37,6 +43,104 @@ const (
|
|||||||
//AuthKeySalt = "8-8D$sL8gPjom7bk#cY"
|
//AuthKeySalt = "8-8D$sL8gPjom7bk#cY"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
func signPath(path string, os string, version string) (k string, v string) {
|
||||||
|
table := []byte{'a', 'd', 'e', 'f', 'g', 'h', 'l', 'm', 'y', 'i', 'j', 'n', 'o', 'p', 'k', 'q', 'r', 's', 't', 'u', 'b', 'c', 'v', 'w', 's', 'z'}
|
||||||
|
random := fmt.Sprintf("%.f", math.Round(1e7*rand.Float64()))
|
||||||
|
now := time.Now().In(time.FixedZone("CST", 8*3600))
|
||||||
|
timestamp := fmt.Sprint(now.Unix())
|
||||||
|
nowStr := []byte(now.Format("200601021504"))
|
||||||
|
for i := 0; i < len(nowStr); i++ {
|
||||||
|
nowStr[i] = table[nowStr[i]-48]
|
||||||
|
}
|
||||||
|
timeSign := fmt.Sprint(crc32.ChecksumIEEE(nowStr))
|
||||||
|
data := strings.Join([]string{timestamp, random, path, os, version, timeSign}, "|")
|
||||||
|
dataSign := fmt.Sprint(crc32.ChecksumIEEE([]byte(data)))
|
||||||
|
return timeSign, strings.Join([]string{timestamp, random, dataSign}, "-")
|
||||||
|
}
|
||||||
|
|
||||||
|
func GetApi(rawUrl string) string {
|
||||||
|
u, _ := url.Parse(rawUrl)
|
||||||
|
query := u.Query()
|
||||||
|
query.Add(signPath(u.Path, "web", "3"))
|
||||||
|
u.RawQuery = query.Encode()
|
||||||
|
return u.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
//func GetApi(url string) string {
|
||||||
|
// vm := js.New()
|
||||||
|
// vm.Set("url", url[22:])
|
||||||
|
// r, err := vm.RunString(`
|
||||||
|
// (function(e){
|
||||||
|
// function A(t, e) {
|
||||||
|
// e = 1 < arguments.length && void 0 !== e ? e : 10;
|
||||||
|
// for (var n = function() {
|
||||||
|
// for (var t = [], e = 0; e < 256; e++) {
|
||||||
|
// for (var n = e, r = 0; r < 8; r++)
|
||||||
|
// n = 1 & n ? 3988292384 ^ n >>> 1 : n >>> 1;
|
||||||
|
// t[e] = n
|
||||||
|
// }
|
||||||
|
// return t
|
||||||
|
// }(), r = function(t) {
|
||||||
|
// t = t.replace(/\\r\\n/g, "\\n");
|
||||||
|
// for (var e = "", n = 0; n < t.length; n++) {
|
||||||
|
// var r = t.charCodeAt(n);
|
||||||
|
// r < 128 ? e += String.fromCharCode(r) : e = 127 < r && r < 2048 ? (e += String.fromCharCode(r >> 6 | 192)) + String.fromCharCode(63 & r | 128) : (e = (e += String.fromCharCode(r >> 12 | 224)) + String.fromCharCode(r >> 6 & 63 | 128)) + String.fromCharCode(63 & r | 128)
|
||||||
|
// }
|
||||||
|
// return e
|
||||||
|
// }(t), a = -1, i = 0; i < r.length; i++)
|
||||||
|
// a = a >>> 8 ^ n[255 & (a ^ r.charCodeAt(i))];
|
||||||
|
// return (a = (-1 ^ a) >>> 0).toString(e)
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// function v(t) {
|
||||||
|
// return (v = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function(t) {
|
||||||
|
// return typeof t
|
||||||
|
// }
|
||||||
|
// : function(t) {
|
||||||
|
// return t && "function" == typeof Symbol && t.constructor === Symbol && t !== Symbol.prototype ? "symbol" : typeof t
|
||||||
|
// }
|
||||||
|
// )(t)
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// for (p in a = Math.round(1e7 * Math.random()),
|
||||||
|
// o = Math.round(((new Date).getTime() + 60 * (new Date).getTimezoneOffset() * 1e3 + 288e5) / 1e3).toString(),
|
||||||
|
// m = ["a", "d", "e", "f", "g", "h", "l", "m", "y", "i", "j", "n", "o", "p", "k", "q", "r", "s", "t", "u", "b", "c", "v", "w", "s", "z"],
|
||||||
|
// u = function(t, e, n) {
|
||||||
|
// var r;
|
||||||
|
// n = 2 < arguments.length && void 0 !== n ? n : 8;
|
||||||
|
// return 0 === arguments.length ? null : (r = "object" === v(t) ? t : (10 === "".concat(t).length && (t = 1e3 * Number.parseInt(t)),
|
||||||
|
// new Date(t)),
|
||||||
|
// t += 6e4 * new Date(t).getTimezoneOffset(),
|
||||||
|
// {
|
||||||
|
// y: (r = new Date(t + 36e5 * n)).getFullYear(),
|
||||||
|
// m: r.getMonth() + 1 < 10 ? "0".concat(r.getMonth() + 1) : r.getMonth() + 1,
|
||||||
|
// d: r.getDate() < 10 ? "0".concat(r.getDate()) : r.getDate(),
|
||||||
|
// h: r.getHours() < 10 ? "0".concat(r.getHours()) : r.getHours(),
|
||||||
|
// f: r.getMinutes() < 10 ? "0".concat(r.getMinutes()) : r.getMinutes()
|
||||||
|
// })
|
||||||
|
// }(o),
|
||||||
|
// h = u.y,
|
||||||
|
// g = u.m,
|
||||||
|
// l = u.d,
|
||||||
|
// c = u.h,
|
||||||
|
// u = u.f,
|
||||||
|
// d = [h, g, l, c, u].join(""),
|
||||||
|
// f = [],
|
||||||
|
// d)
|
||||||
|
// f.push(m[Number(d[p])]);
|
||||||
|
// return h = A(f.join("")),
|
||||||
|
// g = A("".concat(o, "|").concat(a, "|").concat(e, "|").concat("web", "|").concat("3", "|").concat(h)),
|
||||||
|
// "".concat(h, "=").concat(o, "-").concat(a, "-").concat(g);
|
||||||
|
// })(url)
|
||||||
|
// `)
|
||||||
|
// if err != nil {
|
||||||
|
// fmt.Println(err)
|
||||||
|
// return url
|
||||||
|
// }
|
||||||
|
// v, _ := r.Export().(string)
|
||||||
|
// return url + "?" + v
|
||||||
|
//}
|
||||||
|
|
||||||
func (d *Pan123) login() error {
|
func (d *Pan123) login() error {
|
||||||
var body base.Json
|
var body base.Json
|
||||||
if utils.IsEmailFormat(d.Username) {
|
if utils.IsEmailFormat(d.Username) {
|
||||||
@ -56,9 +160,9 @@ func (d *Pan123) login() error {
|
|||||||
SetHeaders(map[string]string{
|
SetHeaders(map[string]string{
|
||||||
"origin": "https://www.123pan.com",
|
"origin": "https://www.123pan.com",
|
||||||
"referer": "https://www.123pan.com/",
|
"referer": "https://www.123pan.com/",
|
||||||
"user-agent": "Dart/2.19(dart:io)",
|
"user-agent": "Dart/2.19(dart:io)-alist",
|
||||||
"platform": "android",
|
"platform": "web",
|
||||||
"app-version": "36",
|
"app-version": "3",
|
||||||
//"user-agent": base.UserAgent,
|
//"user-agent": base.UserAgent,
|
||||||
}).
|
}).
|
||||||
SetBody(body).Post(SignIn)
|
SetBody(body).Post(SignIn)
|
||||||
@ -93,9 +197,9 @@ func (d *Pan123) request(url string, method string, callback base.ReqCallback, r
|
|||||||
"origin": "https://www.123pan.com",
|
"origin": "https://www.123pan.com",
|
||||||
"referer": "https://www.123pan.com/",
|
"referer": "https://www.123pan.com/",
|
||||||
"authorization": "Bearer " + d.AccessToken,
|
"authorization": "Bearer " + d.AccessToken,
|
||||||
"user-agent": "Dart/2.19(dart:io)",
|
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) alist-client",
|
||||||
"platform": "android",
|
"platform": "web",
|
||||||
"app-version": "36",
|
"app-version": "3",
|
||||||
//"user-agent": base.UserAgent,
|
//"user-agent": base.UserAgent,
|
||||||
})
|
})
|
||||||
if callback != nil {
|
if callback != nil {
|
||||||
@ -109,7 +213,7 @@ func (d *Pan123) request(url string, method string, callback base.ReqCallback, r
|
|||||||
// return nil, err
|
// return nil, err
|
||||||
//}
|
//}
|
||||||
//req.SetQueryParam("auth-key", *authKey)
|
//req.SetQueryParam("auth-key", *authKey)
|
||||||
res, err := req.Execute(method, url)
|
res, err := req.Execute(method, GetApi(url))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -131,17 +235,27 @@ func (d *Pan123) request(url string, method string, callback base.ReqCallback, r
|
|||||||
func (d *Pan123) getFiles(parentId string) ([]File, error) {
|
func (d *Pan123) getFiles(parentId string) ([]File, error) {
|
||||||
page := 1
|
page := 1
|
||||||
res := make([]File, 0)
|
res := make([]File, 0)
|
||||||
|
// 2024-02-06 fix concurrency by 123pan
|
||||||
for {
|
for {
|
||||||
|
if !d.APIRateLimit(FileList) {
|
||||||
|
time.Sleep(time.Millisecond * 200)
|
||||||
|
continue
|
||||||
|
}
|
||||||
var resp Files
|
var resp Files
|
||||||
query := map[string]string{
|
query := map[string]string{
|
||||||
"driveId": "0",
|
"driveId": "0",
|
||||||
"limit": "100",
|
"limit": "100",
|
||||||
"next": "0",
|
"next": "0",
|
||||||
"orderBy": d.OrderBy,
|
"orderBy": d.OrderBy,
|
||||||
"orderDirection": d.OrderDirection,
|
"orderDirection": d.OrderDirection,
|
||||||
"parentFileId": parentId,
|
"parentFileId": parentId,
|
||||||
"trashed": "false",
|
"trashed": "false",
|
||||||
"Page": strconv.Itoa(page),
|
"SearchData": "",
|
||||||
|
"Page": strconv.Itoa(page),
|
||||||
|
"OnlyLookAbnormalFile": "0",
|
||||||
|
"event": "homeListFile",
|
||||||
|
"operateType": "4",
|
||||||
|
"inDirectSpace": "false",
|
||||||
}
|
}
|
||||||
_, err := d.request(FileList, http.MethodGet, func(req *resty.Request) {
|
_, err := d.request(FileList, http.MethodGet, func(req *resty.Request) {
|
||||||
req.SetQueryParams(query)
|
req.SetQueryParams(query)
|
||||||
|
@ -8,18 +8,21 @@ import (
|
|||||||
"net/http"
|
"net/http"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/drivers/base"
|
"github.com/alist-org/alist/v3/drivers/base"
|
||||||
"github.com/alist-org/alist/v3/internal/driver"
|
"github.com/alist-org/alist/v3/internal/driver"
|
||||||
"github.com/alist-org/alist/v3/internal/errs"
|
"github.com/alist-org/alist/v3/internal/errs"
|
||||||
"github.com/alist-org/alist/v3/internal/model"
|
"github.com/alist-org/alist/v3/internal/model"
|
||||||
"github.com/alist-org/alist/v3/pkg/utils"
|
"github.com/alist-org/alist/v3/pkg/utils"
|
||||||
|
"github.com/alist-org/alist/v3/pkg/cron"
|
||||||
log "github.com/sirupsen/logrus"
|
log "github.com/sirupsen/logrus"
|
||||||
)
|
)
|
||||||
|
|
||||||
type Yun139 struct {
|
type Yun139 struct {
|
||||||
model.Storage
|
model.Storage
|
||||||
Addition
|
Addition
|
||||||
|
cron *cron.Cron
|
||||||
Account string
|
Account string
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -35,61 +38,116 @@ func (d *Yun139) Init(ctx context.Context) error {
|
|||||||
if d.Authorization == "" {
|
if d.Authorization == "" {
|
||||||
return fmt.Errorf("authorization is empty")
|
return fmt.Errorf("authorization is empty")
|
||||||
}
|
}
|
||||||
decode, err := base64.StdEncoding.DecodeString(d.Authorization)
|
d.cron = cron.NewCron(time.Hour * 24 * 7)
|
||||||
if err != nil {
|
d.cron.Do(func() {
|
||||||
return err
|
err := d.refreshToken()
|
||||||
}
|
if err != nil {
|
||||||
decodeStr := string(decode)
|
log.Errorf("%+v", err)
|
||||||
splits := strings.Split(decodeStr, ":")
|
}
|
||||||
if len(splits) < 2 {
|
})
|
||||||
return fmt.Errorf("authorization is invalid, splits < 2")
|
switch d.Addition.Type {
|
||||||
}
|
case MetaPersonalNew:
|
||||||
d.Account = splits[1]
|
if len(d.Addition.RootFolderID) == 0 {
|
||||||
_, err = d.post("/orchestration/personalCloud/user/v1.0/qryUserExternInfo", base.Json{
|
d.RootFolderID = "/"
|
||||||
"qryUserExternInfoReq": base.Json{
|
}
|
||||||
"commonAccountInfo": base.Json{
|
return nil
|
||||||
"account": d.Account,
|
case MetaPersonal:
|
||||||
"accountType": 1,
|
if len(d.Addition.RootFolderID) == 0 {
|
||||||
|
d.RootFolderID = "root"
|
||||||
|
}
|
||||||
|
fallthrough
|
||||||
|
case MetaFamily:
|
||||||
|
decode, err := base64.StdEncoding.DecodeString(d.Authorization)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
decodeStr := string(decode)
|
||||||
|
splits := strings.Split(decodeStr, ":")
|
||||||
|
if len(splits) < 2 {
|
||||||
|
return fmt.Errorf("authorization is invalid, splits < 2")
|
||||||
|
}
|
||||||
|
d.Account = splits[1]
|
||||||
|
_, err = d.post("/orchestration/personalCloud/user/v1.0/qryUserExternInfo", base.Json{
|
||||||
|
"qryUserExternInfoReq": base.Json{
|
||||||
|
"commonAccountInfo": base.Json{
|
||||||
|
"account": d.Account,
|
||||||
|
"accountType": 1,
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
}, nil)
|
||||||
}, nil)
|
return err
|
||||||
return err
|
default:
|
||||||
|
return errs.NotImplement
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Yun139) Drop(ctx context.Context) error {
|
func (d *Yun139) Drop(ctx context.Context) error {
|
||||||
|
if d.cron != nil {
|
||||||
|
d.cron.Stop()
|
||||||
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Yun139) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
func (d *Yun139) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||||
if d.isFamily() {
|
switch d.Addition.Type {
|
||||||
return d.familyGetFiles(dir.GetID())
|
case MetaPersonalNew:
|
||||||
} else {
|
return d.personalGetFiles(dir.GetID())
|
||||||
|
case MetaPersonal:
|
||||||
return d.getFiles(dir.GetID())
|
return d.getFiles(dir.GetID())
|
||||||
|
case MetaFamily:
|
||||||
|
return d.familyGetFiles(dir.GetID())
|
||||||
|
default:
|
||||||
|
return nil, errs.NotImplement
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Yun139) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
func (d *Yun139) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||||
u, err := d.getLink(file.GetID())
|
var url string
|
||||||
|
var err error
|
||||||
|
switch d.Addition.Type {
|
||||||
|
case MetaPersonalNew:
|
||||||
|
url, err = d.personalGetLink(file.GetID())
|
||||||
|
case MetaPersonal:
|
||||||
|
fallthrough
|
||||||
|
case MetaFamily:
|
||||||
|
url, err = d.getLink(file.GetID())
|
||||||
|
default:
|
||||||
|
return nil, errs.NotImplement
|
||||||
|
}
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
return &model.Link{URL: u}, nil
|
return &model.Link{URL: url}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Yun139) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
func (d *Yun139) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||||
data := base.Json{
|
var err error
|
||||||
"createCatalogExtReq": base.Json{
|
switch d.Addition.Type {
|
||||||
"parentCatalogID": parentDir.GetID(),
|
case MetaPersonalNew:
|
||||||
"newCatalogName": dirName,
|
data := base.Json{
|
||||||
"commonAccountInfo": base.Json{
|
"parentFileId": parentDir.GetID(),
|
||||||
"account": d.Account,
|
"name": dirName,
|
||||||
"accountType": 1,
|
"description": "",
|
||||||
|
"type": "folder",
|
||||||
|
"fileRenameMode": "force_rename",
|
||||||
|
}
|
||||||
|
pathname := "/hcy/file/create"
|
||||||
|
_, err = d.personalPost(pathname, data, nil)
|
||||||
|
case MetaPersonal:
|
||||||
|
data := base.Json{
|
||||||
|
"createCatalogExtReq": base.Json{
|
||||||
|
"parentCatalogID": parentDir.GetID(),
|
||||||
|
"newCatalogName": dirName,
|
||||||
|
"commonAccountInfo": base.Json{
|
||||||
|
"account": d.Account,
|
||||||
|
"accountType": 1,
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
}
|
||||||
}
|
pathname := "/orchestration/personalCloud/catalog/v1.0/createCatalogExt"
|
||||||
pathname := "/orchestration/personalCloud/catalog/v1.0/createCatalogExt"
|
_, err = d.post(pathname, data, nil)
|
||||||
if d.isFamily() {
|
case MetaFamily:
|
||||||
data = base.Json{
|
data := base.Json{
|
||||||
"cloudID": d.CloudID,
|
"cloudID": d.CloudID,
|
||||||
"commonAccountInfo": base.Json{
|
"commonAccountInfo": base.Json{
|
||||||
"account": d.Account,
|
"account": d.Account,
|
||||||
@ -97,147 +155,198 @@ func (d *Yun139) MakeDir(ctx context.Context, parentDir model.Obj, dirName strin
|
|||||||
},
|
},
|
||||||
"docLibName": dirName,
|
"docLibName": dirName,
|
||||||
}
|
}
|
||||||
pathname = "/orchestration/familyCloud/cloudCatalog/v1.0/createCloudDoc"
|
pathname := "/orchestration/familyCloud/cloudCatalog/v1.0/createCloudDoc"
|
||||||
|
_, err = d.post(pathname, data, nil)
|
||||||
|
default:
|
||||||
|
err = errs.NotImplement
|
||||||
}
|
}
|
||||||
_, err := d.post(pathname, data, nil)
|
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Yun139) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
|
func (d *Yun139) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
|
||||||
if d.isFamily() {
|
switch d.Addition.Type {
|
||||||
|
case MetaPersonalNew:
|
||||||
|
data := base.Json{
|
||||||
|
"fileIds": []string{srcObj.GetID()},
|
||||||
|
"toParentFileId": dstDir.GetID(),
|
||||||
|
}
|
||||||
|
pathname := "/hcy/file/batchMove"
|
||||||
|
_, err := d.personalPost(pathname, data, nil)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return srcObj, nil
|
||||||
|
case MetaPersonal:
|
||||||
|
var contentInfoList []string
|
||||||
|
var catalogInfoList []string
|
||||||
|
if srcObj.IsDir() {
|
||||||
|
catalogInfoList = append(catalogInfoList, srcObj.GetID())
|
||||||
|
} else {
|
||||||
|
contentInfoList = append(contentInfoList, srcObj.GetID())
|
||||||
|
}
|
||||||
|
data := base.Json{
|
||||||
|
"createBatchOprTaskReq": base.Json{
|
||||||
|
"taskType": 3,
|
||||||
|
"actionType": "304",
|
||||||
|
"taskInfo": base.Json{
|
||||||
|
"contentInfoList": contentInfoList,
|
||||||
|
"catalogInfoList": catalogInfoList,
|
||||||
|
"newCatalogID": dstDir.GetID(),
|
||||||
|
},
|
||||||
|
"commonAccountInfo": base.Json{
|
||||||
|
"account": d.Account,
|
||||||
|
"accountType": 1,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
|
||||||
|
_, err := d.post(pathname, data, nil)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return srcObj, nil
|
||||||
|
default:
|
||||||
return nil, errs.NotImplement
|
return nil, errs.NotImplement
|
||||||
}
|
}
|
||||||
var contentInfoList []string
|
|
||||||
var catalogInfoList []string
|
|
||||||
if srcObj.IsDir() {
|
|
||||||
catalogInfoList = append(catalogInfoList, srcObj.GetID())
|
|
||||||
} else {
|
|
||||||
contentInfoList = append(contentInfoList, srcObj.GetID())
|
|
||||||
}
|
|
||||||
data := base.Json{
|
|
||||||
"createBatchOprTaskReq": base.Json{
|
|
||||||
"taskType": 3,
|
|
||||||
"actionType": "304",
|
|
||||||
"taskInfo": base.Json{
|
|
||||||
"contentInfoList": contentInfoList,
|
|
||||||
"catalogInfoList": catalogInfoList,
|
|
||||||
"newCatalogID": dstDir.GetID(),
|
|
||||||
},
|
|
||||||
"commonAccountInfo": base.Json{
|
|
||||||
"account": d.Account,
|
|
||||||
"accountType": 1,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
|
|
||||||
_, err := d.post(pathname, data, nil)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return srcObj, nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Yun139) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
func (d *Yun139) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||||
if d.isFamily() {
|
var err error
|
||||||
return errs.NotImplement
|
switch d.Addition.Type {
|
||||||
}
|
case MetaPersonalNew:
|
||||||
var data base.Json
|
data := base.Json{
|
||||||
var pathname string
|
"fileId": srcObj.GetID(),
|
||||||
if srcObj.IsDir() {
|
"name": newName,
|
||||||
data = base.Json{
|
"description": "",
|
||||||
"catalogID": srcObj.GetID(),
|
|
||||||
"catalogName": newName,
|
|
||||||
"commonAccountInfo": base.Json{
|
|
||||||
"account": d.Account,
|
|
||||||
"accountType": 1,
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
pathname = "/orchestration/personalCloud/catalog/v1.0/updateCatalogInfo"
|
pathname := "/hcy/file/update"
|
||||||
} else {
|
_, err = d.personalPost(pathname, data, nil)
|
||||||
data = base.Json{
|
case MetaPersonal:
|
||||||
"contentID": srcObj.GetID(),
|
var data base.Json
|
||||||
"contentName": newName,
|
var pathname string
|
||||||
"commonAccountInfo": base.Json{
|
if srcObj.IsDir() {
|
||||||
"account": d.Account,
|
data = base.Json{
|
||||||
"accountType": 1,
|
"catalogID": srcObj.GetID(),
|
||||||
},
|
"catalogName": newName,
|
||||||
|
"commonAccountInfo": base.Json{
|
||||||
|
"account": d.Account,
|
||||||
|
"accountType": 1,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
pathname = "/orchestration/personalCloud/catalog/v1.0/updateCatalogInfo"
|
||||||
|
} else {
|
||||||
|
data = base.Json{
|
||||||
|
"contentID": srcObj.GetID(),
|
||||||
|
"contentName": newName,
|
||||||
|
"commonAccountInfo": base.Json{
|
||||||
|
"account": d.Account,
|
||||||
|
"accountType": 1,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
pathname = "/orchestration/personalCloud/content/v1.0/updateContentInfo"
|
||||||
}
|
}
|
||||||
pathname = "/orchestration/personalCloud/content/v1.0/updateContentInfo"
|
_, err = d.post(pathname, data, nil)
|
||||||
|
default:
|
||||||
|
err = errs.NotImplement
|
||||||
}
|
}
|
||||||
_, err := d.post(pathname, data, nil)
|
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Yun139) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
func (d *Yun139) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||||
if d.isFamily() {
|
var err error
|
||||||
return errs.NotImplement
|
switch d.Addition.Type {
|
||||||
}
|
case MetaPersonalNew:
|
||||||
var contentInfoList []string
|
data := base.Json{
|
||||||
var catalogInfoList []string
|
"fileIds": []string{srcObj.GetID()},
|
||||||
if srcObj.IsDir() {
|
"toParentFileId": dstDir.GetID(),
|
||||||
catalogInfoList = append(catalogInfoList, srcObj.GetID())
|
}
|
||||||
} else {
|
pathname := "/hcy/file/batchCopy"
|
||||||
contentInfoList = append(contentInfoList, srcObj.GetID())
|
_, err := d.personalPost(pathname, data, nil)
|
||||||
}
|
return err
|
||||||
data := base.Json{
|
case MetaPersonal:
|
||||||
"createBatchOprTaskReq": base.Json{
|
var contentInfoList []string
|
||||||
"taskType": 3,
|
var catalogInfoList []string
|
||||||
"actionType": 309,
|
if srcObj.IsDir() {
|
||||||
"taskInfo": base.Json{
|
catalogInfoList = append(catalogInfoList, srcObj.GetID())
|
||||||
"contentInfoList": contentInfoList,
|
} else {
|
||||||
"catalogInfoList": catalogInfoList,
|
contentInfoList = append(contentInfoList, srcObj.GetID())
|
||||||
"newCatalogID": dstDir.GetID(),
|
}
|
||||||
|
data := base.Json{
|
||||||
|
"createBatchOprTaskReq": base.Json{
|
||||||
|
"taskType": 3,
|
||||||
|
"actionType": 309,
|
||||||
|
"taskInfo": base.Json{
|
||||||
|
"contentInfoList": contentInfoList,
|
||||||
|
"catalogInfoList": catalogInfoList,
|
||||||
|
"newCatalogID": dstDir.GetID(),
|
||||||
|
},
|
||||||
|
"commonAccountInfo": base.Json{
|
||||||
|
"account": d.Account,
|
||||||
|
"accountType": 1,
|
||||||
|
},
|
||||||
},
|
},
|
||||||
"commonAccountInfo": base.Json{
|
}
|
||||||
"account": d.Account,
|
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
|
||||||
"accountType": 1,
|
_, err = d.post(pathname, data, nil)
|
||||||
},
|
default:
|
||||||
},
|
err = errs.NotImplement
|
||||||
}
|
}
|
||||||
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
|
|
||||||
_, err := d.post(pathname, data, nil)
|
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Yun139) Remove(ctx context.Context, obj model.Obj) error {
|
func (d *Yun139) Remove(ctx context.Context, obj model.Obj) error {
|
||||||
var contentInfoList []string
|
switch d.Addition.Type {
|
||||||
var catalogInfoList []string
|
case MetaPersonalNew:
|
||||||
if obj.IsDir() {
|
data := base.Json{
|
||||||
catalogInfoList = append(catalogInfoList, obj.GetID())
|
"fileIds": []string{obj.GetID()},
|
||||||
} else {
|
|
||||||
contentInfoList = append(contentInfoList, obj.GetID())
|
|
||||||
}
|
|
||||||
data := base.Json{
|
|
||||||
"createBatchOprTaskReq": base.Json{
|
|
||||||
"taskType": 2,
|
|
||||||
"actionType": 201,
|
|
||||||
"taskInfo": base.Json{
|
|
||||||
"newCatalogID": "",
|
|
||||||
"contentInfoList": contentInfoList,
|
|
||||||
"catalogInfoList": catalogInfoList,
|
|
||||||
},
|
|
||||||
"commonAccountInfo": base.Json{
|
|
||||||
"account": d.Account,
|
|
||||||
"accountType": 1,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
|
|
||||||
if d.isFamily() {
|
|
||||||
data = base.Json{
|
|
||||||
"catalogList": catalogInfoList,
|
|
||||||
"contentList": contentInfoList,
|
|
||||||
"commonAccountInfo": base.Json{
|
|
||||||
"account": d.Account,
|
|
||||||
"accountType": 1,
|
|
||||||
},
|
|
||||||
"sourceCatalogType": 1002,
|
|
||||||
"taskType": 2,
|
|
||||||
}
|
}
|
||||||
pathname = "/orchestration/familyCloud/batchOprTask/v1.0/createBatchOprTask"
|
pathname := "/hcy/recyclebin/batchTrash"
|
||||||
|
_, err := d.personalPost(pathname, data, nil)
|
||||||
|
return err
|
||||||
|
case MetaPersonal:
|
||||||
|
fallthrough
|
||||||
|
case MetaFamily:
|
||||||
|
var contentInfoList []string
|
||||||
|
var catalogInfoList []string
|
||||||
|
if obj.IsDir() {
|
||||||
|
catalogInfoList = append(catalogInfoList, obj.GetID())
|
||||||
|
} else {
|
||||||
|
contentInfoList = append(contentInfoList, obj.GetID())
|
||||||
|
}
|
||||||
|
data := base.Json{
|
||||||
|
"createBatchOprTaskReq": base.Json{
|
||||||
|
"taskType": 2,
|
||||||
|
"actionType": 201,
|
||||||
|
"taskInfo": base.Json{
|
||||||
|
"newCatalogID": "",
|
||||||
|
"contentInfoList": contentInfoList,
|
||||||
|
"catalogInfoList": catalogInfoList,
|
||||||
|
},
|
||||||
|
"commonAccountInfo": base.Json{
|
||||||
|
"account": d.Account,
|
||||||
|
"accountType": 1,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
|
||||||
|
if d.isFamily() {
|
||||||
|
data = base.Json{
|
||||||
|
"catalogList": catalogInfoList,
|
||||||
|
"contentList": contentInfoList,
|
||||||
|
"commonAccountInfo": base.Json{
|
||||||
|
"account": d.Account,
|
||||||
|
"accountType": 1,
|
||||||
|
},
|
||||||
|
"sourceCatalogType": 1002,
|
||||||
|
"taskType": 2,
|
||||||
|
}
|
||||||
|
pathname = "/orchestration/familyCloud/batchOprTask/v1.0/createBatchOprTask"
|
||||||
|
}
|
||||||
|
_, err := d.post(pathname, data, nil)
|
||||||
|
return err
|
||||||
|
default:
|
||||||
|
return errs.NotImplement
|
||||||
}
|
}
|
||||||
_, err := d.post(pathname, data, nil)
|
|
||||||
return err
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const (
|
const (
|
||||||
@ -257,94 +366,208 @@ func getPartSize(size int64) int64 {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||||
data := base.Json{
|
switch d.Addition.Type {
|
||||||
"manualRename": 2,
|
case MetaPersonalNew:
|
||||||
"operation": 0,
|
var err error
|
||||||
"fileCount": 1,
|
fullHash := stream.GetHash().GetHash(utils.SHA256)
|
||||||
"totalSize": 0, // 去除上传大小限制
|
if len(fullHash) <= 0 {
|
||||||
"uploadContentList": []base.Json{{
|
tmpF, err := stream.CacheFullInTempFile()
|
||||||
"contentName": stream.GetName(),
|
if err != nil {
|
||||||
"contentSize": 0, // 去除上传大小限制
|
return err
|
||||||
// "digest": "5a3231986ce7a6b46e408612d385bafa"
|
}
|
||||||
}},
|
fullHash, err = utils.HashFile(utils.SHA256, tmpF)
|
||||||
"parentCatalogID": dstDir.GetID(),
|
if err != nil {
|
||||||
"newCatalogName": "",
|
return err
|
||||||
"commonAccountInfo": base.Json{
|
}
|
||||||
"account": d.Account,
|
}
|
||||||
"accountType": 1,
|
// return errs.NotImplement
|
||||||
},
|
data := base.Json{
|
||||||
}
|
"contentHash": fullHash,
|
||||||
pathname := "/orchestration/personalCloud/uploadAndDownload/v1.0/pcUploadFileRequest"
|
"contentHashAlgorithm": "SHA256",
|
||||||
if d.isFamily() {
|
"contentType": "application/octet-stream",
|
||||||
data = d.newJson(base.Json{
|
"parallelUpload": false,
|
||||||
"fileCount": 1,
|
"partInfos": []base.Json{{
|
||||||
"manualRename": 2,
|
"parallelHashCtx": base.Json{
|
||||||
"operation": 0,
|
"partOffset": 0,
|
||||||
"path": "",
|
},
|
||||||
"seqNo": "",
|
"partNumber": 1,
|
||||||
"totalSize": 0,
|
"partSize": stream.GetSize(),
|
||||||
"uploadContentList": []base.Json{{
|
|
||||||
"contentName": stream.GetName(),
|
|
||||||
"contentSize": 0,
|
|
||||||
// "digest": "5a3231986ce7a6b46e408612d385bafa"
|
|
||||||
}},
|
}},
|
||||||
})
|
"size": stream.GetSize(),
|
||||||
pathname = "/orchestration/familyCloud/content/v1.0/getFileUploadURL"
|
"parentFileId": dstDir.GetID(),
|
||||||
return errs.NotImplement
|
"name": stream.GetName(),
|
||||||
}
|
"type": "file",
|
||||||
var resp UploadResp
|
"fileRenameMode": "auto_rename",
|
||||||
_, err := d.post(pathname, data, &resp)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
// Progress
|
|
||||||
p := driver.NewProgress(stream.GetSize(), up)
|
|
||||||
|
|
||||||
var partSize = getPartSize(stream.GetSize())
|
|
||||||
part := (stream.GetSize() + partSize - 1) / partSize
|
|
||||||
if part == 0 {
|
|
||||||
part = 1
|
|
||||||
}
|
|
||||||
for i := int64(0); i < part; i++ {
|
|
||||||
if utils.IsCanceled(ctx) {
|
|
||||||
return ctx.Err()
|
|
||||||
}
|
}
|
||||||
|
pathname := "/hcy/file/create"
|
||||||
start := i * partSize
|
var resp PersonalUploadResp
|
||||||
byteSize := stream.GetSize() - start
|
_, err = d.personalPost(pathname, data, &resp)
|
||||||
if byteSize > partSize {
|
|
||||||
byteSize = partSize
|
|
||||||
}
|
|
||||||
|
|
||||||
limitReader := io.LimitReader(stream, byteSize)
|
|
||||||
// Update Progress
|
|
||||||
r := io.TeeReader(limitReader, p)
|
|
||||||
req, err := http.NewRequest("POST", resp.Data.UploadResult.RedirectionURL, r)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if resp.Data.Exist || resp.Data.RapidUpload {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Progress
|
||||||
|
p := driver.NewProgress(stream.GetSize(), up)
|
||||||
|
|
||||||
|
// Update Progress
|
||||||
|
r := io.TeeReader(stream, p)
|
||||||
|
|
||||||
|
req, err := http.NewRequest("PUT", resp.Data.PartInfos[0].UploadUrl, r)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
req = req.WithContext(ctx)
|
req = req.WithContext(ctx)
|
||||||
req.Header.Set("Content-Type", "text/plain;name="+unicode(stream.GetName()))
|
req.Header.Set("Content-Type", "application/octet-stream")
|
||||||
req.Header.Set("contentSize", strconv.FormatInt(stream.GetSize(), 10))
|
req.Header.Set("Content-Length", fmt.Sprint(stream.GetSize()))
|
||||||
req.Header.Set("range", fmt.Sprintf("bytes=%d-%d", start, start+byteSize-1))
|
req.Header.Set("Origin", "https://yun.139.com")
|
||||||
req.Header.Set("uploadtaskID", resp.Data.UploadResult.UploadTaskID)
|
req.Header.Set("Referer", "https://yun.139.com/")
|
||||||
req.Header.Set("rangeType", "0")
|
req.ContentLength = stream.GetSize()
|
||||||
req.ContentLength = byteSize
|
|
||||||
|
|
||||||
res, err := base.HttpClient.Do(req)
|
res, err := base.HttpClient.Do(req)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
_ = res.Body.Close()
|
_ = res.Body.Close()
|
||||||
log.Debugf("%+v", res)
|
log.Debugf("%+v", res)
|
||||||
if res.StatusCode != http.StatusOK {
|
if res.StatusCode != http.StatusOK {
|
||||||
return fmt.Errorf("unexpected status code: %d", res.StatusCode)
|
return fmt.Errorf("unexpected status code: %d", res.StatusCode)
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
data = base.Json{
|
||||||
|
"contentHash": fullHash,
|
||||||
|
"contentHashAlgorithm": "SHA256",
|
||||||
|
"fileId": resp.Data.FileId,
|
||||||
|
"uploadId": resp.Data.UploadId,
|
||||||
|
}
|
||||||
|
_, err = d.personalPost("/hcy/file/complete", data, nil)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
case MetaPersonal:
|
||||||
|
fallthrough
|
||||||
|
case MetaFamily:
|
||||||
|
data := base.Json{
|
||||||
|
"manualRename": 2,
|
||||||
|
"operation": 0,
|
||||||
|
"fileCount": 1,
|
||||||
|
"totalSize": 0, // 去除上传大小限制
|
||||||
|
"uploadContentList": []base.Json{{
|
||||||
|
"contentName": stream.GetName(),
|
||||||
|
"contentSize": 0, // 去除上传大小限制
|
||||||
|
// "digest": "5a3231986ce7a6b46e408612d385bafa"
|
||||||
|
}},
|
||||||
|
"parentCatalogID": dstDir.GetID(),
|
||||||
|
"newCatalogName": "",
|
||||||
|
"commonAccountInfo": base.Json{
|
||||||
|
"account": d.Account,
|
||||||
|
"accountType": 1,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
pathname := "/orchestration/personalCloud/uploadAndDownload/v1.0/pcUploadFileRequest"
|
||||||
|
if d.isFamily() {
|
||||||
|
// data = d.newJson(base.Json{
|
||||||
|
// "fileCount": 1,
|
||||||
|
// "manualRename": 2,
|
||||||
|
// "operation": 0,
|
||||||
|
// "path": "",
|
||||||
|
// "seqNo": "",
|
||||||
|
// "totalSize": 0,
|
||||||
|
// "uploadContentList": []base.Json{{
|
||||||
|
// "contentName": stream.GetName(),
|
||||||
|
// "contentSize": 0,
|
||||||
|
// // "digest": "5a3231986ce7a6b46e408612d385bafa"
|
||||||
|
// }},
|
||||||
|
// })
|
||||||
|
// pathname = "/orchestration/familyCloud/content/v1.0/getFileUploadURL"
|
||||||
|
return errs.NotImplement
|
||||||
|
}
|
||||||
|
var resp UploadResp
|
||||||
|
_, err := d.post(pathname, data, &resp)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Progress
|
||||||
|
p := driver.NewProgress(stream.GetSize(), up)
|
||||||
|
|
||||||
|
var partSize = getPartSize(stream.GetSize())
|
||||||
|
part := (stream.GetSize() + partSize - 1) / partSize
|
||||||
|
if part == 0 {
|
||||||
|
part = 1
|
||||||
|
}
|
||||||
|
for i := int64(0); i < part; i++ {
|
||||||
|
if utils.IsCanceled(ctx) {
|
||||||
|
return ctx.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
start := i * partSize
|
||||||
|
byteSize := stream.GetSize() - start
|
||||||
|
if byteSize > partSize {
|
||||||
|
byteSize = partSize
|
||||||
|
}
|
||||||
|
|
||||||
|
limitReader := io.LimitReader(stream, byteSize)
|
||||||
|
// Update Progress
|
||||||
|
r := io.TeeReader(limitReader, p)
|
||||||
|
req, err := http.NewRequest("POST", resp.Data.UploadResult.RedirectionURL, r)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
req = req.WithContext(ctx)
|
||||||
|
req.Header.Set("Content-Type", "text/plain;name="+unicode(stream.GetName()))
|
||||||
|
req.Header.Set("contentSize", strconv.FormatInt(stream.GetSize(), 10))
|
||||||
|
req.Header.Set("range", fmt.Sprintf("bytes=%d-%d", start, start+byteSize-1))
|
||||||
|
req.Header.Set("uploadtaskID", resp.Data.UploadResult.UploadTaskID)
|
||||||
|
req.Header.Set("rangeType", "0")
|
||||||
|
req.ContentLength = byteSize
|
||||||
|
|
||||||
|
res, err := base.HttpClient.Do(req)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
_ = res.Body.Close()
|
||||||
|
log.Debugf("%+v", res)
|
||||||
|
if res.StatusCode != http.StatusOK {
|
||||||
|
return fmt.Errorf("unexpected status code: %d", res.StatusCode)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
default:
|
||||||
|
return errs.NotImplement
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Yun139) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
|
||||||
|
switch d.Addition.Type {
|
||||||
|
case MetaPersonalNew:
|
||||||
|
var resp base.Json
|
||||||
|
var uri string
|
||||||
|
data := base.Json{
|
||||||
|
"category": "video",
|
||||||
|
"fileId": args.Obj.GetID(),
|
||||||
|
}
|
||||||
|
switch args.Method {
|
||||||
|
case "video_preview":
|
||||||
|
uri = "/hcy/videoPreview/getPreviewInfo"
|
||||||
|
default:
|
||||||
|
return nil, errs.NotSupport
|
||||||
|
}
|
||||||
|
_, err := d.personalPost(uri, data, &resp)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return resp["data"], nil
|
||||||
|
default:
|
||||||
|
return nil, errs.NotImplement
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
var _ driver.Driver = (*Yun139)(nil)
|
var _ driver.Driver = (*Yun139)(nil)
|
||||||
|
@ -9,7 +9,7 @@ type Addition struct {
|
|||||||
//Account string `json:"account" required:"true"`
|
//Account string `json:"account" required:"true"`
|
||||||
Authorization string `json:"authorization" type:"text" required:"true"`
|
Authorization string `json:"authorization" type:"text" required:"true"`
|
||||||
driver.RootID
|
driver.RootID
|
||||||
Type string `json:"type" type:"select" options:"personal,family" default:"personal"`
|
Type string `json:"type" type:"select" options:"personal,family,personal_new" default:"personal"`
|
||||||
CloudID string `json:"cloud_id"`
|
CloudID string `json:"cloud_id"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1,5 +1,15 @@
|
|||||||
package _139
|
package _139
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/xml"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
MetaPersonal string = "personal"
|
||||||
|
MetaFamily string = "family"
|
||||||
|
MetaPersonalNew string = "personal_new"
|
||||||
|
)
|
||||||
|
|
||||||
type BaseResp struct {
|
type BaseResp struct {
|
||||||
Success bool `json:"success"`
|
Success bool `json:"success"`
|
||||||
Code string `json:"code"`
|
Code string `json:"code"`
|
||||||
@ -185,3 +195,51 @@ type QueryContentListResp struct {
|
|||||||
RecallContent interface{} `json:"recallContent"`
|
RecallContent interface{} `json:"recallContent"`
|
||||||
} `json:"data"`
|
} `json:"data"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type PersonalThumbnail struct {
|
||||||
|
Style string `json:"style"`
|
||||||
|
Url string `json:"url"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type PersonalFileItem struct {
|
||||||
|
FileId string `json:"fileId"`
|
||||||
|
Name string `json:"name"`
|
||||||
|
Size int64 `json:"size"`
|
||||||
|
Type string `json:"type"`
|
||||||
|
CreatedAt string `json:"createdAt"`
|
||||||
|
UpdatedAt string `json:"updatedAt"`
|
||||||
|
Thumbnails []PersonalThumbnail `json:"thumbnailUrls"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type PersonalListResp struct {
|
||||||
|
BaseResp
|
||||||
|
Data struct {
|
||||||
|
Items []PersonalFileItem `json:"items"`
|
||||||
|
NextPageCursor string `json:"nextPageCursor"`
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type PersonalPartInfo struct {
|
||||||
|
PartNumber int `json:"partNumber"`
|
||||||
|
UploadUrl string `json:"uploadUrl"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type PersonalUploadResp struct {
|
||||||
|
BaseResp
|
||||||
|
Data struct {
|
||||||
|
FileId string `json:"fileId"`
|
||||||
|
PartInfos []PersonalPartInfo `json:"partInfos"`
|
||||||
|
Exist bool `json:"exist"`
|
||||||
|
RapidUpload bool `json:"rapidUpload"`
|
||||||
|
UploadId string `json:"uploadId"`
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type RefreshTokenResp struct {
|
||||||
|
XMLName xml.Name `xml:"root"`
|
||||||
|
Return string `xml:"return"`
|
||||||
|
Token string `xml:"token"`
|
||||||
|
Expiretime int32 `xml:"expiretime"`
|
||||||
|
AccessToken string `xml:"accessToken"`
|
||||||
|
Desc string `xml:"desc"`
|
||||||
|
}
|
||||||
|
@ -15,6 +15,7 @@ import (
|
|||||||
"github.com/alist-org/alist/v3/internal/model"
|
"github.com/alist-org/alist/v3/internal/model"
|
||||||
"github.com/alist-org/alist/v3/pkg/utils"
|
"github.com/alist-org/alist/v3/pkg/utils"
|
||||||
"github.com/alist-org/alist/v3/pkg/utils/random"
|
"github.com/alist-org/alist/v3/pkg/utils/random"
|
||||||
|
"github.com/alist-org/alist/v3/internal/op"
|
||||||
"github.com/go-resty/resty/v2"
|
"github.com/go-resty/resty/v2"
|
||||||
jsoniter "github.com/json-iterator/go"
|
jsoniter "github.com/json-iterator/go"
|
||||||
log "github.com/sirupsen/logrus"
|
log "github.com/sirupsen/logrus"
|
||||||
@ -52,6 +53,32 @@ func getTime(t string) time.Time {
|
|||||||
return stamp
|
return stamp
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (d *Yun139) refreshToken() error {
|
||||||
|
url := "https://aas.caiyun.feixin.10086.cn:443/tellin/authTokenRefresh.do"
|
||||||
|
var resp RefreshTokenResp
|
||||||
|
decode, err := base64.StdEncoding.DecodeString(d.Authorization)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
decodeStr := string(decode)
|
||||||
|
splits := strings.Split(decodeStr, ":")
|
||||||
|
reqBody := "<root><token>" + splits[2] + "</token><account>" + splits[1] + "</account><clienttype>656</clienttype></root>"
|
||||||
|
_, err = base.RestyClient.R().
|
||||||
|
ForceContentType("application/xml").
|
||||||
|
SetBody(reqBody).
|
||||||
|
SetResult(&resp).
|
||||||
|
Post(url)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if resp.Return != "0" {
|
||||||
|
return fmt.Errorf("failed to refresh token: %s", resp.Desc)
|
||||||
|
}
|
||||||
|
d.Authorization = base64.StdEncoding.EncodeToString([]byte(splits[0] + ":" + splits[1] + ":" + resp.Token))
|
||||||
|
op.MustSaveDriverStorage(d)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
func (d *Yun139) request(pathname string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
func (d *Yun139) request(pathname string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
||||||
url := "https://yun.139.com" + pathname
|
url := "https://yun.139.com" + pathname
|
||||||
req := base.RestyClient.R()
|
req := base.RestyClient.R()
|
||||||
@ -252,3 +279,154 @@ func unicode(str string) string {
|
|||||||
textUnquoted := textQuoted[1 : len(textQuoted)-1]
|
textUnquoted := textQuoted[1 : len(textQuoted)-1]
|
||||||
return textUnquoted
|
return textUnquoted
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (d *Yun139) personalRequest(pathname string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
||||||
|
url := "https://personal-kd-njs.yun.139.com" + pathname
|
||||||
|
req := base.RestyClient.R()
|
||||||
|
randStr := random.String(16)
|
||||||
|
ts := time.Now().Format("2006-01-02 15:04:05")
|
||||||
|
if callback != nil {
|
||||||
|
callback(req)
|
||||||
|
}
|
||||||
|
body, err := utils.Json.Marshal(req.Body)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
sign := calSign(string(body), ts, randStr)
|
||||||
|
svcType := "1"
|
||||||
|
if d.isFamily() {
|
||||||
|
svcType = "2"
|
||||||
|
}
|
||||||
|
req.SetHeaders(map[string]string{
|
||||||
|
"Accept": "application/json, text/plain, */*",
|
||||||
|
"Authorization": "Basic " + d.Authorization,
|
||||||
|
"Caller": "web",
|
||||||
|
"Cms-Device": "default",
|
||||||
|
"Mcloud-Channel": "1000101",
|
||||||
|
"Mcloud-Client": "10701",
|
||||||
|
"Mcloud-Route": "001",
|
||||||
|
"Mcloud-Sign": fmt.Sprintf("%s,%s,%s", ts, randStr, sign),
|
||||||
|
"Mcloud-Version": "7.13.0",
|
||||||
|
"Origin": "https://yun.139.com",
|
||||||
|
"Referer": "https://yun.139.com/w/",
|
||||||
|
"x-DeviceInfo": "||9|7.13.0|chrome|120.0.0.0|||windows 10||zh-CN|||",
|
||||||
|
"x-huawei-channelSrc": "10000034",
|
||||||
|
"x-inner-ntwk": "2",
|
||||||
|
"x-m4c-caller": "PC",
|
||||||
|
"x-m4c-src": "10002",
|
||||||
|
"x-SvcType": svcType,
|
||||||
|
"X-Yun-Api-Version": "v1",
|
||||||
|
"X-Yun-App-Channel": "10000034",
|
||||||
|
"X-Yun-Channel-Source": "10000034",
|
||||||
|
"X-Yun-Client-Info": "||9|7.13.0|chrome|120.0.0.0|||windows 10||zh-CN|||dW5kZWZpbmVk||",
|
||||||
|
"X-Yun-Module-Type": "100",
|
||||||
|
"X-Yun-Svc-Type": "1",
|
||||||
|
})
|
||||||
|
|
||||||
|
var e BaseResp
|
||||||
|
req.SetResult(&e)
|
||||||
|
res, err := req.Execute(method, url)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
log.Debugln(res.String())
|
||||||
|
if !e.Success {
|
||||||
|
return nil, errors.New(e.Message)
|
||||||
|
}
|
||||||
|
if resp != nil {
|
||||||
|
err = utils.Json.Unmarshal(res.Body(), resp)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return res.Body(), nil
|
||||||
|
}
|
||||||
|
func (d *Yun139) personalPost(pathname string, data interface{}, resp interface{}) ([]byte, error) {
|
||||||
|
return d.personalRequest(pathname, http.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetBody(data)
|
||||||
|
}, resp)
|
||||||
|
}
|
||||||
|
|
||||||
|
func getPersonalTime(t string) time.Time {
|
||||||
|
stamp, err := time.ParseInLocation("2006-01-02T15:04:05.999-07:00", t, utils.CNLoc)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return stamp
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Yun139) personalGetFiles(fileId string) ([]model.Obj, error) {
|
||||||
|
files := make([]model.Obj, 0)
|
||||||
|
nextPageCursor := ""
|
||||||
|
for {
|
||||||
|
data := base.Json{
|
||||||
|
"imageThumbnailStyleList": []string{"Small", "Large"},
|
||||||
|
"orderBy": "updated_at",
|
||||||
|
"orderDirection": "DESC",
|
||||||
|
"pageInfo": base.Json{
|
||||||
|
"pageCursor": nextPageCursor,
|
||||||
|
"pageSize": 100,
|
||||||
|
},
|
||||||
|
"parentFileId": fileId,
|
||||||
|
}
|
||||||
|
var resp PersonalListResp
|
||||||
|
_, err := d.personalPost("/hcy/file/list", data, &resp)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
nextPageCursor = resp.Data.NextPageCursor
|
||||||
|
for _, item := range resp.Data.Items {
|
||||||
|
var isFolder = (item.Type == "folder")
|
||||||
|
var f model.Obj
|
||||||
|
if isFolder {
|
||||||
|
f = &model.Object{
|
||||||
|
ID: item.FileId,
|
||||||
|
Name: item.Name,
|
||||||
|
Size: 0,
|
||||||
|
Modified: getPersonalTime(item.UpdatedAt),
|
||||||
|
Ctime: getPersonalTime(item.CreatedAt),
|
||||||
|
IsFolder: isFolder,
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
var Thumbnails = item.Thumbnails
|
||||||
|
var ThumbnailUrl string
|
||||||
|
if len(Thumbnails) > 0 {
|
||||||
|
ThumbnailUrl = Thumbnails[len(Thumbnails)-1].Url
|
||||||
|
}
|
||||||
|
f = &model.ObjThumb{
|
||||||
|
Object: model.Object{
|
||||||
|
ID: item.FileId,
|
||||||
|
Name: item.Name,
|
||||||
|
Size: item.Size,
|
||||||
|
Modified: getPersonalTime(item.UpdatedAt),
|
||||||
|
Ctime: getPersonalTime(item.CreatedAt),
|
||||||
|
IsFolder: isFolder,
|
||||||
|
},
|
||||||
|
Thumbnail: model.Thumbnail{Thumbnail: ThumbnailUrl},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
files = append(files, f)
|
||||||
|
}
|
||||||
|
if len(nextPageCursor) == 0 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return files, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Yun139) personalGetLink(fileId string) (string, error) {
|
||||||
|
data := base.Json{
|
||||||
|
"fileId": fileId,
|
||||||
|
}
|
||||||
|
res, err := d.personalPost("/hcy/file/getDownloadUrl",
|
||||||
|
data, nil)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
var cdnUrl = jsoniter.Get(res, "data", "cdnUrl").ToString()
|
||||||
|
if cdnUrl != "" {
|
||||||
|
return cdnUrl, nil
|
||||||
|
} else {
|
||||||
|
return jsoniter.Get(res, "data", "url").ToString(), nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
@ -380,7 +380,7 @@ func (d *Cloud189) newUpload(ctx context.Context, dstDir model.Obj, file model.F
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
up(int(i * 100 / count))
|
up(float64(i) * 100 / float64(count))
|
||||||
}
|
}
|
||||||
fileMd5 := hex.EncodeToString(md5Sum.Sum(nil))
|
fileMd5 := hex.EncodeToString(md5Sum.Sum(nil))
|
||||||
sliceMd5 := fileMd5
|
sliceMd5 := fileMd5
|
||||||
|
@ -513,7 +513,7 @@ func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file mo
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
up(int(threadG.Success()) * 100 / count)
|
up(float64(threadG.Success()) * 100 / float64(count))
|
||||||
return nil
|
return nil
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@ -676,7 +676,7 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
up(int(threadG.Success()) * 100 / len(uploadUrls))
|
up(float64(threadG.Success()) * 100 / float64(len(uploadUrls)))
|
||||||
uploadProgress.UploadParts[i] = ""
|
uploadProgress.UploadParts[i] = ""
|
||||||
return nil
|
return nil
|
||||||
})
|
})
|
||||||
@ -812,7 +812,7 @@ func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model
|
|||||||
if _, err := tempFile.Seek(status.GetSize(), io.SeekStart); err != nil {
|
if _, err := tempFile.Seek(status.GetSize(), io.SeekStart); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
up(int(status.GetSize()/file.GetSize()) * 100)
|
up(float64(status.GetSize()) / float64(file.GetSize()) * 100)
|
||||||
}
|
}
|
||||||
|
|
||||||
return y.OldUploadCommit(ctx, status.FileCommitUrl, status.UploadFileId)
|
return y.OldUploadCommit(ctx, status.FileCommitUrl, status.UploadFileId)
|
||||||
|
@ -8,6 +8,7 @@ import (
|
|||||||
"path"
|
"path"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/drivers/base"
|
"github.com/alist-org/alist/v3/drivers/base"
|
||||||
"github.com/alist-org/alist/v3/internal/conf"
|
"github.com/alist-org/alist/v3/internal/conf"
|
||||||
@ -174,13 +175,13 @@ func (d *AListV3) Remove(ctx context.Context, obj model.Obj) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (d *AListV3) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
func (d *AListV3) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||||
_, err := d.request("/fs/put", http.MethodPut, func(req *resty.Request) {
|
_, err := d.requestWithTimeout("/fs/put", http.MethodPut, func(req *resty.Request) {
|
||||||
req.SetHeader("File-Path", path.Join(dstDir.GetPath(), stream.GetName())).
|
req.SetHeader("File-Path", path.Join(dstDir.GetPath(), stream.GetName())).
|
||||||
SetHeader("Password", d.MetaPassword).
|
SetHeader("Password", d.MetaPassword).
|
||||||
SetHeader("Content-Length", strconv.FormatInt(stream.GetSize(), 10)).
|
SetHeader("Content-Length", strconv.FormatInt(stream.GetSize(), 10)).
|
||||||
SetContentLength(true).
|
SetContentLength(true).
|
||||||
SetBody(io.ReadCloser(stream))
|
SetBody(io.ReadCloser(stream))
|
||||||
})
|
}, time.Hour*6)
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -3,6 +3,7 @@ package alist_v3
|
|||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"net/http"
|
"net/http"
|
||||||
|
"time"
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/drivers/base"
|
"github.com/alist-org/alist/v3/drivers/base"
|
||||||
"github.com/alist-org/alist/v3/internal/op"
|
"github.com/alist-org/alist/v3/internal/op"
|
||||||
@ -56,3 +57,33 @@ func (d *AListV3) request(api, method string, callback base.ReqCallback, retry .
|
|||||||
}
|
}
|
||||||
return res.Body(), nil
|
return res.Body(), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (d *AListV3) requestWithTimeout(api, method string, callback base.ReqCallback, timeout time.Duration, retry ...bool) ([]byte, error) {
|
||||||
|
url := d.Address + "/api" + api
|
||||||
|
client := base.NewRestyClient().SetTimeout(timeout)
|
||||||
|
req := client.R()
|
||||||
|
req.SetHeader("Authorization", d.Token)
|
||||||
|
if callback != nil {
|
||||||
|
callback(req)
|
||||||
|
}
|
||||||
|
res, err := req.Execute(method, url)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
log.Debugf("[alist_v3] response body: %s", res.String())
|
||||||
|
if res.StatusCode() >= 400 {
|
||||||
|
return nil, fmt.Errorf("request failed, status: %s", res.Status())
|
||||||
|
}
|
||||||
|
code := utils.Json.Get(res.Body(), "code").ToInt()
|
||||||
|
if code != 200 {
|
||||||
|
if (code == 401 || code == 403) && !utils.IsBool(retry...) {
|
||||||
|
err = d.login()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return d.requestWithTimeout(api, method, callback, timeout, true)
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("request failed,code: %d, message: %s", code, utils.Json.Get(res.Body(), "message").ToString())
|
||||||
|
}
|
||||||
|
return res.Body(), nil
|
||||||
|
}
|
||||||
|
@ -7,7 +7,6 @@ import (
|
|||||||
"encoding/base64"
|
"encoding/base64"
|
||||||
"encoding/hex"
|
"encoding/hex"
|
||||||
"fmt"
|
"fmt"
|
||||||
"github.com/alist-org/alist/v3/internal/stream"
|
|
||||||
"io"
|
"io"
|
||||||
"math"
|
"math"
|
||||||
"math/big"
|
"math/big"
|
||||||
@ -15,6 +14,8 @@ import (
|
|||||||
"os"
|
"os"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/alist-org/alist/v3/internal/stream"
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/drivers/base"
|
"github.com/alist-org/alist/v3/drivers/base"
|
||||||
"github.com/alist-org/alist/v3/internal/conf"
|
"github.com/alist-org/alist/v3/internal/conf"
|
||||||
"github.com/alist-org/alist/v3/internal/driver"
|
"github.com/alist-org/alist/v3/internal/driver"
|
||||||
@ -51,7 +52,7 @@ func (d *AliDrive) Init(ctx context.Context) error {
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
// get driver id
|
// get driver id
|
||||||
res, err, _ := d.request("https://api.aliyundrive.com/v2/user/get", http.MethodPost, nil, nil)
|
res, err, _ := d.request("https://api.alipan.com/v2/user/get", http.MethodPost, nil, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@ -105,7 +106,7 @@ func (d *AliDrive) Link(ctx context.Context, file model.Obj, args model.LinkArgs
|
|||||||
"file_id": file.GetID(),
|
"file_id": file.GetID(),
|
||||||
"expire_sec": 14400,
|
"expire_sec": 14400,
|
||||||
}
|
}
|
||||||
res, err, _ := d.request("https://api.aliyundrive.com/v2/file/get_download_url", http.MethodPost, func(req *resty.Request) {
|
res, err, _ := d.request("https://api.alipan.com/v2/file/get_download_url", http.MethodPost, func(req *resty.Request) {
|
||||||
req.SetBody(data)
|
req.SetBody(data)
|
||||||
}, nil)
|
}, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@ -113,14 +114,14 @@ func (d *AliDrive) Link(ctx context.Context, file model.Obj, args model.LinkArgs
|
|||||||
}
|
}
|
||||||
return &model.Link{
|
return &model.Link{
|
||||||
Header: http.Header{
|
Header: http.Header{
|
||||||
"Referer": []string{"https://www.aliyundrive.com/"},
|
"Referer": []string{"https://www.alipan.com/"},
|
||||||
},
|
},
|
||||||
URL: utils.Json.Get(res, "url").ToString(),
|
URL: utils.Json.Get(res, "url").ToString(),
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *AliDrive) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
func (d *AliDrive) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||||
_, err, _ := d.request("https://api.aliyundrive.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
|
_, err, _ := d.request("https://api.alipan.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
|
||||||
req.SetBody(base.Json{
|
req.SetBody(base.Json{
|
||||||
"check_name_mode": "refuse",
|
"check_name_mode": "refuse",
|
||||||
"drive_id": d.DriveId,
|
"drive_id": d.DriveId,
|
||||||
@ -138,7 +139,7 @@ func (d *AliDrive) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (d *AliDrive) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
func (d *AliDrive) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||||
_, err, _ := d.request("https://api.aliyundrive.com/v3/file/update", http.MethodPost, func(req *resty.Request) {
|
_, err, _ := d.request("https://api.alipan.com/v3/file/update", http.MethodPost, func(req *resty.Request) {
|
||||||
req.SetBody(base.Json{
|
req.SetBody(base.Json{
|
||||||
"check_name_mode": "refuse",
|
"check_name_mode": "refuse",
|
||||||
"drive_id": d.DriveId,
|
"drive_id": d.DriveId,
|
||||||
@ -155,7 +156,7 @@ func (d *AliDrive) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (d *AliDrive) Remove(ctx context.Context, obj model.Obj) error {
|
func (d *AliDrive) Remove(ctx context.Context, obj model.Obj) error {
|
||||||
_, err, _ := d.request("https://api.aliyundrive.com/v2/recyclebin/trash", http.MethodPost, func(req *resty.Request) {
|
_, err, _ := d.request("https://api.alipan.com/v2/recyclebin/trash", http.MethodPost, func(req *resty.Request) {
|
||||||
req.SetBody(base.Json{
|
req.SetBody(base.Json{
|
||||||
"drive_id": d.DriveId,
|
"drive_id": d.DriveId,
|
||||||
"file_id": obj.GetID(),
|
"file_id": obj.GetID(),
|
||||||
@ -215,7 +216,7 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, streamer model.Fil
|
|||||||
}
|
}
|
||||||
|
|
||||||
var resp UploadResp
|
var resp UploadResp
|
||||||
_, err, e := d.request("https://api.aliyundrive.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
|
_, err, e := d.request("https://api.alipan.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
|
||||||
req.SetBody(reqBody)
|
req.SetBody(reqBody)
|
||||||
}, &resp)
|
}, &resp)
|
||||||
|
|
||||||
@ -269,7 +270,7 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, streamer model.Fil
|
|||||||
n, _ := io.NewSectionReader(localFile, o.Int64(), 8).Read(buf[:8])
|
n, _ := io.NewSectionReader(localFile, o.Int64(), 8).Read(buf[:8])
|
||||||
reqBody["proof_code"] = base64.StdEncoding.EncodeToString(buf[:n])
|
reqBody["proof_code"] = base64.StdEncoding.EncodeToString(buf[:n])
|
||||||
|
|
||||||
_, err, e := d.request("https://api.aliyundrive.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
|
_, err, e := d.request("https://api.alipan.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
|
||||||
req.SetBody(reqBody)
|
req.SetBody(reqBody)
|
||||||
}, &resp)
|
}, &resp)
|
||||||
if err != nil && e.Code != "PreHashMatched" {
|
if err != nil && e.Code != "PreHashMatched" {
|
||||||
@ -304,11 +305,11 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, streamer model.Fil
|
|||||||
}
|
}
|
||||||
res.Body.Close()
|
res.Body.Close()
|
||||||
if count > 0 {
|
if count > 0 {
|
||||||
up(i * 100 / count)
|
up(float64(i) * 100 / float64(count))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
var resp2 base.Json
|
var resp2 base.Json
|
||||||
_, err, e = d.request("https://api.aliyundrive.com/v2/file/complete", http.MethodPost, func(req *resty.Request) {
|
_, err, e = d.request("https://api.alipan.com/v2/file/complete", http.MethodPost, func(req *resty.Request) {
|
||||||
req.SetBody(base.Json{
|
req.SetBody(base.Json{
|
||||||
"drive_id": d.DriveId,
|
"drive_id": d.DriveId,
|
||||||
"file_id": resp.FileId,
|
"file_id": resp.FileId,
|
||||||
@ -333,10 +334,10 @@ func (d *AliDrive) Other(ctx context.Context, args model.OtherArgs) (interface{}
|
|||||||
}
|
}
|
||||||
switch args.Method {
|
switch args.Method {
|
||||||
case "doc_preview":
|
case "doc_preview":
|
||||||
url = "https://api.aliyundrive.com/v2/file/get_office_preview_url"
|
url = "https://api.alipan.com/v2/file/get_office_preview_url"
|
||||||
data["access_token"] = d.AccessToken
|
data["access_token"] = d.AccessToken
|
||||||
case "video_preview":
|
case "video_preview":
|
||||||
url = "https://api.aliyundrive.com/v2/file/get_video_preview_play_info"
|
url = "https://api.alipan.com/v2/file/get_video_preview_play_info"
|
||||||
data["category"] = "live_transcoding"
|
data["category"] = "live_transcoding"
|
||||||
data["url_expire_sec"] = 14400
|
data["url_expire_sec"] = 14400
|
||||||
default:
|
default:
|
||||||
|
@ -26,7 +26,7 @@ func (d *AliDrive) createSession() error {
|
|||||||
state.retry = 0
|
state.retry = 0
|
||||||
return fmt.Errorf("createSession failed after three retries")
|
return fmt.Errorf("createSession failed after three retries")
|
||||||
}
|
}
|
||||||
_, err, _ := d.request("https://api.aliyundrive.com/users/v1/users/device/create_session", http.MethodPost, func(req *resty.Request) {
|
_, err, _ := d.request("https://api.alipan.com/users/v1/users/device/create_session", http.MethodPost, func(req *resty.Request) {
|
||||||
req.SetBody(base.Json{
|
req.SetBody(base.Json{
|
||||||
"deviceName": "samsung",
|
"deviceName": "samsung",
|
||||||
"modelName": "SM-G9810",
|
"modelName": "SM-G9810",
|
||||||
@ -42,7 +42,7 @@ func (d *AliDrive) createSession() error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// func (d *AliDrive) renewSession() error {
|
// func (d *AliDrive) renewSession() error {
|
||||||
// _, err, _ := d.request("https://api.aliyundrive.com/users/v1/users/device/renew_session", http.MethodPost, nil, nil)
|
// _, err, _ := d.request("https://api.alipan.com/users/v1/users/device/renew_session", http.MethodPost, nil, nil)
|
||||||
// return err
|
// return err
|
||||||
// }
|
// }
|
||||||
|
|
||||||
@ -58,7 +58,7 @@ func (d *AliDrive) sign() {
|
|||||||
// do others that not defined in Driver interface
|
// do others that not defined in Driver interface
|
||||||
|
|
||||||
func (d *AliDrive) refreshToken() error {
|
func (d *AliDrive) refreshToken() error {
|
||||||
url := "https://auth.aliyundrive.com/v2/account/token"
|
url := "https://auth.alipan.com/v2/account/token"
|
||||||
var resp base.TokenResp
|
var resp base.TokenResp
|
||||||
var e RespErr
|
var e RespErr
|
||||||
_, err := base.RestyClient.R().
|
_, err := base.RestyClient.R().
|
||||||
@ -85,7 +85,7 @@ func (d *AliDrive) request(url, method string, callback base.ReqCallback, resp i
|
|||||||
req := base.RestyClient.R()
|
req := base.RestyClient.R()
|
||||||
state, ok := global.Load(d.UserID)
|
state, ok := global.Load(d.UserID)
|
||||||
if !ok {
|
if !ok {
|
||||||
if url == "https://api.aliyundrive.com/v2/user/get" {
|
if url == "https://api.alipan.com/v2/user/get" {
|
||||||
state = &State{}
|
state = &State{}
|
||||||
} else {
|
} else {
|
||||||
return nil, fmt.Errorf("can't load user state, user_id: %s", d.UserID), RespErr{}
|
return nil, fmt.Errorf("can't load user state, user_id: %s", d.UserID), RespErr{}
|
||||||
@ -94,8 +94,8 @@ func (d *AliDrive) request(url, method string, callback base.ReqCallback, resp i
|
|||||||
req.SetHeaders(map[string]string{
|
req.SetHeaders(map[string]string{
|
||||||
"Authorization": "Bearer\t" + d.AccessToken,
|
"Authorization": "Bearer\t" + d.AccessToken,
|
||||||
"content-type": "application/json",
|
"content-type": "application/json",
|
||||||
"origin": "https://www.aliyundrive.com",
|
"origin": "https://www.alipan.com",
|
||||||
"Referer": "https://aliyundrive.com/",
|
"Referer": "https://alipan.com/",
|
||||||
"X-Signature": state.signature,
|
"X-Signature": state.signature,
|
||||||
"x-request-id": uuid.NewString(),
|
"x-request-id": uuid.NewString(),
|
||||||
"X-Canary": "client=Android,app=adrive,version=v4.1.0",
|
"X-Canary": "client=Android,app=adrive,version=v4.1.0",
|
||||||
@ -158,7 +158,7 @@ func (d *AliDrive) getFiles(fileId string) ([]File, error) {
|
|||||||
"video_thumbnail_process": "video/snapshot,t_0,f_jpg,ar_auto,w_300",
|
"video_thumbnail_process": "video/snapshot,t_0,f_jpg,ar_auto,w_300",
|
||||||
"url_expire_sec": 14400,
|
"url_expire_sec": 14400,
|
||||||
}
|
}
|
||||||
_, err, _ := d.request("https://api.aliyundrive.com/v2/file/list", http.MethodPost, func(req *resty.Request) {
|
_, err, _ := d.request("https://api.alipan.com/v2/file/list", http.MethodPost, func(req *resty.Request) {
|
||||||
req.SetBody(data)
|
req.SetBody(data)
|
||||||
}, &resp)
|
}, &resp)
|
||||||
|
|
||||||
@ -172,7 +172,7 @@ func (d *AliDrive) getFiles(fileId string) ([]File, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (d *AliDrive) batch(srcId, dstId string, url string) error {
|
func (d *AliDrive) batch(srcId, dstId string, url string) error {
|
||||||
res, err, _ := d.request("https://api.aliyundrive.com/v3/batch", http.MethodPost, func(req *resty.Request) {
|
res, err, _ := d.request("https://api.alipan.com/v3/batch", http.MethodPost, func(req *resty.Request) {
|
||||||
req.SetBody(base.Json{
|
req.SetBody(base.Json{
|
||||||
"requests": []base.Json{
|
"requests": []base.Json{
|
||||||
{
|
{
|
||||||
|
@ -93,7 +93,7 @@ func (d *AliyundriveOpen) link(ctx context.Context, file model.Obj) (*model.Link
|
|||||||
}
|
}
|
||||||
url = utils.Json.Get(res, "streamsUrl", d.LIVPDownloadFormat).ToString()
|
url = utils.Json.Get(res, "streamsUrl", d.LIVPDownloadFormat).ToString()
|
||||||
}
|
}
|
||||||
exp := time.Hour
|
exp := time.Minute
|
||||||
return &model.Link{
|
return &model.Link{
|
||||||
URL: url,
|
URL: url,
|
||||||
Expiration: &exp,
|
Expiration: &exp,
|
||||||
|
@ -36,7 +36,7 @@ var config = driver.Config{
|
|||||||
func init() {
|
func init() {
|
||||||
op.RegisterDriver(func() driver.Driver {
|
op.RegisterDriver(func() driver.Driver {
|
||||||
return &AliyundriveOpen{
|
return &AliyundriveOpen{
|
||||||
base: "https://openapi.aliyundrive.com",
|
base: "https://openapi.alipan.com",
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -258,7 +258,7 @@ func (d *AliyundriveOpen) upload(ctx context.Context, dstDir model.Obj, stream m
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
offset += partSize
|
offset += partSize
|
||||||
up(i * 100 / count)
|
up(float64(i*100) / float64(count))
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
log.Debugf("[aliyundrive_open] rapid upload success, file id: %s", createResp.FileId)
|
log.Debugf("[aliyundrive_open] rapid upload success, file id: %s", createResp.FileId)
|
||||||
|
@ -105,7 +105,7 @@ func (d *AliyundriveShare) link(ctx context.Context, file model.Obj) (*model.Lin
|
|||||||
"share_id": d.ShareId,
|
"share_id": d.ShareId,
|
||||||
}
|
}
|
||||||
var resp ShareLinkResp
|
var resp ShareLinkResp
|
||||||
_, err := d.request("https://api.aliyundrive.com/v2/file/get_share_link_download_url", http.MethodPost, func(req *resty.Request) {
|
_, err := d.request("https://api.alipan.com/v2/file/get_share_link_download_url", http.MethodPost, func(req *resty.Request) {
|
||||||
req.SetHeader(CanaryHeaderKey, CanaryHeaderValue).SetBody(data).SetResult(&resp)
|
req.SetHeader(CanaryHeaderKey, CanaryHeaderValue).SetBody(data).SetResult(&resp)
|
||||||
})
|
})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@ -113,7 +113,7 @@ func (d *AliyundriveShare) link(ctx context.Context, file model.Obj) (*model.Lin
|
|||||||
}
|
}
|
||||||
return &model.Link{
|
return &model.Link{
|
||||||
Header: http.Header{
|
Header: http.Header{
|
||||||
"Referer": []string{"https://www.aliyundrive.com/"},
|
"Referer": []string{"https://www.alipan.com/"},
|
||||||
},
|
},
|
||||||
URL: resp.DownloadUrl,
|
URL: resp.DownloadUrl,
|
||||||
}, nil
|
}, nil
|
||||||
@ -128,9 +128,9 @@ func (d *AliyundriveShare) Other(ctx context.Context, args model.OtherArgs) (int
|
|||||||
}
|
}
|
||||||
switch args.Method {
|
switch args.Method {
|
||||||
case "doc_preview":
|
case "doc_preview":
|
||||||
url = "https://api.aliyundrive.com/v2/file/get_office_preview_url"
|
url = "https://api.alipan.com/v2/file/get_office_preview_url"
|
||||||
case "video_preview":
|
case "video_preview":
|
||||||
url = "https://api.aliyundrive.com/v2/file/get_video_preview_play_info"
|
url = "https://api.alipan.com/v2/file/get_video_preview_play_info"
|
||||||
data["category"] = "live_transcoding"
|
data["category"] = "live_transcoding"
|
||||||
default:
|
default:
|
||||||
return nil, errs.NotSupport
|
return nil, errs.NotSupport
|
||||||
|
@ -16,7 +16,7 @@ const (
|
|||||||
)
|
)
|
||||||
|
|
||||||
func (d *AliyundriveShare) refreshToken() error {
|
func (d *AliyundriveShare) refreshToken() error {
|
||||||
url := "https://auth.aliyundrive.com/v2/account/token"
|
url := "https://auth.alipan.com/v2/account/token"
|
||||||
var resp base.TokenResp
|
var resp base.TokenResp
|
||||||
var e ErrorResp
|
var e ErrorResp
|
||||||
_, err := base.RestyClient.R().
|
_, err := base.RestyClient.R().
|
||||||
@ -47,7 +47,7 @@ func (d *AliyundriveShare) getShareToken() error {
|
|||||||
var resp ShareTokenResp
|
var resp ShareTokenResp
|
||||||
_, err := base.RestyClient.R().
|
_, err := base.RestyClient.R().
|
||||||
SetResult(&resp).SetError(&e).SetBody(data).
|
SetResult(&resp).SetError(&e).SetBody(data).
|
||||||
Post("https://api.aliyundrive.com/v2/share_link/get_share_token")
|
Post("https://api.alipan.com/v2/share_link/get_share_token")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@ -116,7 +116,7 @@ func (d *AliyundriveShare) getFiles(fileId string) ([]File, error) {
|
|||||||
SetHeader("x-share-token", d.ShareToken).
|
SetHeader("x-share-token", d.ShareToken).
|
||||||
SetHeader(CanaryHeaderKey, CanaryHeaderValue).
|
SetHeader(CanaryHeaderKey, CanaryHeaderValue).
|
||||||
SetResult(&resp).SetError(&e).SetBody(data).
|
SetResult(&resp).SetError(&e).SetBody(data).
|
||||||
Post("https://api.aliyundrive.com/adrive/v3/file/list")
|
Post("https://api.alipan.com/adrive/v3/file/list")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
@ -2,6 +2,7 @@ package drivers
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
_ "github.com/alist-org/alist/v3/drivers/115"
|
_ "github.com/alist-org/alist/v3/drivers/115"
|
||||||
|
_ "github.com/alist-org/alist/v3/drivers/115_share"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/123"
|
_ "github.com/alist-org/alist/v3/drivers/123"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/123_link"
|
_ "github.com/alist-org/alist/v3/drivers/123_link"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/123_share"
|
_ "github.com/alist-org/alist/v3/drivers/123_share"
|
||||||
@ -17,12 +18,14 @@ import (
|
|||||||
_ "github.com/alist-org/alist/v3/drivers/baidu_netdisk"
|
_ "github.com/alist-org/alist/v3/drivers/baidu_netdisk"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/baidu_photo"
|
_ "github.com/alist-org/alist/v3/drivers/baidu_photo"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/baidu_share"
|
_ "github.com/alist-org/alist/v3/drivers/baidu_share"
|
||||||
|
_ "github.com/alist-org/alist/v3/drivers/chaoxing"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/cloudreve"
|
_ "github.com/alist-org/alist/v3/drivers/cloudreve"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/crypt"
|
_ "github.com/alist-org/alist/v3/drivers/crypt"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/dropbox"
|
_ "github.com/alist-org/alist/v3/drivers/dropbox"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/ftp"
|
_ "github.com/alist-org/alist/v3/drivers/ftp"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/google_drive"
|
_ "github.com/alist-org/alist/v3/drivers/google_drive"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/google_photo"
|
_ "github.com/alist-org/alist/v3/drivers/google_photo"
|
||||||
|
_ "github.com/alist-org/alist/v3/drivers/ilanzou"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/ipfs_api"
|
_ "github.com/alist-org/alist/v3/drivers/ipfs_api"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/lanzou"
|
_ "github.com/alist-org/alist/v3/drivers/lanzou"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/local"
|
_ "github.com/alist-org/alist/v3/drivers/local"
|
||||||
@ -34,6 +37,7 @@ import (
|
|||||||
_ "github.com/alist-org/alist/v3/drivers/pikpak"
|
_ "github.com/alist-org/alist/v3/drivers/pikpak"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/pikpak_share"
|
_ "github.com/alist-org/alist/v3/drivers/pikpak_share"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/quark_uc"
|
_ "github.com/alist-org/alist/v3/drivers/quark_uc"
|
||||||
|
_ "github.com/alist-org/alist/v3/drivers/quqi"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/s3"
|
_ "github.com/alist-org/alist/v3/drivers/s3"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/seafile"
|
_ "github.com/alist-org/alist/v3/drivers/seafile"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/sftp"
|
_ "github.com/alist-org/alist/v3/drivers/sftp"
|
||||||
@ -45,6 +49,7 @@ import (
|
|||||||
_ "github.com/alist-org/alist/v3/drivers/url_tree"
|
_ "github.com/alist-org/alist/v3/drivers/url_tree"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/uss"
|
_ "github.com/alist-org/alist/v3/drivers/uss"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/virtual"
|
_ "github.com/alist-org/alist/v3/drivers/virtual"
|
||||||
|
_ "github.com/alist-org/alist/v3/drivers/vtencent"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/webdav"
|
_ "github.com/alist-org/alist/v3/drivers/webdav"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/weiyun"
|
_ "github.com/alist-org/alist/v3/drivers/weiyun"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/wopan"
|
_ "github.com/alist-org/alist/v3/drivers/wopan"
|
||||||
|
@ -278,7 +278,7 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
up(int(threadG.Success()) * 100 / len(precreateResp.BlockList))
|
up(float64(threadG.Success()) * 100 / float64(len(precreateResp.BlockList)))
|
||||||
precreateResp.BlockList[i] = -1
|
precreateResp.BlockList[i] = -1
|
||||||
return nil
|
return nil
|
||||||
})
|
})
|
||||||
|
@ -329,7 +329,7 @@ func (d *BaiduPhoto) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
up(int(threadG.Success()) * 100 / len(precreateResp.BlockList))
|
up(float64(threadG.Success()) * 100 / float64(len(precreateResp.BlockList)))
|
||||||
precreateResp.BlockList[i] = -1
|
precreateResp.BlockList[i] = -1
|
||||||
return nil
|
return nil
|
||||||
})
|
})
|
||||||
|
@ -33,6 +33,7 @@ func NewRestyClient() *resty.Client {
|
|||||||
client := resty.New().
|
client := resty.New().
|
||||||
SetHeader("user-agent", UserAgent).
|
SetHeader("user-agent", UserAgent).
|
||||||
SetRetryCount(3).
|
SetRetryCount(3).
|
||||||
|
SetRetryResetReaders(true).
|
||||||
SetTimeout(DefaultTimeout).
|
SetTimeout(DefaultTimeout).
|
||||||
SetTLSClientConfig(&tls.Config{InsecureSkipVerify: conf.Conf.TlsInsecureSkipVerify})
|
SetTLSClientConfig(&tls.Config{InsecureSkipVerify: conf.Conf.TlsInsecureSkipVerify})
|
||||||
return client
|
return client
|
||||||
|
297
drivers/chaoxing/driver.go
Normal file
297
drivers/chaoxing/driver.go
Normal file
@ -0,0 +1,297 @@
|
|||||||
|
package chaoxing
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"mime/multipart"
|
||||||
|
"net/http"
|
||||||
|
"net/url"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/alist-org/alist/v3/internal/driver"
|
||||||
|
"github.com/alist-org/alist/v3/internal/errs"
|
||||||
|
"github.com/alist-org/alist/v3/internal/model"
|
||||||
|
"github.com/alist-org/alist/v3/internal/op"
|
||||||
|
"github.com/alist-org/alist/v3/pkg/cron"
|
||||||
|
"github.com/alist-org/alist/v3/pkg/utils"
|
||||||
|
"github.com/go-resty/resty/v2"
|
||||||
|
"google.golang.org/appengine/log"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ChaoXing struct {
|
||||||
|
model.Storage
|
||||||
|
Addition
|
||||||
|
cron *cron.Cron
|
||||||
|
config driver.Config
|
||||||
|
conf Conf
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ChaoXing) Config() driver.Config {
|
||||||
|
return d.config
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ChaoXing) GetAddition() driver.Additional {
|
||||||
|
return &d.Addition
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ChaoXing) refreshCookie() error {
|
||||||
|
cookie, err := d.Login()
|
||||||
|
if err != nil {
|
||||||
|
d.Status = err.Error()
|
||||||
|
op.MustSaveDriverStorage(d)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
d.Addition.Cookie = cookie
|
||||||
|
op.MustSaveDriverStorage(d)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ChaoXing) Init(ctx context.Context) error {
|
||||||
|
err := d.refreshCookie()
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(ctx, err.Error())
|
||||||
|
}
|
||||||
|
d.cron = cron.NewCron(time.Hour * 12)
|
||||||
|
d.cron.Do(func() {
|
||||||
|
err = d.refreshCookie()
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(ctx, err.Error())
|
||||||
|
}
|
||||||
|
})
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ChaoXing) Drop(ctx context.Context) error {
|
||||||
|
d.cron.Stop()
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ChaoXing) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||||
|
files, err := d.GetFiles(dir.GetID())
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
|
||||||
|
return fileToObj(src), nil
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ChaoXing) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||||
|
var resp DownResp
|
||||||
|
ua := d.conf.ua
|
||||||
|
fileId := strings.Split(file.GetID(), "$")[1]
|
||||||
|
_, err := d.requestDownload("/screen/note_note/files/status/"+fileId, http.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetHeader("User-Agent", ua)
|
||||||
|
}, &resp)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
u := resp.Download
|
||||||
|
return &model.Link{
|
||||||
|
URL: u,
|
||||||
|
Header: http.Header{
|
||||||
|
"Cookie": []string{d.Cookie},
|
||||||
|
"Referer": []string{d.conf.referer},
|
||||||
|
"User-Agent": []string{ua},
|
||||||
|
},
|
||||||
|
Concurrency: 2,
|
||||||
|
PartSize: 10 * utils.MB,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ChaoXing) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||||
|
query := map[string]string{
|
||||||
|
"bbsid": d.Addition.Bbsid,
|
||||||
|
"name": dirName,
|
||||||
|
"pid": parentDir.GetID(),
|
||||||
|
}
|
||||||
|
var resp ListFileResp
|
||||||
|
_, err := d.request("/pc/resource/addResourceFolder", http.MethodGet, func(req *resty.Request) {
|
||||||
|
req.SetQueryParams(query)
|
||||||
|
}, &resp)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if resp.Result != 1 {
|
||||||
|
msg := fmt.Sprintf("error:%s", resp.Msg)
|
||||||
|
return errors.New(msg)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ChaoXing) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||||
|
query := map[string]string{
|
||||||
|
"bbsid": d.Addition.Bbsid,
|
||||||
|
"folderIds": srcObj.GetID(),
|
||||||
|
"targetId": dstDir.GetID(),
|
||||||
|
}
|
||||||
|
if !srcObj.IsDir() {
|
||||||
|
query = map[string]string{
|
||||||
|
"bbsid": d.Addition.Bbsid,
|
||||||
|
"recIds": strings.Split(srcObj.GetID(), "$")[0],
|
||||||
|
"targetId": dstDir.GetID(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
var resp ListFileResp
|
||||||
|
_, err := d.request("/pc/resource/moveResource", http.MethodGet, func(req *resty.Request) {
|
||||||
|
req.SetQueryParams(query)
|
||||||
|
}, &resp)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if !resp.Status {
|
||||||
|
msg := fmt.Sprintf("error:%s", resp.Msg)
|
||||||
|
return errors.New(msg)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ChaoXing) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||||
|
query := map[string]string{
|
||||||
|
"bbsid": d.Addition.Bbsid,
|
||||||
|
"folderId": srcObj.GetID(),
|
||||||
|
"name": newName,
|
||||||
|
}
|
||||||
|
path := "/pc/resource/updateResourceFolderName"
|
||||||
|
if !srcObj.IsDir() {
|
||||||
|
// path = "/pc/resource/updateResourceFileName"
|
||||||
|
// query = map[string]string{
|
||||||
|
// "bbsid": d.Addition.Bbsid,
|
||||||
|
// "recIds": strings.Split(srcObj.GetID(), "$")[0],
|
||||||
|
// "name": newName,
|
||||||
|
// }
|
||||||
|
return errors.New("此网盘不支持修改文件名")
|
||||||
|
}
|
||||||
|
var resp ListFileResp
|
||||||
|
_, err := d.request(path, http.MethodGet, func(req *resty.Request) {
|
||||||
|
req.SetQueryParams(query)
|
||||||
|
}, &resp)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if resp.Result != 1 {
|
||||||
|
msg := fmt.Sprintf("error:%s", resp.Msg)
|
||||||
|
return errors.New(msg)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ChaoXing) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||||
|
// TODO copy obj, optional
|
||||||
|
return errs.NotImplement
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ChaoXing) Remove(ctx context.Context, obj model.Obj) error {
|
||||||
|
query := map[string]string{
|
||||||
|
"bbsid": d.Addition.Bbsid,
|
||||||
|
"folderIds": obj.GetID(),
|
||||||
|
}
|
||||||
|
path := "/pc/resource/deleteResourceFolder"
|
||||||
|
var resp ListFileResp
|
||||||
|
if !obj.IsDir() {
|
||||||
|
path = "/pc/resource/deleteResourceFile"
|
||||||
|
query = map[string]string{
|
||||||
|
"bbsid": d.Addition.Bbsid,
|
||||||
|
"recIds": strings.Split(obj.GetID(), "$")[0],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_, err := d.request(path, http.MethodGet, func(req *resty.Request) {
|
||||||
|
req.SetQueryParams(query)
|
||||||
|
}, &resp)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if resp.Result != 1 {
|
||||||
|
msg := fmt.Sprintf("error:%s", resp.Msg)
|
||||||
|
return errors.New(msg)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ChaoXing) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||||
|
var resp UploadDataRsp
|
||||||
|
_, err := d.request("https://noteyd.chaoxing.com/pc/files/getUploadConfig", http.MethodGet, func(req *resty.Request) {
|
||||||
|
}, &resp)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if resp.Result != 1 {
|
||||||
|
return errors.New("get upload data error")
|
||||||
|
}
|
||||||
|
body := &bytes.Buffer{}
|
||||||
|
writer := multipart.NewWriter(body)
|
||||||
|
filePart, err := writer.CreateFormFile("file", stream.GetName())
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
_, err = io.Copy(filePart, stream)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
err = writer.WriteField("_token", resp.Msg.Token)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
err = writer.WriteField("puid", fmt.Sprintf("%d", resp.Msg.Puid))
|
||||||
|
if err != nil {
|
||||||
|
fmt.Println("Error writing param2 to request body:", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
err = writer.Close()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
req, err := http.NewRequest("POST", "https://pan-yz.chaoxing.com/upload", body)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
req.Header.Set("Content-Type", writer.FormDataContentType())
|
||||||
|
req.Header.Set("Content-Length", fmt.Sprintf("%d", body.Len()))
|
||||||
|
resps, err := http.DefaultClient.Do(req)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer resps.Body.Close()
|
||||||
|
bodys, err := io.ReadAll(resps.Body)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
var fileRsp UploadFileDataRsp
|
||||||
|
err = json.Unmarshal(bodys, &fileRsp)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if fileRsp.Msg != "success" {
|
||||||
|
return errors.New(fileRsp.Msg)
|
||||||
|
}
|
||||||
|
uploadDoneParam := UploadDoneParam{Key: fileRsp.ObjectID, Cataid: "100000019", Param: fileRsp.Data}
|
||||||
|
params, err := json.Marshal(uploadDoneParam)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
query := map[string]string{
|
||||||
|
"bbsid": d.Addition.Bbsid,
|
||||||
|
"pid": dstDir.GetID(),
|
||||||
|
"type": "yunpan",
|
||||||
|
"params": url.QueryEscape("[" + string(params) + "]"),
|
||||||
|
}
|
||||||
|
var respd ListFileResp
|
||||||
|
_, err = d.request("/pc/resource/addResource", http.MethodGet, func(req *resty.Request) {
|
||||||
|
req.SetQueryParams(query)
|
||||||
|
}, &respd)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if respd.Result != 1 {
|
||||||
|
msg := fmt.Sprintf("error:%v", resp.Msg)
|
||||||
|
return errors.New(msg)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var _ driver.Driver = (*ChaoXing)(nil)
|
47
drivers/chaoxing/meta.go
Normal file
47
drivers/chaoxing/meta.go
Normal file
@ -0,0 +1,47 @@
|
|||||||
|
package chaoxing
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/alist-org/alist/v3/internal/driver"
|
||||||
|
"github.com/alist-org/alist/v3/internal/op"
|
||||||
|
)
|
||||||
|
|
||||||
|
// 此程序挂载的是超星小组网盘,需要代理才能使用;
|
||||||
|
// 登录超星后进入个人空间,进入小组,新建小组,点击进去。
|
||||||
|
// url中就有bbsid的参数,系统限制单文件大小2G,没有总容量限制
|
||||||
|
type Addition struct {
|
||||||
|
// 超星用户名及密码
|
||||||
|
UserName string `json:"user_name" required:"true"`
|
||||||
|
Password string `json:"password" required:"true"`
|
||||||
|
// 从自己新建的小组url里获取
|
||||||
|
Bbsid string `json:"bbsid" required:"true"`
|
||||||
|
driver.RootID
|
||||||
|
// 可不填,程序会自动登录获取
|
||||||
|
Cookie string `json:"cookie"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type Conf struct {
|
||||||
|
ua string
|
||||||
|
referer string
|
||||||
|
api string
|
||||||
|
DowloadApi string
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
op.RegisterDriver(func() driver.Driver {
|
||||||
|
return &ChaoXing{
|
||||||
|
config: driver.Config{
|
||||||
|
Name: "ChaoXingGroupDrive",
|
||||||
|
OnlyProxy: true,
|
||||||
|
OnlyLocal: false,
|
||||||
|
DefaultRoot: "-1",
|
||||||
|
NoOverwriteUpload: true,
|
||||||
|
},
|
||||||
|
conf: Conf{
|
||||||
|
ua: "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) quark-cloud-drive/2.5.20 Chrome/100.0.4896.160 Electron/18.3.5.4-b478491100 Safari/537.36 Channel/pckk_other_ch",
|
||||||
|
referer: "https://chaoxing.com/",
|
||||||
|
api: "https://groupweb.chaoxing.com",
|
||||||
|
DowloadApi: "https://noteyd.chaoxing.com",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
276
drivers/chaoxing/types.go
Normal file
276
drivers/chaoxing/types.go
Normal file
@ -0,0 +1,276 @@
|
|||||||
|
package chaoxing
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"strconv"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/alist-org/alist/v3/internal/model"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Resp struct {
|
||||||
|
Result int `json:"result"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type UserAuth struct {
|
||||||
|
GroupAuth struct {
|
||||||
|
AddData int `json:"addData"`
|
||||||
|
AddDataFolder int `json:"addDataFolder"`
|
||||||
|
AddLebel int `json:"addLebel"`
|
||||||
|
AddManager int `json:"addManager"`
|
||||||
|
AddMem int `json:"addMem"`
|
||||||
|
AddTopicFolder int `json:"addTopicFolder"`
|
||||||
|
AnonymousAddReply int `json:"anonymousAddReply"`
|
||||||
|
AnonymousAddTopic int `json:"anonymousAddTopic"`
|
||||||
|
BatchOperation int `json:"batchOperation"`
|
||||||
|
DelData int `json:"delData"`
|
||||||
|
DelDataFolder int `json:"delDataFolder"`
|
||||||
|
DelMem int `json:"delMem"`
|
||||||
|
DelTopicFolder int `json:"delTopicFolder"`
|
||||||
|
Dismiss int `json:"dismiss"`
|
||||||
|
ExamEnc string `json:"examEnc"`
|
||||||
|
GroupChat int `json:"groupChat"`
|
||||||
|
IsShowCircleChatButton int `json:"isShowCircleChatButton"`
|
||||||
|
IsShowCircleCloudButton int `json:"isShowCircleCloudButton"`
|
||||||
|
IsShowCompanyButton int `json:"isShowCompanyButton"`
|
||||||
|
Join int `json:"join"`
|
||||||
|
MemberShowRankSet int `json:"memberShowRankSet"`
|
||||||
|
ModifyDataFolder int `json:"modifyDataFolder"`
|
||||||
|
ModifyExpose int `json:"modifyExpose"`
|
||||||
|
ModifyName int `json:"modifyName"`
|
||||||
|
ModifyShowPic int `json:"modifyShowPic"`
|
||||||
|
ModifyTopicFolder int `json:"modifyTopicFolder"`
|
||||||
|
ModifyVisibleState int `json:"modifyVisibleState"`
|
||||||
|
OnlyMgrScoreSet int `json:"onlyMgrScoreSet"`
|
||||||
|
Quit int `json:"quit"`
|
||||||
|
SendNotice int `json:"sendNotice"`
|
||||||
|
ShowActivityManage int `json:"showActivityManage"`
|
||||||
|
ShowActivitySet int `json:"showActivitySet"`
|
||||||
|
ShowAttentionSet int `json:"showAttentionSet"`
|
||||||
|
ShowAutoClearStatus int `json:"showAutoClearStatus"`
|
||||||
|
ShowBarcode int `json:"showBarcode"`
|
||||||
|
ShowChatRoomSet int `json:"showChatRoomSet"`
|
||||||
|
ShowCircleActivitySet int `json:"showCircleActivitySet"`
|
||||||
|
ShowCircleSet int `json:"showCircleSet"`
|
||||||
|
ShowCmem int `json:"showCmem"`
|
||||||
|
ShowDataFolder int `json:"showDataFolder"`
|
||||||
|
ShowDelReason int `json:"showDelReason"`
|
||||||
|
ShowForward int `json:"showForward"`
|
||||||
|
ShowGroupChat int `json:"showGroupChat"`
|
||||||
|
ShowGroupChatSet int `json:"showGroupChatSet"`
|
||||||
|
ShowGroupSquareSet int `json:"showGroupSquareSet"`
|
||||||
|
ShowLockAddSet int `json:"showLockAddSet"`
|
||||||
|
ShowManager int `json:"showManager"`
|
||||||
|
ShowManagerIdentitySet int `json:"showManagerIdentitySet"`
|
||||||
|
ShowNeedDelReasonSet int `json:"showNeedDelReasonSet"`
|
||||||
|
ShowNotice int `json:"showNotice"`
|
||||||
|
ShowOnlyManagerReplySet int `json:"showOnlyManagerReplySet"`
|
||||||
|
ShowRank int `json:"showRank"`
|
||||||
|
ShowRank2 int `json:"showRank2"`
|
||||||
|
ShowRecycleBin int `json:"showRecycleBin"`
|
||||||
|
ShowReplyByClass int `json:"showReplyByClass"`
|
||||||
|
ShowReplyNeedCheck int `json:"showReplyNeedCheck"`
|
||||||
|
ShowSignbanSet int `json:"showSignbanSet"`
|
||||||
|
ShowSpeechSet int `json:"showSpeechSet"`
|
||||||
|
ShowTopicCheck int `json:"showTopicCheck"`
|
||||||
|
ShowTopicNeedCheck int `json:"showTopicNeedCheck"`
|
||||||
|
ShowTransferSet int `json:"showTransferSet"`
|
||||||
|
} `json:"groupAuth"`
|
||||||
|
OperationAuth struct {
|
||||||
|
Add int `json:"add"`
|
||||||
|
AddTopicToFolder int `json:"addTopicToFolder"`
|
||||||
|
ChoiceSet int `json:"choiceSet"`
|
||||||
|
DelTopicFromFolder int `json:"delTopicFromFolder"`
|
||||||
|
Delete int `json:"delete"`
|
||||||
|
Reply int `json:"reply"`
|
||||||
|
ScoreSet int `json:"scoreSet"`
|
||||||
|
TopSet int `json:"topSet"`
|
||||||
|
Update int `json:"update"`
|
||||||
|
} `json:"operationAuth"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// 手机端学习通上传的文件的json内容(content字段)与网页端上传的有所不同
|
||||||
|
// 网页端json `"puid": 54321, "size": 12345`
|
||||||
|
// 手机端json `"puid": "54321". "size": "12345"`
|
||||||
|
type int_str int
|
||||||
|
|
||||||
|
// json 字符串数字和纯数字解析
|
||||||
|
func (ios *int_str) UnmarshalJSON(data []byte) error {
|
||||||
|
intValue, err := strconv.Atoi(string(bytes.Trim(data, "\"")))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
*ios = int_str(intValue)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type File struct {
|
||||||
|
Cataid int `json:"cataid"`
|
||||||
|
Cfid int `json:"cfid"`
|
||||||
|
Content struct {
|
||||||
|
Cfid int `json:"cfid"`
|
||||||
|
Pid int `json:"pid"`
|
||||||
|
FolderName string `json:"folderName"`
|
||||||
|
ShareType int `json:"shareType"`
|
||||||
|
Preview string `json:"preview"`
|
||||||
|
Filetype string `json:"filetype"`
|
||||||
|
PreviewURL string `json:"previewUrl"`
|
||||||
|
IsImg bool `json:"isImg"`
|
||||||
|
ParentPath string `json:"parentPath"`
|
||||||
|
Icon string `json:"icon"`
|
||||||
|
Suffix string `json:"suffix"`
|
||||||
|
Duration int `json:"duration"`
|
||||||
|
Pantype string `json:"pantype"`
|
||||||
|
Puid int_str `json:"puid"`
|
||||||
|
Filepath string `json:"filepath"`
|
||||||
|
Crc string `json:"crc"`
|
||||||
|
Isfile bool `json:"isfile"`
|
||||||
|
Residstr string `json:"residstr"`
|
||||||
|
ObjectID string `json:"objectId"`
|
||||||
|
Extinfo string `json:"extinfo"`
|
||||||
|
Thumbnail string `json:"thumbnail"`
|
||||||
|
Creator int `json:"creator"`
|
||||||
|
ResTypeValue int `json:"resTypeValue"`
|
||||||
|
UploadDateFormat string `json:"uploadDateFormat"`
|
||||||
|
DisableOpt bool `json:"disableOpt"`
|
||||||
|
DownPath string `json:"downPath"`
|
||||||
|
Sort int `json:"sort"`
|
||||||
|
Topsort int `json:"topsort"`
|
||||||
|
Restype string `json:"restype"`
|
||||||
|
Size int_str `json:"size"`
|
||||||
|
UploadDate int64 `json:"uploadDate"`
|
||||||
|
FileSize string `json:"fileSize"`
|
||||||
|
Name string `json:"name"`
|
||||||
|
FileID string `json:"fileId"`
|
||||||
|
} `json:"content"`
|
||||||
|
CreatorID int `json:"creatorId"`
|
||||||
|
DesID string `json:"des_id"`
|
||||||
|
ID int `json:"id"`
|
||||||
|
Inserttime int64 `json:"inserttime"`
|
||||||
|
Key string `json:"key"`
|
||||||
|
Norder int `json:"norder"`
|
||||||
|
OwnerID int `json:"ownerId"`
|
||||||
|
OwnerType int `json:"ownerType"`
|
||||||
|
Path string `json:"path"`
|
||||||
|
Rid int `json:"rid"`
|
||||||
|
Status int `json:"status"`
|
||||||
|
Topsign int `json:"topsign"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type ListFileResp struct {
|
||||||
|
Msg string `json:"msg"`
|
||||||
|
Result int `json:"result"`
|
||||||
|
Status bool `json:"status"`
|
||||||
|
UserAuth UserAuth `json:"userAuth"`
|
||||||
|
List []File `json:"list"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type DownResp struct {
|
||||||
|
Msg string `json:"msg"`
|
||||||
|
Duration int `json:"duration"`
|
||||||
|
Download string `json:"download"`
|
||||||
|
FileStatus string `json:"fileStatus"`
|
||||||
|
URL string `json:"url"`
|
||||||
|
Status bool `json:"status"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type UploadDataRsp struct {
|
||||||
|
Result int `json:"result"`
|
||||||
|
Msg struct {
|
||||||
|
Puid int `json:"puid"`
|
||||||
|
Token string `json:"token"`
|
||||||
|
} `json:"msg"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type UploadFileDataRsp struct {
|
||||||
|
Result bool `json:"result"`
|
||||||
|
Msg string `json:"msg"`
|
||||||
|
Crc string `json:"crc"`
|
||||||
|
ObjectID string `json:"objectId"`
|
||||||
|
Resid int64 `json:"resid"`
|
||||||
|
Puid int `json:"puid"`
|
||||||
|
Data struct {
|
||||||
|
DisableOpt bool `json:"disableOpt"`
|
||||||
|
Resid int64 `json:"resid"`
|
||||||
|
Crc string `json:"crc"`
|
||||||
|
Puid int `json:"puid"`
|
||||||
|
Isfile bool `json:"isfile"`
|
||||||
|
Pantype string `json:"pantype"`
|
||||||
|
Size int `json:"size"`
|
||||||
|
Name string `json:"name"`
|
||||||
|
ObjectID string `json:"objectId"`
|
||||||
|
Restype string `json:"restype"`
|
||||||
|
UploadDate time.Time `json:"uploadDate"`
|
||||||
|
ModifyDate time.Time `json:"modifyDate"`
|
||||||
|
UploadDateFormat string `json:"uploadDateFormat"`
|
||||||
|
Residstr string `json:"residstr"`
|
||||||
|
Suffix string `json:"suffix"`
|
||||||
|
Preview string `json:"preview"`
|
||||||
|
Thumbnail string `json:"thumbnail"`
|
||||||
|
Creator int `json:"creator"`
|
||||||
|
Duration int `json:"duration"`
|
||||||
|
IsImg bool `json:"isImg"`
|
||||||
|
PreviewURL string `json:"previewUrl"`
|
||||||
|
Filetype string `json:"filetype"`
|
||||||
|
Filepath string `json:"filepath"`
|
||||||
|
Sort int `json:"sort"`
|
||||||
|
Topsort int `json:"topsort"`
|
||||||
|
ResTypeValue int `json:"resTypeValue"`
|
||||||
|
Extinfo string `json:"extinfo"`
|
||||||
|
} `json:"data"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type UploadDoneParam struct {
|
||||||
|
Cataid string `json:"cataid"`
|
||||||
|
Key string `json:"key"`
|
||||||
|
Param struct {
|
||||||
|
DisableOpt bool `json:"disableOpt"`
|
||||||
|
Resid int64 `json:"resid"`
|
||||||
|
Crc string `json:"crc"`
|
||||||
|
Puid int `json:"puid"`
|
||||||
|
Isfile bool `json:"isfile"`
|
||||||
|
Pantype string `json:"pantype"`
|
||||||
|
Size int `json:"size"`
|
||||||
|
Name string `json:"name"`
|
||||||
|
ObjectID string `json:"objectId"`
|
||||||
|
Restype string `json:"restype"`
|
||||||
|
UploadDate time.Time `json:"uploadDate"`
|
||||||
|
ModifyDate time.Time `json:"modifyDate"`
|
||||||
|
UploadDateFormat string `json:"uploadDateFormat"`
|
||||||
|
Residstr string `json:"residstr"`
|
||||||
|
Suffix string `json:"suffix"`
|
||||||
|
Preview string `json:"preview"`
|
||||||
|
Thumbnail string `json:"thumbnail"`
|
||||||
|
Creator int `json:"creator"`
|
||||||
|
Duration int `json:"duration"`
|
||||||
|
IsImg bool `json:"isImg"`
|
||||||
|
PreviewURL string `json:"previewUrl"`
|
||||||
|
Filetype string `json:"filetype"`
|
||||||
|
Filepath string `json:"filepath"`
|
||||||
|
Sort int `json:"sort"`
|
||||||
|
Topsort int `json:"topsort"`
|
||||||
|
ResTypeValue int `json:"resTypeValue"`
|
||||||
|
Extinfo string `json:"extinfo"`
|
||||||
|
} `json:"param"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func fileToObj(f File) *model.Object {
|
||||||
|
if len(f.Content.FolderName) > 0 {
|
||||||
|
return &model.Object{
|
||||||
|
ID: fmt.Sprintf("%d", f.ID),
|
||||||
|
Name: f.Content.FolderName,
|
||||||
|
Size: 0,
|
||||||
|
Modified: time.UnixMilli(f.Inserttime),
|
||||||
|
IsFolder: true,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
paserTime := time.UnixMilli(f.Content.UploadDate)
|
||||||
|
return &model.Object{
|
||||||
|
ID: fmt.Sprintf("%d$%s", f.ID, f.Content.FileID),
|
||||||
|
Name: f.Content.Name,
|
||||||
|
Size: int64(f.Content.Size),
|
||||||
|
Modified: paserTime,
|
||||||
|
IsFolder: false,
|
||||||
|
}
|
||||||
|
}
|
183
drivers/chaoxing/util.go
Normal file
183
drivers/chaoxing/util.go
Normal file
@ -0,0 +1,183 @@
|
|||||||
|
package chaoxing
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"crypto/aes"
|
||||||
|
"crypto/cipher"
|
||||||
|
"encoding/base64"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"mime/multipart"
|
||||||
|
"net/http"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/alist-org/alist/v3/drivers/base"
|
||||||
|
"github.com/go-resty/resty/v2"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (d *ChaoXing) requestDownload(pathname string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
||||||
|
u := d.conf.DowloadApi + pathname
|
||||||
|
req := base.RestyClient.R()
|
||||||
|
req.SetHeaders(map[string]string{
|
||||||
|
"Cookie": d.Cookie,
|
||||||
|
"Accept": "application/json, text/plain, */*",
|
||||||
|
"Referer": d.conf.referer,
|
||||||
|
})
|
||||||
|
if callback != nil {
|
||||||
|
callback(req)
|
||||||
|
}
|
||||||
|
if resp != nil {
|
||||||
|
req.SetResult(resp)
|
||||||
|
}
|
||||||
|
var e Resp
|
||||||
|
req.SetError(&e)
|
||||||
|
res, err := req.Execute(method, u)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return res.Body(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ChaoXing) request(pathname string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
||||||
|
u := d.conf.api + pathname
|
||||||
|
if strings.Contains(pathname, "getUploadConfig") {
|
||||||
|
u = pathname
|
||||||
|
}
|
||||||
|
req := base.RestyClient.R()
|
||||||
|
req.SetHeaders(map[string]string{
|
||||||
|
"Cookie": d.Cookie,
|
||||||
|
"Accept": "application/json, text/plain, */*",
|
||||||
|
"Referer": d.conf.referer,
|
||||||
|
})
|
||||||
|
if callback != nil {
|
||||||
|
callback(req)
|
||||||
|
}
|
||||||
|
if resp != nil {
|
||||||
|
req.SetResult(resp)
|
||||||
|
}
|
||||||
|
var e Resp
|
||||||
|
req.SetError(&e)
|
||||||
|
res, err := req.Execute(method, u)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return res.Body(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ChaoXing) GetFiles(parent string) ([]File, error) {
|
||||||
|
files := make([]File, 0)
|
||||||
|
query := map[string]string{
|
||||||
|
"bbsid": d.Addition.Bbsid,
|
||||||
|
"folderId": parent,
|
||||||
|
"recType": "1",
|
||||||
|
}
|
||||||
|
var resp ListFileResp
|
||||||
|
_, err := d.request("/pc/resource/getResourceList", http.MethodGet, func(req *resty.Request) {
|
||||||
|
req.SetQueryParams(query)
|
||||||
|
}, &resp)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if resp.Result != 1 {
|
||||||
|
msg := fmt.Sprintf("error code is:%d", resp.Result)
|
||||||
|
return nil, errors.New(msg)
|
||||||
|
}
|
||||||
|
if len(resp.List) > 0 {
|
||||||
|
files = append(files, resp.List...)
|
||||||
|
}
|
||||||
|
querys := map[string]string{
|
||||||
|
"bbsid": d.Addition.Bbsid,
|
||||||
|
"folderId": parent,
|
||||||
|
"recType": "2",
|
||||||
|
}
|
||||||
|
var resps ListFileResp
|
||||||
|
_, err = d.request("/pc/resource/getResourceList", http.MethodGet, func(req *resty.Request) {
|
||||||
|
req.SetQueryParams(querys)
|
||||||
|
}, &resps)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
for _, file := range resps.List {
|
||||||
|
// 手机端超星上传的文件没有fileID字段,但ObjectID与fileID相同,可代替
|
||||||
|
if file.Content.FileID == "" {
|
||||||
|
file.Content.FileID = file.Content.ObjectID
|
||||||
|
}
|
||||||
|
files = append(files, file)
|
||||||
|
}
|
||||||
|
return files, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// EncryptByAES encrypts message with AES-CBC using key, applying PKCS#7
// padding, and returns the ciphertext base64-encoded. The IV is the first
// block of the key, matching what the remote login endpoint expects.
func EncryptByAES(message, key string) (string, error) {
	aesKey := []byte(key)
	block, err := aes.NewCipher(aesKey)
	if err != nil {
		return "", err
	}
	plain := []byte(message)
	// PKCS#7: pad with padLen copies of byte(padLen); always at least one.
	padLen := aes.BlockSize - len(plain)%aes.BlockSize
	padded := append(plain, bytes.Repeat([]byte{byte(padLen)}, padLen)...)
	ciphertext := make([]byte, len(padded))
	cipher.NewCBCEncrypter(block, aesKey[:aes.BlockSize]).CryptBlocks(ciphertext, padded)
	return base64.StdEncoding.EncodeToString(ciphertext), nil
}
|
||||||
|
|
||||||
|
func CookiesToString(cookies []*http.Cookie) string {
|
||||||
|
var cookieStr string
|
||||||
|
for _, cookie := range cookies {
|
||||||
|
cookieStr += cookie.Name + "=" + cookie.Value + "; "
|
||||||
|
}
|
||||||
|
if len(cookieStr) > 2 {
|
||||||
|
cookieStr = cookieStr[:len(cookieStr)-2]
|
||||||
|
}
|
||||||
|
return cookieStr
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ChaoXing) Login() (string, error) {
|
||||||
|
transferKey := "u2oh6Vu^HWe4_AES"
|
||||||
|
body := &bytes.Buffer{}
|
||||||
|
writer := multipart.NewWriter(body)
|
||||||
|
uname, err := EncryptByAES(d.Addition.UserName, transferKey)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
password, err := EncryptByAES(d.Addition.Password, transferKey)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
err = writer.WriteField("uname", uname)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
err = writer.WriteField("password", password)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
err = writer.WriteField("t", "true")
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
err = writer.Close()
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
// Create the request
|
||||||
|
req, err := http.NewRequest("POST", "https://passport2.chaoxing.com/fanyalogin", body)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
req.Header.Set("Content-Type", writer.FormDataContentType())
|
||||||
|
req.Header.Set("Content-Length", fmt.Sprintf("%d", body.Len()))
|
||||||
|
resp, err := http.DefaultClient.Do(req)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
return CookiesToString(resp.Cookies()), nil
|
||||||
|
|
||||||
|
}
|
@ -53,6 +53,14 @@ func (d *Cloudreve) List(ctx context.Context, dir model.Obj, args model.ListArgs
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
if src.Type == "dir" && d.EnableThumbAndFolderSize {
|
||||||
|
var dprop DirectoryProp
|
||||||
|
err = d.request(http.MethodGet, "/object/property/"+src.Id+"?is_folder=true", nil, &dprop)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
src.Size = dprop.Size
|
||||||
|
}
|
||||||
return objectToObj(src, thumb), nil
|
return objectToObj(src, thumb), nil
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -9,11 +9,12 @@ type Addition struct {
|
|||||||
// Usually one of two
|
// Usually one of two
|
||||||
driver.RootPath
|
driver.RootPath
|
||||||
// define other
|
// define other
|
||||||
Address string `json:"address" required:"true"`
|
Address string `json:"address" required:"true"`
|
||||||
Username string `json:"username"`
|
Username string `json:"username"`
|
||||||
Password string `json:"password"`
|
Password string `json:"password"`
|
||||||
Cookie string `json:"cookie"`
|
Cookie string `json:"cookie"`
|
||||||
CustomUA string `json:"custom_ua"`
|
CustomUA string `json:"custom_ua"`
|
||||||
|
EnableThumbAndFolderSize bool `json:"enable_thumb_and_folder_size"`
|
||||||
}
|
}
|
||||||
|
|
||||||
var config = driver.Config{
|
var config = driver.Config{
|
||||||
|
@ -44,6 +44,10 @@ type Object struct {
|
|||||||
SourceEnabled bool `json:"source_enabled"`
|
SourceEnabled bool `json:"source_enabled"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type DirectoryProp struct {
|
||||||
|
Size int `json:"size"`
|
||||||
|
}
|
||||||
|
|
||||||
func objectToObj(f Object, t model.Thumbnail) *model.ObjThumb {
|
func objectToObj(f Object, t model.Thumbnail) *model.ObjThumb {
|
||||||
return &model.ObjThumb{
|
return &model.ObjThumb{
|
||||||
Object: model.Object{
|
Object: model.Object{
|
||||||
|
@ -151,6 +151,9 @@ func convertSrc(obj model.Obj) map[string]interface{} {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (d *Cloudreve) GetThumb(file Object) (model.Thumbnail, error) {
|
func (d *Cloudreve) GetThumb(file Object) (model.Thumbnail, error) {
|
||||||
|
if !d.Addition.EnableThumbAndFolderSize {
|
||||||
|
return model.Thumbnail{}, nil
|
||||||
|
}
|
||||||
ua := d.CustomUA
|
ua := d.CustomUA
|
||||||
if ua == "" {
|
if ua == "" {
|
||||||
ua = base.UserAgent
|
ua = base.UserAgent
|
||||||
|
@ -124,6 +124,9 @@ func (d *Crypt) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([
|
|||||||
//filter illegal files
|
//filter illegal files
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
if !d.ShowHidden && strings.HasPrefix(name, ".") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
objRes := model.Object{
|
objRes := model.Object{
|
||||||
Name: name,
|
Name: name,
|
||||||
Size: 0,
|
Size: 0,
|
||||||
@ -145,6 +148,9 @@ func (d *Crypt) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([
|
|||||||
//filter illegal files
|
//filter illegal files
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
if !d.ShowHidden && strings.HasPrefix(name, ".") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
objRes := model.Object{
|
objRes := model.Object{
|
||||||
Name: name,
|
Name: name,
|
||||||
Size: size,
|
Size: size,
|
||||||
|
@ -21,6 +21,8 @@ type Addition struct {
|
|||||||
FileNameEncoding string `json:"filename_encoding" type:"select" required:"true" options:"base64,base32,base32768" default:"base64" help:"for advanced user only!"`
|
FileNameEncoding string `json:"filename_encoding" type:"select" required:"true" options:"base64,base32,base32768" default:"base64" help:"for advanced user only!"`
|
||||||
|
|
||||||
Thumbnail bool `json:"thumbnail" required:"true" default:"false" help:"enable thumbnail which pre-generated under .thumbnails folder"`
|
Thumbnail bool `json:"thumbnail" required:"true" default:"false" help:"enable thumbnail which pre-generated under .thumbnails folder"`
|
||||||
|
|
||||||
|
ShowHidden bool `json:"show_hidden" default:"true" required:"false" help:"show hidden directories and files"`
|
||||||
}
|
}
|
||||||
|
|
||||||
var config = driver.Config{
|
var config = driver.Config{
|
||||||
|
@ -45,7 +45,25 @@ func (d *Dropbox) Init(ctx context.Context) error {
|
|||||||
if result != query {
|
if result != query {
|
||||||
return fmt.Errorf("failed to check user: %s", string(res))
|
return fmt.Errorf("failed to check user: %s", string(res))
|
||||||
}
|
}
|
||||||
return nil
|
d.RootNamespaceId, err = d.GetRootNamespaceId(ctx)
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Dropbox) GetRootNamespaceId(ctx context.Context) (string, error) {
|
||||||
|
res, err := d.request("/2/users/get_current_account", http.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetBody(nil)
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
var currentAccountResp CurrentAccountResp
|
||||||
|
err = utils.Json.Unmarshal(res, ¤tAccountResp)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
rootNamespaceId := currentAccountResp.RootInfo.RootNamespaceId
|
||||||
|
return rootNamespaceId, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Dropbox) Drop(ctx context.Context) error {
|
func (d *Dropbox) Drop(ctx context.Context) error {
|
||||||
@ -203,7 +221,7 @@ func (d *Dropbox) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
|
|||||||
_ = res.Body.Close()
|
_ = res.Body.Close()
|
||||||
|
|
||||||
if count > 0 {
|
if count > 0 {
|
||||||
up((i + 1) * 100 / count)
|
up(float64(i+1) * 100 / float64(count))
|
||||||
}
|
}
|
||||||
|
|
||||||
offset += byteSize
|
offset += byteSize
|
||||||
|
@ -17,7 +17,8 @@ type Addition struct {
|
|||||||
ClientID string `json:"client_id" required:"false" help:"Keep it empty if you don't have one"`
|
ClientID string `json:"client_id" required:"false" help:"Keep it empty if you don't have one"`
|
||||||
ClientSecret string `json:"client_secret" required:"false" help:"Keep it empty if you don't have one"`
|
ClientSecret string `json:"client_secret" required:"false" help:"Keep it empty if you don't have one"`
|
||||||
|
|
||||||
AccessToken string
|
AccessToken string
|
||||||
|
RootNamespaceId string
|
||||||
}
|
}
|
||||||
|
|
||||||
var config = driver.Config{
|
var config = driver.Config{
|
||||||
|
@ -23,6 +23,13 @@ type RefreshTokenErrorResp struct {
|
|||||||
ErrorDescription string `json:"error_description"`
|
ErrorDescription string `json:"error_description"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type CurrentAccountResp struct {
|
||||||
|
RootInfo struct {
|
||||||
|
RootNamespaceId string `json:"root_namespace_id"`
|
||||||
|
HomeNamespaceId string `json:"home_namespace_id"`
|
||||||
|
} `json:"root_info"`
|
||||||
|
}
|
||||||
|
|
||||||
type File struct {
|
type File struct {
|
||||||
Tag string `json:".tag"`
|
Tag string `json:".tag"`
|
||||||
Name string `json:"name"`
|
Name string `json:"name"`
|
||||||
|
@ -46,12 +46,22 @@ func (d *Dropbox) refreshToken() error {
|
|||||||
func (d *Dropbox) request(uri, method string, callback base.ReqCallback, retry ...bool) ([]byte, error) {
|
func (d *Dropbox) request(uri, method string, callback base.ReqCallback, retry ...bool) ([]byte, error) {
|
||||||
req := base.RestyClient.R()
|
req := base.RestyClient.R()
|
||||||
req.SetHeader("Authorization", "Bearer "+d.AccessToken)
|
req.SetHeader("Authorization", "Bearer "+d.AccessToken)
|
||||||
if method == http.MethodPost {
|
if d.RootNamespaceId != "" {
|
||||||
req.SetHeader("Content-Type", "application/json")
|
apiPathRootJson, err := utils.Json.MarshalToString(map[string]interface{}{
|
||||||
|
".tag": "root",
|
||||||
|
"root": d.RootNamespaceId,
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
req.SetHeader("Dropbox-API-Path-Root", apiPathRootJson)
|
||||||
}
|
}
|
||||||
if callback != nil {
|
if callback != nil {
|
||||||
callback(req)
|
callback(req)
|
||||||
}
|
}
|
||||||
|
if method == http.MethodPost && req.Body != nil {
|
||||||
|
req.SetHeader("Content-Type", "application/json")
|
||||||
|
}
|
||||||
var e ErrorResp
|
var e ErrorResp
|
||||||
req.SetError(&e)
|
req.SetError(&e)
|
||||||
res, err := req.Execute(method, d.base+uri)
|
res, err := req.Execute(method, d.base+uri)
|
||||||
|
@ -5,7 +5,6 @@ import (
|
|||||||
"crypto/x509"
|
"crypto/x509"
|
||||||
"encoding/pem"
|
"encoding/pem"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io/ioutil"
|
|
||||||
"net/http"
|
"net/http"
|
||||||
"os"
|
"os"
|
||||||
"regexp"
|
"regexp"
|
||||||
@ -44,7 +43,7 @@ func (d *GoogleDrive) refreshToken() error {
|
|||||||
gdsaFileThis := d.RefreshToken
|
gdsaFileThis := d.RefreshToken
|
||||||
if gdsaFile.IsDir() {
|
if gdsaFile.IsDir() {
|
||||||
if len(d.ServiceAccountFileList) <= 0 {
|
if len(d.ServiceAccountFileList) <= 0 {
|
||||||
gdsaReadDir, gdsaDirErr := ioutil.ReadDir(d.RefreshToken)
|
gdsaReadDir, gdsaDirErr := os.ReadDir(d.RefreshToken)
|
||||||
if gdsaDirErr != nil {
|
if gdsaDirErr != nil {
|
||||||
log.Error("read dir fail")
|
log.Error("read dir fail")
|
||||||
return gdsaDirErr
|
return gdsaDirErr
|
||||||
@ -76,7 +75,7 @@ func (d *GoogleDrive) refreshToken() error {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
gdsaFileThisContent, err := ioutil.ReadFile(gdsaFileThis)
|
gdsaFileThisContent, err := os.ReadFile(gdsaFileThis)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
@ -58,9 +58,33 @@ func (d *GooglePhoto) Link(ctx context.Context, file model.Obj, args model.LinkA
|
|||||||
URL: f.BaseURL + "=d",
|
URL: f.BaseURL + "=d",
|
||||||
}, nil
|
}, nil
|
||||||
} else if strings.Contains(f.MimeType, "video/") {
|
} else if strings.Contains(f.MimeType, "video/") {
|
||||||
return &model.Link{
|
var width, height int
|
||||||
URL: f.BaseURL + "=dv",
|
|
||||||
}, nil
|
fmt.Sscanf(f.MediaMetadata.Width, "%d", &width)
|
||||||
|
fmt.Sscanf(f.MediaMetadata.Height, "%d", &height)
|
||||||
|
|
||||||
|
switch {
|
||||||
|
// 1080P
|
||||||
|
case width == 1920 && height == 1080:
|
||||||
|
return &model.Link{
|
||||||
|
URL: f.BaseURL + "=m37",
|
||||||
|
}, nil
|
||||||
|
// 720P
|
||||||
|
case width == 1280 && height == 720:
|
||||||
|
return &model.Link{
|
||||||
|
URL: f.BaseURL + "=m22",
|
||||||
|
}, nil
|
||||||
|
// 360P
|
||||||
|
case width == 640 && height == 360:
|
||||||
|
return &model.Link{
|
||||||
|
URL: f.BaseURL + "=m18",
|
||||||
|
}, nil
|
||||||
|
default:
|
||||||
|
return &model.Link{
|
||||||
|
URL: f.BaseURL + "=dv",
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
return &model.Link{}, nil
|
return &model.Link{}, nil
|
||||||
}
|
}
|
||||||
|
@ -151,7 +151,7 @@ func (d *GooglePhoto) getMedia(id string) (MediaItem, error) {
|
|||||||
var resp MediaItem
|
var resp MediaItem
|
||||||
|
|
||||||
query := map[string]string{
|
query := map[string]string{
|
||||||
"fields": "baseUrl,mimeType",
|
"fields": "mediaMetadata,baseUrl,mimeType",
|
||||||
}
|
}
|
||||||
_, err := d.request(fmt.Sprintf("https://photoslibrary.googleapis.com/v1/mediaItems/%s", id), http.MethodGet, func(req *resty.Request) {
|
_, err := d.request(fmt.Sprintf("https://photoslibrary.googleapis.com/v1/mediaItems/%s", id), http.MethodGet, func(req *resty.Request) {
|
||||||
req.SetQueryParams(query)
|
req.SetQueryParams(query)
|
||||||
|
381
drivers/ilanzou/driver.go
Normal file
381
drivers/ilanzou/driver.go
Normal file
@ -0,0 +1,381 @@
|
|||||||
|
package template
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"crypto/md5"
|
||||||
|
"encoding/base64"
|
||||||
|
"encoding/hex"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"net/url"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/alist-org/alist/v3/drivers/base"
|
||||||
|
"github.com/alist-org/alist/v3/internal/driver"
|
||||||
|
"github.com/alist-org/alist/v3/internal/errs"
|
||||||
|
"github.com/alist-org/alist/v3/internal/model"
|
||||||
|
"github.com/alist-org/alist/v3/pkg/utils"
|
||||||
|
"github.com/foxxorcat/mopan-sdk-go"
|
||||||
|
"github.com/go-resty/resty/v2"
|
||||||
|
log "github.com/sirupsen/logrus"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ILanZou struct {
|
||||||
|
model.Storage
|
||||||
|
Addition
|
||||||
|
|
||||||
|
userID string
|
||||||
|
account string
|
||||||
|
upClient *resty.Client
|
||||||
|
conf Conf
|
||||||
|
config driver.Config
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ILanZou) Config() driver.Config {
|
||||||
|
return d.config
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ILanZou) GetAddition() driver.Additional {
|
||||||
|
return &d.Addition
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ILanZou) Init(ctx context.Context) error {
|
||||||
|
d.upClient = base.NewRestyClient().SetTimeout(time.Minute * 10)
|
||||||
|
if d.UUID == "" {
|
||||||
|
res, err := d.unproved("/getUuid", http.MethodGet, nil)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
d.UUID = utils.Json.Get(res, "uuid").ToString()
|
||||||
|
}
|
||||||
|
res, err := d.proved("/user/account/map", http.MethodGet, nil)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
d.userID = utils.Json.Get(res, "map", "userId").ToString()
|
||||||
|
d.account = utils.Json.Get(res, "map", "account").ToString()
|
||||||
|
log.Debugf("[ilanzou] init response: %s", res)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ILanZou) Drop(ctx context.Context) error {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ILanZou) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||||
|
offset := 1
|
||||||
|
limit := 60
|
||||||
|
var res []ListItem
|
||||||
|
for {
|
||||||
|
var resp ListResp
|
||||||
|
_, err := d.proved("/record/file/list", http.MethodGet, func(req *resty.Request) {
|
||||||
|
req.SetQueryParams(map[string]string{
|
||||||
|
"type": "0",
|
||||||
|
"folderId": dir.GetID(),
|
||||||
|
"offset": strconv.Itoa(offset),
|
||||||
|
"limit": strconv.Itoa(limit),
|
||||||
|
}).SetResult(&resp)
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
res = append(res, resp.List...)
|
||||||
|
if resp.TotalPage <= resp.Offset {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
offset++
|
||||||
|
}
|
||||||
|
return utils.SliceConvert(res, func(f ListItem) (model.Obj, error) {
|
||||||
|
updTime, err := time.ParseInLocation("2006-01-02 15:04:05", f.UpdTime, time.Local)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
obj := model.Object{
|
||||||
|
ID: strconv.FormatInt(f.FileId, 10),
|
||||||
|
//Path: "",
|
||||||
|
Name: f.FileName,
|
||||||
|
Size: f.FileSize * 1024,
|
||||||
|
Modified: updTime,
|
||||||
|
Ctime: updTime,
|
||||||
|
IsFolder: false,
|
||||||
|
//HashInfo: utils.HashInfo{},
|
||||||
|
}
|
||||||
|
if f.FileType == 2 {
|
||||||
|
obj.IsFolder = true
|
||||||
|
obj.Size = 0
|
||||||
|
obj.ID = strconv.FormatInt(f.FolderId, 10)
|
||||||
|
obj.Name = f.FolderName
|
||||||
|
}
|
||||||
|
return &obj, nil
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ILanZou) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||||
|
u, err := url.Parse(d.conf.base + "/" + d.conf.unproved + "/file/redirect")
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
query := u.Query()
|
||||||
|
query.Set("uuid", d.UUID)
|
||||||
|
query.Set("devType", "6")
|
||||||
|
query.Set("devCode", d.UUID)
|
||||||
|
query.Set("devModel", "chrome")
|
||||||
|
query.Set("devVersion", d.conf.devVersion)
|
||||||
|
query.Set("appVersion", "")
|
||||||
|
ts, err := getTimestamp(d.conf.secret)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
query.Set("timestamp", ts)
|
||||||
|
query.Set("appToken", d.Token)
|
||||||
|
query.Set("enable", "1")
|
||||||
|
downloadId, err := mopan.AesEncrypt([]byte(fmt.Sprintf("%s|%s", file.GetID(), d.userID)), d.conf.secret)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
query.Set("downloadId", hex.EncodeToString(downloadId))
|
||||||
|
auth, err := mopan.AesEncrypt([]byte(fmt.Sprintf("%s|%d", file.GetID(), time.Now().UnixMilli())), d.conf.secret)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
query.Set("auth", hex.EncodeToString(auth))
|
||||||
|
u.RawQuery = query.Encode()
|
||||||
|
realURL := u.String()
|
||||||
|
// get the url after redirect
|
||||||
|
res, err := base.NoRedirectClient.R().SetHeaders(map[string]string{
|
||||||
|
//"Origin": d.conf.site,
|
||||||
|
"Referer": d.conf.site + "/",
|
||||||
|
}).Get(realURL)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if res.StatusCode() == 302 {
|
||||||
|
realURL = res.Header().Get("location")
|
||||||
|
} else {
|
||||||
|
return nil, fmt.Errorf("redirect failed, status: %d", res.StatusCode())
|
||||||
|
}
|
||||||
|
link := model.Link{URL: realURL}
|
||||||
|
return &link, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ILanZou) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
|
||||||
|
res, err := d.proved("/file/folder/save", http.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetBody(base.Json{
|
||||||
|
"folderDesc": "",
|
||||||
|
"folderId": parentDir.GetID(),
|
||||||
|
"folderName": dirName,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &model.Object{
|
||||||
|
ID: utils.Json.Get(res, "list", "0", "id").ToString(),
|
||||||
|
//Path: "",
|
||||||
|
Name: dirName,
|
||||||
|
Size: 0,
|
||||||
|
Modified: time.Now(),
|
||||||
|
Ctime: time.Now(),
|
||||||
|
IsFolder: true,
|
||||||
|
//HashInfo: utils.HashInfo{},
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ILanZou) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
|
||||||
|
var fileIds, folderIds []string
|
||||||
|
if srcObj.IsDir() {
|
||||||
|
folderIds = []string{srcObj.GetID()}
|
||||||
|
} else {
|
||||||
|
fileIds = []string{srcObj.GetID()}
|
||||||
|
}
|
||||||
|
_, err := d.proved("/file/folder/move", http.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetBody(base.Json{
|
||||||
|
"folderIds": strings.Join(folderIds, ","),
|
||||||
|
"fileIds": strings.Join(fileIds, ","),
|
||||||
|
"targetId": dstDir.GetID(),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return srcObj, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ILanZou) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
|
||||||
|
var err error
|
||||||
|
if srcObj.IsDir() {
|
||||||
|
_, err = d.proved("/file/folder/edit", http.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetBody(base.Json{
|
||||||
|
"folderDesc": "",
|
||||||
|
"folderId": srcObj.GetID(),
|
||||||
|
"folderName": newName,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
_, err = d.proved("/file/edit", http.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetBody(base.Json{
|
||||||
|
"fileDesc": "",
|
||||||
|
"fileId": srcObj.GetID(),
|
||||||
|
"fileName": newName,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &model.Object{
|
||||||
|
ID: srcObj.GetID(),
|
||||||
|
//Path: "",
|
||||||
|
Name: newName,
|
||||||
|
Size: srcObj.GetSize(),
|
||||||
|
Modified: time.Now(),
|
||||||
|
Ctime: srcObj.CreateTime(),
|
||||||
|
IsFolder: srcObj.IsDir(),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ILanZou) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
|
||||||
|
// TODO copy obj, optional
|
||||||
|
return nil, errs.NotImplement
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ILanZou) Remove(ctx context.Context, obj model.Obj) error {
|
||||||
|
var fileIds, folderIds []string
|
||||||
|
if obj.IsDir() {
|
||||||
|
folderIds = []string{obj.GetID()}
|
||||||
|
} else {
|
||||||
|
fileIds = []string{obj.GetID()}
|
||||||
|
}
|
||||||
|
_, err := d.proved("/file/delete", http.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetBody(base.Json{
|
||||||
|
"folderIds": strings.Join(folderIds, ","),
|
||||||
|
"fileIds": strings.Join(fileIds, ","),
|
||||||
|
"status": 0,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
const DefaultPartSize = 1024 * 1024 * 8
|
||||||
|
|
||||||
|
func (d *ILanZou) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
|
||||||
|
h := md5.New()
|
||||||
|
// need to calculate md5 of the full content
|
||||||
|
tempFile, err := stream.CacheFullInTempFile()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer func() {
|
||||||
|
_ = tempFile.Close()
|
||||||
|
}()
|
||||||
|
if _, err = io.Copy(h, tempFile); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
_, err = tempFile.Seek(0, io.SeekStart)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
etag := hex.EncodeToString(h.Sum(nil))
|
||||||
|
// get upToken
|
||||||
|
res, err := d.proved("/7n/getUpToken", http.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetBody(base.Json{
|
||||||
|
"fileId": "",
|
||||||
|
"fileName": stream.GetName(),
|
||||||
|
"fileSize": stream.GetSize() / 1024,
|
||||||
|
"folderId": dstDir.GetID(),
|
||||||
|
"md5": etag,
|
||||||
|
"type": 1,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
upToken := utils.Json.Get(res, "upToken").ToString()
|
||||||
|
now := time.Now()
|
||||||
|
key := fmt.Sprintf("disk/%d/%d/%d/%s/%016d", now.Year(), now.Month(), now.Day(), d.account, now.UnixMilli())
|
||||||
|
var token string
|
||||||
|
if stream.GetSize() <= DefaultPartSize {
|
||||||
|
res, err := d.upClient.R().SetMultipartFormData(map[string]string{
|
||||||
|
"token": upToken,
|
||||||
|
"key": key,
|
||||||
|
"fname": stream.GetName(),
|
||||||
|
}).SetMultipartField("file", stream.GetName(), stream.GetMimetype(), tempFile).
|
||||||
|
Post("https://upload.qiniup.com/")
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
token = utils.Json.Get(res.Body(), "token").ToString()
|
||||||
|
} else {
|
||||||
|
keyBase64 := base64.URLEncoding.EncodeToString([]byte(key))
|
||||||
|
res, err := d.upClient.R().SetHeader("Authorization", "UpToken "+upToken).Post(fmt.Sprintf("https://upload.qiniup.com/buckets/%s/objects/%s/uploads", d.conf.bucket, keyBase64))
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
uploadId := utils.Json.Get(res.Body(), "uploadId").ToString()
|
||||||
|
parts := make([]Part, 0)
|
||||||
|
partNum := (stream.GetSize() + DefaultPartSize - 1) / DefaultPartSize
|
||||||
|
for i := 1; i <= int(partNum); i++ {
|
||||||
|
u := fmt.Sprintf("https://upload.qiniup.com/buckets/%s/objects/%s/uploads/%s/%d", d.conf.bucket, keyBase64, uploadId, i)
|
||||||
|
res, err = d.upClient.R().SetHeader("Authorization", "UpToken "+upToken).SetBody(io.LimitReader(tempFile, DefaultPartSize)).Put(u)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
etag := utils.Json.Get(res.Body(), "etag").ToString()
|
||||||
|
parts = append(parts, Part{
|
||||||
|
PartNumber: i,
|
||||||
|
ETag: etag,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
res, err = d.upClient.R().SetHeader("Authorization", "UpToken "+upToken).SetBody(base.Json{
|
||||||
|
"fnmae": stream.GetName(),
|
||||||
|
"parts": parts,
|
||||||
|
}).Post(fmt.Sprintf("https://upload.qiniup.com/buckets/%s/objects/%s/uploads/%s", d.conf.bucket, keyBase64, uploadId))
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
token = utils.Json.Get(res.Body(), "token").ToString()
|
||||||
|
}
|
||||||
|
// commit upload
|
||||||
|
var resp UploadResultResp
|
||||||
|
for i := 0; i < 10; i++ {
|
||||||
|
_, err = d.unproved("/7n/results", http.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetQueryParams(map[string]string{
|
||||||
|
"tokenList": token,
|
||||||
|
"tokenTime": time.Now().Format("Mon Jan 02 2006 15:04:05 GMT-0700 (MST)"),
|
||||||
|
}).SetResult(&resp)
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if len(resp.List) == 0 {
|
||||||
|
return nil, fmt.Errorf("upload failed, empty response")
|
||||||
|
}
|
||||||
|
if resp.List[0].Status == 1 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
time.Sleep(time.Second * 1)
|
||||||
|
}
|
||||||
|
file := resp.List[0]
|
||||||
|
if file.Status != 1 {
|
||||||
|
return nil, fmt.Errorf("upload failed, status: %d", resp.List[0].Status)
|
||||||
|
}
|
||||||
|
return &model.Object{
|
||||||
|
ID: strconv.FormatInt(file.FileId, 10),
|
||||||
|
//Path: ,
|
||||||
|
Name: file.FileName,
|
||||||
|
Size: stream.GetSize(),
|
||||||
|
Modified: stream.ModTime(),
|
||||||
|
Ctime: stream.CreateTime(),
|
||||||
|
IsFolder: false,
|
||||||
|
HashInfo: utils.NewHashInfo(utils.MD5, etag),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
//func (d *ILanZou) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
|
||||||
|
// return nil, errs.NotSupport
|
||||||
|
//}
|
||||||
|
|
||||||
|
var _ driver.Driver = (*ILanZou)(nil)
|
80
drivers/ilanzou/meta.go
Normal file
80
drivers/ilanzou/meta.go
Normal file
@ -0,0 +1,80 @@
|
|||||||
|
package template
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/alist-org/alist/v3/internal/driver"
|
||||||
|
"github.com/alist-org/alist/v3/internal/op"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Addition struct {
|
||||||
|
driver.RootID
|
||||||
|
Username string `json:"username" type:"string" required:"true"`
|
||||||
|
Password string `json:"password" type:"string" required:"true"`
|
||||||
|
|
||||||
|
Token string
|
||||||
|
UUID string
|
||||||
|
}
|
||||||
|
|
||||||
|
type Conf struct {
|
||||||
|
base string
|
||||||
|
secret []byte
|
||||||
|
bucket string
|
||||||
|
unproved string
|
||||||
|
proved string
|
||||||
|
devVersion string
|
||||||
|
site string
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
op.RegisterDriver(func() driver.Driver {
|
||||||
|
return &ILanZou{
|
||||||
|
config: driver.Config{
|
||||||
|
Name: "ILanZou",
|
||||||
|
LocalSort: false,
|
||||||
|
OnlyLocal: false,
|
||||||
|
OnlyProxy: false,
|
||||||
|
NoCache: false,
|
||||||
|
NoUpload: false,
|
||||||
|
NeedMs: false,
|
||||||
|
DefaultRoot: "0",
|
||||||
|
CheckStatus: false,
|
||||||
|
Alert: "",
|
||||||
|
NoOverwriteUpload: false,
|
||||||
|
},
|
||||||
|
conf: Conf{
|
||||||
|
base: "https://api.ilanzou.com",
|
||||||
|
secret: []byte("lanZouY-disk-app"),
|
||||||
|
bucket: "wpanstore-lanzou",
|
||||||
|
unproved: "unproved",
|
||||||
|
proved: "proved",
|
||||||
|
devVersion: "122",
|
||||||
|
site: "https://www.ilanzou.com",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
})
|
||||||
|
op.RegisterDriver(func() driver.Driver {
|
||||||
|
return &ILanZou{
|
||||||
|
config: driver.Config{
|
||||||
|
Name: "FeijiPan",
|
||||||
|
LocalSort: false,
|
||||||
|
OnlyLocal: false,
|
||||||
|
OnlyProxy: false,
|
||||||
|
NoCache: false,
|
||||||
|
NoUpload: false,
|
||||||
|
NeedMs: false,
|
||||||
|
DefaultRoot: "0",
|
||||||
|
CheckStatus: false,
|
||||||
|
Alert: "",
|
||||||
|
NoOverwriteUpload: false,
|
||||||
|
},
|
||||||
|
conf: Conf{
|
||||||
|
base: "https://api.feijipan.com",
|
||||||
|
secret: []byte("dingHao-disk-app"),
|
||||||
|
bucket: "wpanstore",
|
||||||
|
unproved: "ws",
|
||||||
|
proved: "app",
|
||||||
|
devVersion: "121",
|
||||||
|
site: "https://www.feijipan.com",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
57
drivers/ilanzou/types.go
Normal file
57
drivers/ilanzou/types.go
Normal file
@ -0,0 +1,57 @@
|
|||||||
|
package template
|
||||||
|
|
||||||
|
type ListResp struct {
|
||||||
|
Msg string `json:"msg"`
|
||||||
|
Total int `json:"total"`
|
||||||
|
Code int `json:"code"`
|
||||||
|
Offset int `json:"offset"`
|
||||||
|
TotalPage int `json:"totalPage"`
|
||||||
|
Limit int `json:"limit"`
|
||||||
|
List []ListItem `json:"list"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type ListItem struct {
|
||||||
|
IconId int `json:"iconId"`
|
||||||
|
IsAmt int `json:"isAmt"`
|
||||||
|
FolderDesc string `json:"folderDesc,omitempty"`
|
||||||
|
AddTime string `json:"addTime"`
|
||||||
|
FolderId int64 `json:"folderId"`
|
||||||
|
ParentId int64 `json:"parentId"`
|
||||||
|
ParentName string `json:"parentName"`
|
||||||
|
NoteType int `json:"noteType,omitempty"`
|
||||||
|
UpdTime string `json:"updTime"`
|
||||||
|
IsShare int `json:"isShare"`
|
||||||
|
FolderIcon string `json:"folderIcon,omitempty"`
|
||||||
|
FolderName string `json:"folderName,omitempty"`
|
||||||
|
FileType int `json:"fileType"`
|
||||||
|
Status int `json:"status"`
|
||||||
|
IsFileShare int `json:"isFileShare,omitempty"`
|
||||||
|
FileName string `json:"fileName,omitempty"`
|
||||||
|
FileStars float64 `json:"fileStars,omitempty"`
|
||||||
|
IsFileDownload int `json:"isFileDownload,omitempty"`
|
||||||
|
FileComments int `json:"fileComments,omitempty"`
|
||||||
|
FileSize int64 `json:"fileSize,omitempty"`
|
||||||
|
FileIcon string `json:"fileIcon,omitempty"`
|
||||||
|
FileDownloads int `json:"fileDownloads,omitempty"`
|
||||||
|
FileUrl interface{} `json:"fileUrl"`
|
||||||
|
FileLikes int `json:"fileLikes,omitempty"`
|
||||||
|
FileId int64 `json:"fileId,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type Part struct {
|
||||||
|
PartNumber int `json:"partNumber"`
|
||||||
|
ETag string `json:"etag"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type UploadResultResp struct {
|
||||||
|
Msg string `json:"msg"`
|
||||||
|
Code int `json:"code"`
|
||||||
|
List []struct {
|
||||||
|
FileIconId int `json:"fileIconId"`
|
||||||
|
FileName string `json:"fileName"`
|
||||||
|
FileIcon string `json:"fileIcon"`
|
||||||
|
FileId int64 `json:"fileId"`
|
||||||
|
Status int `json:"status"`
|
||||||
|
Token string `json:"token"`
|
||||||
|
} `json:"list"`
|
||||||
|
}
|
101
drivers/ilanzou/util.go
Normal file
101
drivers/ilanzou/util.go
Normal file
@ -0,0 +1,101 @@
|
|||||||
|
package template
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/hex"
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"strconv"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/alist-org/alist/v3/drivers/base"
|
||||||
|
"github.com/alist-org/alist/v3/pkg/utils"
|
||||||
|
"github.com/foxxorcat/mopan-sdk-go"
|
||||||
|
"github.com/go-resty/resty/v2"
|
||||||
|
log "github.com/sirupsen/logrus"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (d *ILanZou) login() error {
|
||||||
|
res, err := d.unproved("/login", http.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetBody(base.Json{
|
||||||
|
"loginName": d.Username,
|
||||||
|
"loginPwd": d.Password,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
d.Token = utils.Json.Get(res, "data", "appToken").ToString()
|
||||||
|
if d.Token == "" {
|
||||||
|
return fmt.Errorf("failed to login: token is empty, resp: %s", res)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func getTimestamp(secret []byte) (string, error) {
|
||||||
|
ts := time.Now().UnixMilli()
|
||||||
|
tsStr := strconv.FormatInt(ts, 10)
|
||||||
|
res, err := mopan.AesEncrypt([]byte(tsStr), secret)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return hex.EncodeToString(res), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ILanZou) request(pathname, method string, callback base.ReqCallback, proved bool, retry ...bool) ([]byte, error) {
|
||||||
|
req := base.RestyClient.R()
|
||||||
|
ts, err := getTimestamp(d.conf.secret)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
req.SetQueryParams(map[string]string{
|
||||||
|
"uuid": d.UUID,
|
||||||
|
"devType": "6",
|
||||||
|
"devCode": d.UUID,
|
||||||
|
"devModel": "chrome",
|
||||||
|
"devVersion": d.conf.devVersion,
|
||||||
|
"appVersion": "",
|
||||||
|
"timestamp": ts,
|
||||||
|
//"appToken": d.Token,
|
||||||
|
"extra": "2",
|
||||||
|
})
|
||||||
|
req.SetHeaders(map[string]string{
|
||||||
|
"Origin": d.conf.site,
|
||||||
|
"Referer": d.conf.site + "/",
|
||||||
|
})
|
||||||
|
if proved {
|
||||||
|
req.SetQueryParam("appToken", d.Token)
|
||||||
|
}
|
||||||
|
if callback != nil {
|
||||||
|
callback(req)
|
||||||
|
}
|
||||||
|
res, err := req.Execute(method, d.conf.base+pathname)
|
||||||
|
if err != nil {
|
||||||
|
if res != nil {
|
||||||
|
log.Errorf("[iLanZou] request error: %s", res.String())
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
isRetry := len(retry) > 0 && retry[0]
|
||||||
|
body := res.Body()
|
||||||
|
code := utils.Json.Get(body, "code").ToInt()
|
||||||
|
msg := utils.Json.Get(body, "msg").ToString()
|
||||||
|
if code != 200 {
|
||||||
|
if !isRetry && proved && (utils.SliceContains([]int{-1, -2}, code) || d.Token == "") {
|
||||||
|
err = d.login()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return d.request(pathname, method, callback, proved, true)
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("%d: %s", code, msg)
|
||||||
|
}
|
||||||
|
return body, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ILanZou) unproved(pathname, method string, callback base.ReqCallback) ([]byte, error) {
|
||||||
|
return d.request("/"+d.conf.unproved+pathname, method, callback, false)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *ILanZou) proved(pathname, method string, callback base.ReqCallback) ([]byte, error) {
|
||||||
|
return d.request("/"+d.conf.proved+pathname, method, callback, true)
|
||||||
|
}
|
@ -62,7 +62,7 @@ func (d *IPFS) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]
|
|||||||
for _, file := range dirs {
|
for _, file := range dirs {
|
||||||
gateurl := *d.gateURL
|
gateurl := *d.gateURL
|
||||||
gateurl.Path = "ipfs/" + file.Hash
|
gateurl.Path = "ipfs/" + file.Hash
|
||||||
gateurl.RawQuery = "filename=" + file.Name
|
gateurl.RawQuery = "filename=" + url.PathEscape(file.Name)
|
||||||
objlist = append(objlist, &model.ObjectURL{
|
objlist = append(objlist, &model.ObjectURL{
|
||||||
Object: model.Object{ID: file.Hash, Name: file.Name, Size: int64(file.Size), IsFolder: file.Type == 1},
|
Object: model.Object{ID: file.Hash, Name: file.Name, Size: int64(file.Size), IsFolder: file.Type == 1},
|
||||||
Url: model.Url{Url: gateurl.String()},
|
Url: model.Url{Url: gateurl.String()},
|
||||||
@ -73,7 +73,7 @@ func (d *IPFS) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (d *IPFS) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
func (d *IPFS) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||||
link := d.Gateway + "/ipfs/" + file.GetID() + "/?filename=" + file.GetName()
|
link := d.Gateway + "/ipfs/" + file.GetID() + "/?filename=" + url.PathEscape(file.GetName())
|
||||||
return &model.Link{URL: link}, nil
|
return &model.Link{URL: link}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -257,10 +257,18 @@ func (d *Local) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
|||||||
|
|
||||||
func (d *Local) Remove(ctx context.Context, obj model.Obj) error {
|
func (d *Local) Remove(ctx context.Context, obj model.Obj) error {
|
||||||
var err error
|
var err error
|
||||||
if obj.IsDir() {
|
if utils.SliceContains([]string{"", "delete permanently"}, d.RecycleBinPath) {
|
||||||
err = os.RemoveAll(obj.GetPath())
|
if obj.IsDir() {
|
||||||
|
err = os.RemoveAll(obj.GetPath())
|
||||||
|
} else {
|
||||||
|
err = os.Remove(obj.GetPath())
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
err = os.Remove(obj.GetPath())
|
dstPath := filepath.Join(d.RecycleBinPath, obj.GetName())
|
||||||
|
if utils.Exists(dstPath) {
|
||||||
|
dstPath = filepath.Join(d.RecycleBinPath, obj.GetName()+"_"+time.Now().Format("20060102150405"))
|
||||||
|
}
|
||||||
|
err = os.Rename(obj.GetPath(), dstPath)
|
||||||
}
|
}
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
|
@ -11,6 +11,7 @@ type Addition struct {
|
|||||||
ThumbCacheFolder string `json:"thumb_cache_folder"`
|
ThumbCacheFolder string `json:"thumb_cache_folder"`
|
||||||
ShowHidden bool `json:"show_hidden" default:"true" required:"false" help:"show hidden directories and files"`
|
ShowHidden bool `json:"show_hidden" default:"true" required:"false" help:"show hidden directories and files"`
|
||||||
MkdirPerm string `json:"mkdir_perm" default:"777"`
|
MkdirPerm string `json:"mkdir_perm" default:"777"`
|
||||||
|
RecycleBinPath string `json:"recycle_bin_path" default:"delete permanently" help:"path to recycle bin, delete permanently if empty or keep 'delete permanently'"`
|
||||||
}
|
}
|
||||||
|
|
||||||
var config = driver.Config{
|
var config = driver.Config{
|
||||||
|
@ -188,6 +188,9 @@ func (d *MediaTrack) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
|
|||||||
_ = tempFile.Close()
|
_ = tempFile.Close()
|
||||||
}()
|
}()
|
||||||
uploader := s3manager.NewUploader(s)
|
uploader := s3manager.NewUploader(s)
|
||||||
|
if stream.GetSize() > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
|
||||||
|
uploader.PartSize = stream.GetSize() / (s3manager.MaxUploadParts - 1)
|
||||||
|
}
|
||||||
input := &s3manager.UploadInput{
|
input := &s3manager.UploadInput{
|
||||||
Bucket: &resp.Data.Bucket,
|
Bucket: &resp.Data.Bucket,
|
||||||
Key: &resp.Data.Object,
|
Key: &resp.Data.Object,
|
||||||
|
@ -4,11 +4,12 @@ import (
|
|||||||
"context"
|
"context"
|
||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"github.com/alist-org/alist/v3/pkg/http_range"
|
|
||||||
"github.com/rclone/rclone/lib/readers"
|
|
||||||
"io"
|
"io"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/alist-org/alist/v3/pkg/http_range"
|
||||||
|
"github.com/rclone/rclone/lib/readers"
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/internal/driver"
|
"github.com/alist-org/alist/v3/internal/driver"
|
||||||
"github.com/alist-org/alist/v3/internal/errs"
|
"github.com/alist-org/alist/v3/internal/errs"
|
||||||
"github.com/alist-org/alist/v3/internal/model"
|
"github.com/alist-org/alist/v3/internal/model"
|
||||||
@ -169,7 +170,7 @@ func (d *Mega) Put(ctx context.Context, dstDir model.Obj, stream model.FileStrea
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
up(id * 100 / u.Chunks())
|
up(float64(id) * 100 / float64(u.Chunks()))
|
||||||
}
|
}
|
||||||
|
|
||||||
_, err = u.Finish()
|
_, err = u.Finish()
|
||||||
|
@ -43,23 +43,31 @@ func (d *MoPan) Init(ctx context.Context) error {
|
|||||||
if d.uploadThread < 1 || d.uploadThread > 32 {
|
if d.uploadThread < 1 || d.uploadThread > 32 {
|
||||||
d.uploadThread, d.UploadThread = 3, "3"
|
d.uploadThread, d.UploadThread = 3, "3"
|
||||||
}
|
}
|
||||||
login := func() error {
|
|
||||||
data, err := d.client.Login(d.Phone, d.Password)
|
defer func() { d.SMSCode = "" }()
|
||||||
|
|
||||||
|
login := func() (err error) {
|
||||||
|
var loginData *mopan.LoginResp
|
||||||
|
if d.SMSCode != "" {
|
||||||
|
loginData, err = d.client.LoginBySmsStep2(d.Phone, d.SMSCode)
|
||||||
|
} else {
|
||||||
|
loginData, err = d.client.Login(d.Phone, d.Password)
|
||||||
|
}
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
d.client.SetAuthorization(data.Token)
|
d.client.SetAuthorization(loginData.Token)
|
||||||
|
|
||||||
info, err := d.client.GetUserInfo()
|
info, err := d.client.GetUserInfo()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
d.userID = info.UserID
|
d.userID = info.UserID
|
||||||
log.Debugf("[mopan] Phone: %s UserCloudStorageRelations: %+v", d.Phone, data.UserCloudStorageRelations)
|
log.Debugf("[mopan] Phone: %s UserCloudStorageRelations: %+v", d.Phone, loginData.UserCloudStorageRelations)
|
||||||
cloudCircleApp, _ := d.client.QueryAllCloudCircleApp()
|
cloudCircleApp, _ := d.client.QueryAllCloudCircleApp()
|
||||||
log.Debugf("[mopan] Phone: %s CloudCircleApp: %+v", d.Phone, cloudCircleApp)
|
log.Debugf("[mopan] Phone: %s CloudCircleApp: %+v", d.Phone, cloudCircleApp)
|
||||||
if d.RootFolderID == "" {
|
if d.RootFolderID == "" {
|
||||||
for _, userCloudStorage := range data.UserCloudStorageRelations {
|
for _, userCloudStorage := range loginData.UserCloudStorageRelations {
|
||||||
if userCloudStorage.Path == "/文件" {
|
if userCloudStorage.Path == "/文件" {
|
||||||
d.RootFolderID = userCloudStorage.FolderID
|
d.RootFolderID = userCloudStorage.FolderID
|
||||||
}
|
}
|
||||||
@ -76,8 +84,20 @@ func (d *MoPan) Init(ctx context.Context) error {
|
|||||||
op.MustSaveDriverStorage(d)
|
op.MustSaveDriverStorage(d)
|
||||||
}
|
}
|
||||||
return err
|
return err
|
||||||
}).SetDeviceInfo(d.DeviceInfo)
|
})
|
||||||
d.DeviceInfo = d.client.GetDeviceInfo()
|
|
||||||
|
var deviceInfo mopan.DeviceInfo
|
||||||
|
if strings.TrimSpace(d.DeviceInfo) != "" && utils.Json.UnmarshalFromString(d.DeviceInfo, &deviceInfo) == nil {
|
||||||
|
d.client.SetDeviceInfo(&deviceInfo)
|
||||||
|
}
|
||||||
|
d.DeviceInfo, _ = utils.Json.MarshalToString(d.client.GetDeviceInfo())
|
||||||
|
|
||||||
|
if strings.Contains(d.SMSCode, "send") {
|
||||||
|
if _, err := d.client.LoginBySms(d.Phone); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return errors.New("please enter the SMS code")
|
||||||
|
}
|
||||||
return login()
|
return login()
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -119,10 +139,13 @@ func (d *MoPan) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (
|
|||||||
}
|
}
|
||||||
|
|
||||||
data.DownloadUrl = strings.Replace(strings.ReplaceAll(data.DownloadUrl, "&", "&"), "http://", "https://", 1)
|
data.DownloadUrl = strings.Replace(strings.ReplaceAll(data.DownloadUrl, "&", "&"), "http://", "https://", 1)
|
||||||
res, err := base.NoRedirectClient.R().SetContext(ctx).Head(data.DownloadUrl)
|
res, err := base.NoRedirectClient.R().SetDoNotParseResponse(true).SetContext(ctx).Get(data.DownloadUrl)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
defer func() {
|
||||||
|
_ = res.RawBody().Close()
|
||||||
|
}()
|
||||||
if res.StatusCode() == 302 {
|
if res.StatusCode() == 302 {
|
||||||
data.DownloadUrl = res.Header().Get("location")
|
data.DownloadUrl = res.Header().Get("location")
|
||||||
}
|
}
|
||||||
@ -272,7 +295,7 @@ func (d *MoPan) Put(ctx context.Context, dstDir model.Obj, stream model.FileStre
|
|||||||
}
|
}
|
||||||
|
|
||||||
if !initUpdload.FileDataExists {
|
if !initUpdload.FileDataExists {
|
||||||
utils.Log.Error(d.client.CloudDiskStartBusiness())
|
// utils.Log.Error(d.client.CloudDiskStartBusiness())
|
||||||
|
|
||||||
threadG, upCtx := errgroup.NewGroupWithContext(ctx, d.uploadThread,
|
threadG, upCtx := errgroup.NewGroupWithContext(ctx, d.uploadThread,
|
||||||
retry.Attempts(3),
|
retry.Attempts(3),
|
||||||
@ -300,6 +323,7 @@ func (d *MoPan) Put(ctx context.Context, dstDir model.Obj, stream model.FileStre
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
req.ContentLength = byteSize
|
||||||
resp, err := base.HttpClient.Do(req)
|
resp, err := base.HttpClient.Do(req)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
@ -308,7 +332,7 @@ func (d *MoPan) Put(ctx context.Context, dstDir model.Obj, stream model.FileStre
|
|||||||
if resp.StatusCode != http.StatusOK {
|
if resp.StatusCode != http.StatusOK {
|
||||||
return fmt.Errorf("upload err,code=%d", resp.StatusCode)
|
return fmt.Errorf("upload err,code=%d", resp.StatusCode)
|
||||||
}
|
}
|
||||||
up(100 * int(threadG.Success()) / len(parts))
|
up(100 * float64(threadG.Success()) / float64(len(parts)))
|
||||||
initUpdload.PartInfos[i] = ""
|
initUpdload.PartInfos[i] = ""
|
||||||
return nil
|
return nil
|
||||||
})
|
})
|
||||||
|
@ -8,6 +8,7 @@ import (
|
|||||||
type Addition struct {
|
type Addition struct {
|
||||||
Phone string `json:"phone" required:"true"`
|
Phone string `json:"phone" required:"true"`
|
||||||
Password string `json:"password" required:"true"`
|
Password string `json:"password" required:"true"`
|
||||||
|
SMSCode string `json:"sms_code" help:"input 'send' send sms "`
|
||||||
|
|
||||||
RootFolderID string `json:"root_folder_id" default:""`
|
RootFolderID string `json:"root_folder_id" default:""`
|
||||||
|
|
||||||
|
@ -6,6 +6,7 @@ import (
|
|||||||
"net/http"
|
"net/http"
|
||||||
"net/url"
|
"net/url"
|
||||||
"path"
|
"path"
|
||||||
|
"sync"
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/drivers/base"
|
"github.com/alist-org/alist/v3/drivers/base"
|
||||||
"github.com/alist-org/alist/v3/internal/driver"
|
"github.com/alist-org/alist/v3/internal/driver"
|
||||||
@ -19,6 +20,8 @@ type Onedrive struct {
|
|||||||
model.Storage
|
model.Storage
|
||||||
Addition
|
Addition
|
||||||
AccessToken string
|
AccessToken string
|
||||||
|
root *Object
|
||||||
|
mutex sync.Mutex
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Onedrive) Config() driver.Config {
|
func (d *Onedrive) Config() driver.Config {
|
||||||
@ -40,6 +43,42 @@ func (d *Onedrive) Drop(ctx context.Context) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (d *Onedrive) GetRoot(ctx context.Context) (model.Obj, error) {
|
||||||
|
if d.root != nil {
|
||||||
|
return d.root, nil
|
||||||
|
}
|
||||||
|
d.mutex.Lock()
|
||||||
|
defer d.mutex.Unlock()
|
||||||
|
root := &Object{
|
||||||
|
ObjThumb: model.ObjThumb{
|
||||||
|
Object: model.Object{
|
||||||
|
ID: "root",
|
||||||
|
Path: d.RootFolderPath,
|
||||||
|
Name: "root",
|
||||||
|
Size: 0,
|
||||||
|
Modified: d.Modified,
|
||||||
|
Ctime: d.Modified,
|
||||||
|
IsFolder: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
ParentID: "",
|
||||||
|
}
|
||||||
|
if !utils.PathEqual(d.RootFolderPath, "/") {
|
||||||
|
// get root folder id
|
||||||
|
url := d.GetMetaUrl(false, d.RootFolderPath)
|
||||||
|
var resp struct {
|
||||||
|
Id string `json:"id"`
|
||||||
|
}
|
||||||
|
_, err := d.Request(url, http.MethodGet, nil, &resp)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
root.ID = resp.Id
|
||||||
|
}
|
||||||
|
d.root = root
|
||||||
|
return d.root, nil
|
||||||
|
}
|
||||||
|
|
||||||
func (d *Onedrive) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
func (d *Onedrive) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||||
files, err := d.getFiles(dir.GetPath())
|
files, err := d.getFiles(dir.GetPath())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@ -203,7 +203,7 @@ func (d *Onedrive) upBig(ctx context.Context, dstDir model.Obj, stream model.Fil
|
|||||||
return errors.New(string(data))
|
return errors.New(string(data))
|
||||||
}
|
}
|
||||||
res.Body.Close()
|
res.Body.Close()
|
||||||
up(int(finish * 100 / stream.GetSize()))
|
up(float64(finish) * 100 / float64(stream.GetSize()))
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
@ -6,6 +6,7 @@ import (
|
|||||||
"net/http"
|
"net/http"
|
||||||
"net/url"
|
"net/url"
|
||||||
"path"
|
"path"
|
||||||
|
"sync"
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/drivers/base"
|
"github.com/alist-org/alist/v3/drivers/base"
|
||||||
"github.com/alist-org/alist/v3/internal/driver"
|
"github.com/alist-org/alist/v3/internal/driver"
|
||||||
@ -19,6 +20,8 @@ type OnedriveAPP struct {
|
|||||||
model.Storage
|
model.Storage
|
||||||
Addition
|
Addition
|
||||||
AccessToken string
|
AccessToken string
|
||||||
|
root *Object
|
||||||
|
mutex sync.Mutex
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *OnedriveAPP) Config() driver.Config {
|
func (d *OnedriveAPP) Config() driver.Config {
|
||||||
@ -40,6 +43,42 @@ func (d *OnedriveAPP) Drop(ctx context.Context) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (d *OnedriveAPP) GetRoot(ctx context.Context) (model.Obj, error) {
|
||||||
|
if d.root != nil {
|
||||||
|
return d.root, nil
|
||||||
|
}
|
||||||
|
d.mutex.Lock()
|
||||||
|
defer d.mutex.Unlock()
|
||||||
|
root := &Object{
|
||||||
|
ObjThumb: model.ObjThumb{
|
||||||
|
Object: model.Object{
|
||||||
|
ID: "root",
|
||||||
|
Path: d.RootFolderPath,
|
||||||
|
Name: "root",
|
||||||
|
Size: 0,
|
||||||
|
Modified: d.Modified,
|
||||||
|
Ctime: d.Modified,
|
||||||
|
IsFolder: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
ParentID: "",
|
||||||
|
}
|
||||||
|
if !utils.PathEqual(d.RootFolderPath, "/") {
|
||||||
|
// get root folder id
|
||||||
|
url := d.GetMetaUrl(false, d.RootFolderPath)
|
||||||
|
var resp struct {
|
||||||
|
Id string `json:"id"`
|
||||||
|
}
|
||||||
|
_, err := d.Request(url, http.MethodGet, nil, &resp)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
root.ID = resp.Id
|
||||||
|
}
|
||||||
|
d.root = root
|
||||||
|
return d.root, nil
|
||||||
|
}
|
||||||
|
|
||||||
func (d *OnedriveAPP) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
func (d *OnedriveAPP) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||||
files, err := d.getFiles(dir.GetPath())
|
files, err := d.getFiles(dir.GetPath())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@ -194,7 +194,7 @@ func (d *OnedriveAPP) upBig(ctx context.Context, dstDir model.Obj, stream model.
|
|||||||
return errors.New(string(data))
|
return errors.New(string(data))
|
||||||
}
|
}
|
||||||
res.Body.Close()
|
res.Body.Close()
|
||||||
up(int(finish * 100 / stream.GetSize()))
|
up(float64(finish) * 100 / float64(stream.GetSize()))
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
@ -172,6 +172,9 @@ func (d *PikPak) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
uploader := s3manager.NewUploader(ss)
|
uploader := s3manager.NewUploader(ss)
|
||||||
|
if stream.GetSize() > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
|
||||||
|
uploader.PartSize = stream.GetSize() / (s3manager.MaxUploadParts - 1)
|
||||||
|
}
|
||||||
input := &s3manager.UploadInput{
|
input := &s3manager.UploadInput{
|
||||||
Bucket: ¶ms.Bucket,
|
Bucket: ¶ms.Bucket,
|
||||||
Key: ¶ms.Key,
|
Key: ¶ms.Key,
|
||||||
|
@ -209,7 +209,7 @@ func (d *QuarkOrUC) Put(ctx context.Context, dstDir model.Obj, stream model.File
|
|||||||
}
|
}
|
||||||
md5s = append(md5s, m)
|
md5s = append(md5s, m)
|
||||||
partNumber++
|
partNumber++
|
||||||
up(int(100 * (total - left) / total))
|
up(100 * float64(total-left) / float64(total))
|
||||||
}
|
}
|
||||||
err = d.upCommit(pre, md5s)
|
err = d.upCommit(pre, md5s)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
437
drivers/quqi/driver.go
Normal file
437
drivers/quqi/driver.go
Normal file
@ -0,0 +1,437 @@
|
|||||||
|
package quqi
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"context"
|
||||||
|
"io"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/alist-org/alist/v3/internal/driver"
|
||||||
|
"github.com/alist-org/alist/v3/internal/errs"
|
||||||
|
"github.com/alist-org/alist/v3/internal/model"
|
||||||
|
"github.com/alist-org/alist/v3/pkg/utils"
|
||||||
|
"github.com/alist-org/alist/v3/pkg/utils/random"
|
||||||
|
"github.com/aws/aws-sdk-go/aws"
|
||||||
|
"github.com/aws/aws-sdk-go/aws/credentials"
|
||||||
|
"github.com/aws/aws-sdk-go/aws/session"
|
||||||
|
"github.com/aws/aws-sdk-go/service/s3"
|
||||||
|
"github.com/aws/aws-sdk-go/service/s3/s3manager"
|
||||||
|
"github.com/go-resty/resty/v2"
|
||||||
|
log "github.com/sirupsen/logrus"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Quqi struct {
|
||||||
|
model.Storage
|
||||||
|
Addition
|
||||||
|
Cookie string // Cookie
|
||||||
|
GroupID string // 私人云群组ID
|
||||||
|
ClientID string // 随机生成客户端ID 经过测试,部分接口调用若不携带client id会出现错误
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Quqi) Config() driver.Config {
|
||||||
|
return config
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Quqi) GetAddition() driver.Additional {
|
||||||
|
return &d.Addition
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Quqi) Init(ctx context.Context) error {
|
||||||
|
// 登录
|
||||||
|
if err := d.login(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// 生成随机client id (与网页端生成逻辑一致)
|
||||||
|
d.ClientID = "quqipc_" + random.String(10)
|
||||||
|
|
||||||
|
// 获取私人云ID (暂时仅获取私人云)
|
||||||
|
groupResp := &GroupRes{}
|
||||||
|
if _, err := d.request("group.quqi.com", "/v1/group/list", resty.MethodGet, nil, groupResp); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
for _, groupInfo := range groupResp.Data {
|
||||||
|
if groupInfo == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if groupInfo.Type == 2 {
|
||||||
|
d.GroupID = strconv.Itoa(groupInfo.ID)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if d.GroupID == "" {
|
||||||
|
return errs.StorageNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Quqi) Drop(ctx context.Context) error {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Quqi) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||||
|
var (
|
||||||
|
listResp = &ListRes{}
|
||||||
|
files []model.Obj
|
||||||
|
)
|
||||||
|
|
||||||
|
if _, err := d.request("", "/api/dir/ls", resty.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetFormData(map[string]string{
|
||||||
|
"quqi_id": d.GroupID,
|
||||||
|
"tree_id": "1",
|
||||||
|
"node_id": dir.GetID(),
|
||||||
|
"client_id": d.ClientID,
|
||||||
|
})
|
||||||
|
}, listResp); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if listResp.Data == nil {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// dirs
|
||||||
|
for _, dirInfo := range listResp.Data.Dir {
|
||||||
|
if dirInfo == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
files = append(files, &model.Object{
|
||||||
|
ID: strconv.FormatInt(dirInfo.NodeID, 10),
|
||||||
|
Name: dirInfo.Name,
|
||||||
|
Modified: time.Unix(dirInfo.UpdateTime, 0),
|
||||||
|
Ctime: time.Unix(dirInfo.AddTime, 0),
|
||||||
|
IsFolder: true,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// files
|
||||||
|
for _, fileInfo := range listResp.Data.File {
|
||||||
|
if fileInfo == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if fileInfo.EXT != "" {
|
||||||
|
fileInfo.Name = strings.Join([]string{fileInfo.Name, fileInfo.EXT}, ".")
|
||||||
|
}
|
||||||
|
|
||||||
|
files = append(files, &model.Object{
|
||||||
|
ID: strconv.FormatInt(fileInfo.NodeID, 10),
|
||||||
|
Name: fileInfo.Name,
|
||||||
|
Size: fileInfo.Size,
|
||||||
|
Modified: time.Unix(fileInfo.UpdateTime, 0),
|
||||||
|
Ctime: time.Unix(fileInfo.AddTime, 0),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return files, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Quqi) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||||
|
if d.CDN {
|
||||||
|
link, err := d.linkFromCDN(file.GetID())
|
||||||
|
if err != nil {
|
||||||
|
log.Warn(err)
|
||||||
|
} else {
|
||||||
|
return link, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
link, err := d.linkFromPreview(file.GetID())
|
||||||
|
if err != nil {
|
||||||
|
log.Warn(err)
|
||||||
|
} else {
|
||||||
|
return link, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
link, err = d.linkFromDownload(file.GetID())
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return link, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Quqi) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
|
||||||
|
var (
|
||||||
|
makeDirRes = &MakeDirRes{}
|
||||||
|
timeNow = time.Now()
|
||||||
|
)
|
||||||
|
|
||||||
|
if _, err := d.request("", "/api/dir/mkDir", resty.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetFormData(map[string]string{
|
||||||
|
"quqi_id": d.GroupID,
|
||||||
|
"tree_id": "1",
|
||||||
|
"parent_id": parentDir.GetID(),
|
||||||
|
"name": dirName,
|
||||||
|
"client_id": d.ClientID,
|
||||||
|
})
|
||||||
|
}, makeDirRes); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &model.Object{
|
||||||
|
ID: strconv.FormatInt(makeDirRes.Data.NodeID, 10),
|
||||||
|
Name: dirName,
|
||||||
|
Modified: timeNow,
|
||||||
|
Ctime: timeNow,
|
||||||
|
IsFolder: true,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Quqi) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
|
||||||
|
var moveRes = &MoveRes{}
|
||||||
|
|
||||||
|
if _, err := d.request("", "/api/dir/mvDir", resty.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetFormData(map[string]string{
|
||||||
|
"quqi_id": d.GroupID,
|
||||||
|
"tree_id": "1",
|
||||||
|
"node_id": dstDir.GetID(),
|
||||||
|
"source_quqi_id": d.GroupID,
|
||||||
|
"source_tree_id": "1",
|
||||||
|
"source_node_id": srcObj.GetID(),
|
||||||
|
"client_id": d.ClientID,
|
||||||
|
})
|
||||||
|
}, moveRes); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &model.Object{
|
||||||
|
ID: strconv.FormatInt(moveRes.Data.NodeID, 10),
|
||||||
|
Name: moveRes.Data.NodeName,
|
||||||
|
Size: srcObj.GetSize(),
|
||||||
|
Modified: time.Now(),
|
||||||
|
Ctime: srcObj.CreateTime(),
|
||||||
|
IsFolder: srcObj.IsDir(),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Quqi) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
|
||||||
|
var realName = newName
|
||||||
|
|
||||||
|
if !srcObj.IsDir() {
|
||||||
|
srcExt, newExt := utils.Ext(srcObj.GetName()), utils.Ext(newName)
|
||||||
|
|
||||||
|
// 曲奇网盘的文件名称由文件名和扩展名组成,若存在扩展名,则重命名时仅支持更改文件名,扩展名在曲奇服务端保留
|
||||||
|
if srcExt != "" && srcExt == newExt {
|
||||||
|
parts := strings.Split(newName, ".")
|
||||||
|
if len(parts) > 1 {
|
||||||
|
realName = strings.Join(parts[:len(parts)-1], ".")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, err := d.request("", "/api/dir/renameDir", resty.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetFormData(map[string]string{
|
||||||
|
"quqi_id": d.GroupID,
|
||||||
|
"tree_id": "1",
|
||||||
|
"node_id": srcObj.GetID(),
|
||||||
|
"rename": realName,
|
||||||
|
"client_id": d.ClientID,
|
||||||
|
})
|
||||||
|
}, nil); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &model.Object{
|
||||||
|
ID: srcObj.GetID(),
|
||||||
|
Name: newName,
|
||||||
|
Size: srcObj.GetSize(),
|
||||||
|
Modified: time.Now(),
|
||||||
|
Ctime: srcObj.CreateTime(),
|
||||||
|
IsFolder: srcObj.IsDir(),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Quqi) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
|
||||||
|
// 无法从曲奇接口响应中直接获取复制后的文件信息
|
||||||
|
if _, err := d.request("", "/api/node/copy", resty.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetFormData(map[string]string{
|
||||||
|
"quqi_id": d.GroupID,
|
||||||
|
"tree_id": "1",
|
||||||
|
"node_id": dstDir.GetID(),
|
||||||
|
"source_quqi_id": d.GroupID,
|
||||||
|
"source_tree_id": "1",
|
||||||
|
"source_node_id": srcObj.GetID(),
|
||||||
|
"client_id": d.ClientID,
|
||||||
|
})
|
||||||
|
}, nil); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Quqi) Remove(ctx context.Context, obj model.Obj) error {
|
||||||
|
// 暂时不做直接删除,默认都放到回收站。直接删除方法:先调用删除接口放入回收站,在通过回收站接口删除文件
|
||||||
|
if _, err := d.request("", "/api/node/del", resty.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetFormData(map[string]string{
|
||||||
|
"quqi_id": d.GroupID,
|
||||||
|
"tree_id": "1",
|
||||||
|
"node_id": obj.GetID(),
|
||||||
|
"client_id": d.ClientID,
|
||||||
|
})
|
||||||
|
}, nil); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Put uploads stream into dstDir. The flow is: cache the stream to a temp
// file, hash it (MD5 + SHA256), call Quqi's upload-init endpoint (which may
// report the file already exists, in which case no bytes are transferred),
// then push the content to Tencent COS in 2 MiB multipart chunks using the
// temporary credentials Quqi hands out, and finally call the finish endpoint.
//
// NOTE(review): up (the progress callback) is never invoked anywhere in this
// function, so upload progress is not reported to the caller.
func (d *Quqi) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
	// base info
	sizeStr := strconv.FormatInt(stream.GetSize(), 10)
	f, err := stream.CacheFullInTempFile()
	if err != nil {
		return nil, err
	}
	// NOTE(review): f is read three times below (two hash passes, then the
	// part-upload loop). This relies on utils.HashFile rewinding the file (or
	// otherwise not leaving the shared offset at EOF) — verify against the
	// utils.HashFile implementation; if it does not seek, the SHA256 pass and
	// the upload loop would read from EOF.
	md5, err := utils.HashFile(utils.MD5, f)
	if err != nil {
		return nil, err
	}
	sha, err := utils.HashFile(utils.SHA256, f)
	if err != nil {
		return nil, err
	}
	// init upload: registers the pending file and returns either an "already
	// exists" verdict or the COS bucket/key/upload-id plus an access token.
	var uploadInitResp UploadInitResp
	_, err = d.request("", "/api/upload/v1/file/init", resty.MethodPost, func(req *resty.Request) {
		req.SetFormData(map[string]string{
			"quqi_id":   d.GroupID,
			"tree_id":   "1",
			"parent_id": dstDir.GetID(),
			"size":      sizeStr,
			"file_name": stream.GetName(),
			"md5":       md5,
			"sha":       sha,
			"is_slice":  "true",
			"client_id": d.ClientID,
		})
	}, &uploadInitResp)
	if err != nil {
		return nil, err
	}
	// check exist
	// if the file already exists in Quqi server, there is no need to actually upload it
	if uploadInitResp.Data.Exist {
		// the file name returned by Quqi does not include the extension name
		nodeName, nodeExt := uploadInitResp.Data.NodeName, rawExt(stream.GetName())
		if nodeExt != "" {
			nodeName = nodeName + "." + nodeExt
		}
		return &model.Object{
			ID:       strconv.FormatInt(uploadInitResp.Data.NodeID, 10),
			Name:     nodeName,
			Size:     stream.GetSize(),
			Modified: stream.ModTime(),
			Ctime:    stream.CreateTime(),
		}, nil
	}
	// listParts: response is discarded; presumably this primes/validates the
	// upload session on Quqi's side — TODO confirm it is actually required.
	_, err = d.request("upload.quqi.com:20807", "/upload/v1/listParts", resty.MethodPost, func(req *resty.Request) {
		req.SetFormData(map[string]string{
			"token":     uploadInitResp.Data.Token,
			"task_id":   uploadInitResp.Data.TaskID,
			"client_id": d.ClientID,
		})
	}, nil)
	if err != nil {
		return nil, err
	}
	// get temp key: short-lived Tencent COS credentials for the part uploads.
	var tempKeyResp TempKeyResp
	_, err = d.request("upload.quqi.com:20807", "/upload/v1/tempKey", resty.MethodGet, func(req *resty.Request) {
		req.SetQueryParams(map[string]string{
			"token":   uploadInitResp.Data.Token,
			"task_id": uploadInitResp.Data.TaskID,
		})
	}, &tempKeyResp)
	if err != nil {
		return nil, err
	}
	// upload
	// u, err := url.Parse(fmt.Sprintf("https://%s.cos.ap-shanghai.myqcloud.com", uploadInitResp.Data.Bucket))
	// b := &cos.BaseURL{BucketURL: u}
	// client := cos.NewClient(b, &http.Client{
	// 	Transport: &cos.CredentialTransport{
	// 		Credential: cos.NewTokenCredential(tempKeyResp.Data.Credentials.TmpSecretID, tempKeyResp.Data.Credentials.TmpSecretKey, tempKeyResp.Data.Credentials.SessionToken),
	// 	},
	// })
	// partSize := int64(1024 * 1024 * 2)
	// partCount := (stream.GetSize() + partSize - 1) / partSize
	// for i := 1; i <= int(partCount); i++ {
	// 	length := partSize
	// 	if i == int(partCount) {
	// 		length = stream.GetSize() - (int64(i)-1)*partSize
	// 	}
	// 	_, err := client.Object.UploadPart(
	// 		ctx, uploadInitResp.Data.Key, uploadInitResp.Data.UploadID, i, io.LimitReader(f, partSize), &cos.ObjectUploadPartOptions{
	// 			ContentLength: length,
	// 		},
	// 	)
	// 	if err != nil {
	// 		return nil, err
	// 	}
	// }

	// Use the generic AWS S3 API against the COS endpoint with the temporary
	// credentials issued above.
	cfg := &aws.Config{
		Credentials: credentials.NewStaticCredentials(tempKeyResp.Data.Credentials.TmpSecretID, tempKeyResp.Data.Credentials.TmpSecretKey, tempKeyResp.Data.Credentials.SessionToken),
		Region:      aws.String("ap-shanghai"),
		Endpoint:    aws.String("cos.ap-shanghai.myqcloud.com"),
	}
	s, err := session.NewSession(cfg)
	if err != nil {
		return nil, err
	}
	uploader := s3manager.NewUploader(s)
	// 2 MiB part buffer, reused across iterations.
	buf := make([]byte, 1024*1024*2)
	for partNumber := int64(1); ; partNumber++ {
		// io.ReadFull returns ErrUnexpectedEOF for a short final chunk (still
		// uploaded, length n) and io.EOF when nothing was read (loop ends).
		n, err := io.ReadFull(f, buf)
		if err != nil && err != io.ErrUnexpectedEOF {
			if err == io.EOF {
				break
			}
			return nil, err
		}
		// NOTE(review): the returned part ETags are discarded and no
		// CompleteMultipartUpload is issued against COS here; presumably the
		// /file/finish call below completes the upload server-side — verify.
		_, err = uploader.S3.UploadPartWithContext(ctx, &s3.UploadPartInput{
			UploadId:   &uploadInitResp.Data.UploadID,
			Key:        &uploadInitResp.Data.Key,
			Bucket:     &uploadInitResp.Data.Bucket,
			PartNumber: aws.Int64(partNumber),
			Body:       bytes.NewReader(buf[:n]),
		})
		if err != nil {
			return nil, err
		}
	}
	// finish upload: tells Quqi all parts are in place and yields the node.
	var uploadFinishResp UploadFinishResp
	_, err = d.request("", "/api/upload/v1/file/finish", resty.MethodPost, func(req *resty.Request) {
		req.SetFormData(map[string]string{
			"token":     uploadInitResp.Data.Token,
			"task_id":   uploadInitResp.Data.TaskID,
			"client_id": d.ClientID,
		})
	}, &uploadFinishResp)
	if err != nil {
		return nil, err
	}
	// the file name returned by Quqi does not include the extension name
	nodeName, nodeExt := uploadFinishResp.Data.NodeName, rawExt(stream.GetName())
	if nodeExt != "" {
		nodeName = nodeName + "." + nodeExt
	}
	return &model.Object{
		ID:       strconv.FormatInt(uploadFinishResp.Data.NodeID, 10),
		Name:     nodeName,
		Size:     stream.GetSize(),
		Modified: stream.ModTime(),
		Ctime:    stream.CreateTime(),
	}, nil
}
|
||||||
|
|
||||||
|
//func (d *Template) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
|
||||||
|
// return nil, errs.NotSupport
|
||||||
|
//}
|
||||||
|
|
||||||
|
// Compile-time assertion that *Quqi satisfies the driver.Driver interface.
var _ driver.Driver = (*Quqi)(nil)
|
28
drivers/quqi/meta.go
Normal file
28
drivers/quqi/meta.go
Normal file
@ -0,0 +1,28 @@
|
|||||||
|
package quqi
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/alist-org/alist/v3/internal/driver"
|
||||||
|
"github.com/alist-org/alist/v3/internal/op"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Addition holds the user-configurable options for the Quqi driver.
type Addition struct {
	driver.RootID
	// Phone is the account phone number, used for password login when no
	// valid cookie is available.
	Phone string `json:"phone"`
	// Password is the account password; login base64-encodes it before
	// sending it to the auth endpoint.
	Password string `json:"password"`
	// Cookie, when set, is used directly instead of logging in.
	Cookie string `json:"cookie" help:"Cookie can be used on multiple clients at the same time"`
	// CDN toggles fetching download links through the CDN exchange endpoint.
	CDN bool `json:"cdn" help:"If you enable this option, the download speed can be increased, but there will be some performance loss"`
}
|
||||||
|
|
||||||
|
// config describes the Quqi driver to the alist driver registry.
// NOTE(review): OnlyLocal presumably forces downloads to be proxied through
// alist rather than redirected — confirm against driver.Config docs.
var config = driver.Config{
	Name:      "Quqi",
	OnlyLocal: true,
	LocalSort: true,
	//NoUpload:    true,
	DefaultRoot: "0",
}
|
||||||
|
|
||||||
|
// init registers the Quqi driver constructor with the global driver registry.
func init() {
	op.RegisterDriver(func() driver.Driver {
		return &Quqi{}
	})
}
|
197
drivers/quqi/types.go
Normal file
197
drivers/quqi/types.go
Normal file
@ -0,0 +1,197 @@
|
|||||||
|
package quqi
|
||||||
|
|
||||||
|
// BaseReqQuery carries the group id as the "quqiid" query parameter.
type BaseReqQuery struct {
	ID string `json:"quqiid"`
}

// BaseReq carries the group id as the "quqi_id" form/body field.
type BaseReq struct {
	GroupID string `json:"quqi_id"`
}

// BaseRes is the common response envelope: Code ("err") is 0 on success,
// Message ("msg") describes the failure otherwise.
type BaseRes struct {
	//Data interface{} `json:"data"`
	Code    int    `json:"err"`
	Message string `json:"msg"`
}

// GroupRes is the response of the group-listing endpoint.
type GroupRes struct {
	BaseRes
	Data []*Group `json:"data"`
}

// ListRes is the response of the directory-listing endpoint.
type ListRes struct {
	BaseRes
	Data *List `json:"data"`
}

// GetDocRes is the response of /api/doc/getDoc; OriginPath is a preview URL.
type GetDocRes struct {
	BaseRes
	Data struct {
		OriginPath string `json:"origin_path"`
	} `json:"data"`
}

// GetDownloadResp is the response of /api/doc/getDownload.
type GetDownloadResp struct {
	BaseRes
	Data struct {
		Url string `json:"url"`
	} `json:"data"`
}

// MakeDirRes is the response of /api/dir/mkDir.
type MakeDirRes struct {
	BaseRes
	Data struct {
		IsRoot   bool  `json:"is_root"`
		NodeID   int64 `json:"node_id"`
		ParentID int64 `json:"parent_id"`
	} `json:"data"`
}

// MoveRes is the response of /api/dir/mvDir, describing the moved node.
type MoveRes struct {
	BaseRes
	Data struct {
		NodeChildNum int64  `json:"node_child_num"`
		NodeID       int64  `json:"node_id"`
		NodeName     string `json:"node_name"`
		ParentID     int64  `json:"parent_id"`
		GroupID      int64  `json:"quqi_id"`
		TreeID       int64  `json:"tree_id"`
	} `json:"data"`
}

// RenameRes is the response of /api/dir/renameDir.
type RenameRes struct {
	BaseRes
	Data struct {
		NodeID     int64  `json:"node_id"`
		GroupID    int64  `json:"quqi_id"`
		Rename     string `json:"rename"`
		TreeID     int64  `json:"tree_id"`
		UpdateTime int64  `json:"updatetime"`
	} `json:"data"`
}

// CopyRes is the (payload-free) response of /api/node/copy.
type CopyRes struct {
	BaseRes
}

// RemoveRes is the (payload-free) response of /api/node/del.
type RemoveRes struct {
	BaseRes
}
|
||||||
|
|
||||||
|
// Group describes one Quqi group (workspace) the account belongs to.
type Group struct {
	ID              int    `json:"quqi_id"`
	Type            int    `json:"type"`
	Name            string `json:"name"`
	IsAdministrator int    `json:"is_administrator"`
	Role            int    `json:"role"`
	Avatar          string `json:"avatar_url"`
	IsStick         int    `json:"is_stick"`
	Nickname        string `json:"nickname"`
	Status          int    `json:"status"`
}

// List is the payload of a directory listing: the directory itself plus its
// child directories and files.
type List struct {
	ListDir
	Dir  []*ListDir  `json:"dir"`
	File []*ListFile `json:"file"`
}

// ListItem holds fields common to both directory and file entries.
type ListItem struct {
	// AddTime and UpdateTime are Unix-second timestamps.
	AddTime        int64  `json:"add_time"`
	IsDir          int    `json:"is_dir"`
	IsExpand       int    `json:"is_expand"`
	IsFinalize     int    `json:"is_finalize"`
	LastEditorName string `json:"last_editor_name"`
	Name           string `json:"name"`
	NodeID         int64  `json:"nid"`
	ParentID       int64  `json:"parent_id"`
	Permission     int    `json:"permission"`
	TreeID         int64  `json:"tid"`
	UpdateCNT      int64  `json:"update_cnt"`
	UpdateTime     int64  `json:"update_time"`
}

// ListDir is a directory entry in a listing.
type ListDir struct {
	ListItem
	ChildDocNum int64  `json:"child_doc_num"`
	DirDetail   string `json:"dir_detail"`
	DirType     int    `json:"dir_type"`
}

// ListFile is a file entry in a listing. Name excludes the extension, which
// is carried separately in EXT.
type ListFile struct {
	ListItem
	BroadDocType       string `json:"broad_doc_type"`
	CanDisplay         bool   `json:"can_display"`
	Detail             string `json:"detail"`
	EXT                string `json:"ext"`
	Filetype           string `json:"filetype"`
	HasMobileThumbnail bool   `json:"has_mobile_thumbnail"`
	HasThumbnail       bool   `json:"has_thumbnail"`
	Size               int64  `json:"size"`
	Version            int    `json:"version"`
}
|
||||||
|
|
||||||
|
// UploadInitResp is the response of /api/upload/v1/file/init. When Exist is
// true the file is already stored server-side and no upload is needed;
// otherwise Bucket/Key/UploadID identify the COS multipart upload and
// Token/TaskID authorize the follow-up upload API calls.
type UploadInitResp struct {
	Data struct {
		Bucket   string `json:"bucket"`
		Exist    bool   `json:"exist"`
		Key      string `json:"key"`
		TaskID   string `json:"task_id"`
		Token    string `json:"token"`
		UploadID string `json:"upload_id"`
		URL      string `json:"url"`
		NodeID   int64  `json:"node_id"`
		// NodeName excludes the file extension.
		NodeName string `json:"node_name"`
		ParentID int64  `json:"parent_id"`
	} `json:"data"`
	Err int    `json:"err"`
	Msg string `json:"msg"`
}

// TempKeyResp is the response of /upload/v1/tempKey, carrying temporary
// Tencent COS credentials for the multipart part uploads.
type TempKeyResp struct {
	Err  int    `json:"err"`
	Msg  string `json:"msg"`
	Data struct {
		ExpiredTime int    `json:"expiredTime"`
		Expiration  string `json:"expiration"`
		Credentials struct {
			SessionToken string `json:"sessionToken"`
			TmpSecretID  string `json:"tmpSecretId"`
			TmpSecretKey string `json:"tmpSecretKey"`
		} `json:"credentials"`
		RequestID string `json:"requestId"`
		StartTime int    `json:"startTime"`
	} `json:"data"`
}

// UploadFinishResp is the response of /api/upload/v1/file/finish, describing
// the node created by the completed upload. NodeName excludes the extension.
type UploadFinishResp struct {
	Data struct {
		NodeID   int64  `json:"node_id"`
		NodeName string `json:"node_name"`
		ParentID int64  `json:"parent_id"`
		QuqiID   int64  `json:"quqi_id"`
		TreeID   int64  `json:"tree_id"`
	} `json:"data"`
	Err int    `json:"err"`
	Msg string `json:"msg"`
}

// UrlExchangeResp is the response of /preview/downloadInfo/url/exchange.
// When IsEncrypted is true, Url points at sio-encrypted content of
// EncryptedSize bytes that must be decrypted with EncryptedKey.
type UrlExchangeResp struct {
	BaseRes
	Data struct {
		Name         string `json:"name"`
		Mime         string `json:"mime"`
		Size         int64  `json:"size"`
		DownloadType int    `json:"download_type"`
		ChannelType  int    `json:"channel_type"`
		ChannelID    int    `json:"channel_id"`
		Url          string `json:"url"`
		// ExpiredTime is a Unix-second timestamp after which Url is invalid.
		ExpiredTime        int64  `json:"expired_time"`
		IsEncrypted        bool   `json:"is_encrypted"`
		EncryptedSize      int64  `json:"encrypted_size"`
		EncryptedAlg       string `json:"encrypted_alg"`
		EncryptedKey       string `json:"encrypted_key"`
		PassportID         int64  `json:"passport_id"`
		RequestExpiredTime int64  `json:"request_expired_time"`
	} `json:"data"`
}
|
316
drivers/quqi/util.go
Normal file
316
drivers/quqi/util.go
Normal file
@ -0,0 +1,316 @@
|
|||||||
|
package quqi
|
||||||
|
|
||||||
|
import (
	"bufio"
	"context"
	"encoding/base64"
	"errors"
	"fmt"
	"io"
	"net/http"
	"net/url"
	stdpath "path"
	"regexp"
	"strings"
	"time"

	"github.com/alist-org/alist/v3/drivers/base"
	"github.com/alist-org/alist/v3/internal/errs"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/internal/stream"
	"github.com/alist-org/alist/v3/pkg/http_range"
	"github.com/alist-org/alist/v3/pkg/utils"
	"github.com/go-resty/resty/v2"
	"github.com/minio/sio"
)
|
||||||
|
|
||||||
|
// do others that not defined in Driver interface
|
||||||
|
func (d *Quqi) request(host string, path string, method string, callback base.ReqCallback, resp interface{}) (*resty.Response, error) {
|
||||||
|
var (
|
||||||
|
reqUrl = url.URL{
|
||||||
|
Scheme: "https",
|
||||||
|
Host: "quqi.com",
|
||||||
|
Path: path,
|
||||||
|
}
|
||||||
|
req = base.RestyClient.R()
|
||||||
|
result BaseRes
|
||||||
|
)
|
||||||
|
|
||||||
|
if host != "" {
|
||||||
|
reqUrl.Host = host
|
||||||
|
}
|
||||||
|
req.SetHeaders(map[string]string{
|
||||||
|
"Origin": "https://quqi.com",
|
||||||
|
"Cookie": d.Cookie,
|
||||||
|
})
|
||||||
|
|
||||||
|
if d.GroupID != "" {
|
||||||
|
req.SetQueryParam("quqiid", d.GroupID)
|
||||||
|
}
|
||||||
|
|
||||||
|
if callback != nil {
|
||||||
|
callback(req)
|
||||||
|
}
|
||||||
|
|
||||||
|
res, err := req.Execute(method, reqUrl.String())
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
// resty.Request.SetResult cannot parse result correctly sometimes
|
||||||
|
err = utils.Json.Unmarshal(res.Body(), &result)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if result.Code != 0 {
|
||||||
|
return nil, errors.New(result.Message)
|
||||||
|
}
|
||||||
|
if resp != nil {
|
||||||
|
err = utils.Json.Unmarshal(res.Body(), resp)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return res, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Quqi) login() error {
|
||||||
|
if d.Addition.Cookie != "" {
|
||||||
|
d.Cookie = d.Addition.Cookie
|
||||||
|
}
|
||||||
|
if d.checkLogin() {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if d.Cookie != "" {
|
||||||
|
return errors.New("cookie is invalid")
|
||||||
|
}
|
||||||
|
if d.Phone == "" {
|
||||||
|
return errors.New("phone number is empty")
|
||||||
|
}
|
||||||
|
if d.Password == "" {
|
||||||
|
return errs.EmptyPassword
|
||||||
|
}
|
||||||
|
|
||||||
|
resp, err := d.request("", "/auth/person/v2/login/password", resty.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetFormData(map[string]string{
|
||||||
|
"phone": d.Phone,
|
||||||
|
"password": base64.StdEncoding.EncodeToString([]byte(d.Password)),
|
||||||
|
})
|
||||||
|
}, nil)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
var cookies []string
|
||||||
|
for _, cookie := range resp.RawResponse.Cookies() {
|
||||||
|
cookies = append(cookies, fmt.Sprintf("%s=%s", cookie.Name, cookie.Value))
|
||||||
|
}
|
||||||
|
d.Cookie = strings.Join(cookies, ";")
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Quqi) checkLogin() bool {
|
||||||
|
if _, err := d.request("", "/auth/account/baseInfo", resty.MethodGet, nil, nil); err != nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// rawExt returns name's file extension without the leading dot, preserving
// the original letter case (unlike helpers that normalize to lowercase).
// It returns "" when name has no extension.
func rawExt(name string) string {
	return strings.TrimPrefix(stdpath.Ext(name), ".")
}
|
||||||
|
|
||||||
|
// keyCharFilter matches every character outside the unpadded standard base64
// alphabet used by Quqi's encoded file keys.
var keyCharFilter = regexp.MustCompile(`[^A-Za-z0-9+/]`)

// decryptKey decodes the base64-encoded (standard alphabet, no padding) key
// returned by Quqi into the fixed 32-byte key used for sio decryption.
// Characters outside the alphabet are stripped before decoding; decoded data
// beyond 32 bytes is discarded and missing data leaves zero bytes.
//
// Fix: the previous implementation passed the pattern "[^A-Za-z0-9+\\/]" to
// strings.ReplaceAll, which performs a literal (non-regex) replacement and
// therefore never removed anything; an illegal character then consumed a
// position in the 4-character base64 group and corrupted the decode.
func decryptKey(encodeKey string) []byte {
	// Remove illegal characters with a real regular expression.
	u := keyCharFilter.ReplaceAllString(encodeKey, "")

	o := len(u) // number of base64 characters to decode
	a := 32     // fixed output length in bytes

	c := make([]byte, a)

	s := uint32(0) // 24-bit accumulator for the current 4-character group
	f := 0         // next write index into c
	for l := 0; l < o; l++ {
		r := l & 3 // position of this character within its 4-char group
		i := u[l]  // ASCII code of the current character

		// Map the character to its 6-bit value and merge it into the group.
		switch {
		case i >= 65 && i < 91: // 'A'-'Z' -> 0-25
			s |= uint32(i-65) << uint32(6*(3-r))
		case i >= 97 && i < 123: // 'a'-'z' -> 26-51
			s |= uint32(i-71) << uint32(6*(3-r))
		case i >= 48 && i < 58: // '0'-'9' -> 52-61
			s |= uint32(i+4) << uint32(6*(3-r))
		case i == 43: // '+' -> 62
			s |= uint32(62) << uint32(6*(3-r))
		case i == 47: // '/' -> 63
			s |= uint32(63) << uint32(6*(3-r))
		}

		// Once a 4-character group is complete (or input ends), emit up to
		// three bytes from the 24-bit accumulator: s>>16, s>>8, s>>0.
		if r == 3 || l == o-1 {
			for e := 0; e < 3 && f < a; e, f = e+1, f+1 {
				c[f] = byte(s >> (16 >> e & 24) & 255)
			}
			s = 0
		}
	}

	return c
}
|
||||||
|
|
||||||
|
func (d *Quqi) linkFromPreview(id string) (*model.Link, error) {
|
||||||
|
var getDocResp GetDocRes
|
||||||
|
if _, err := d.request("", "/api/doc/getDoc", resty.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetFormData(map[string]string{
|
||||||
|
"quqi_id": d.GroupID,
|
||||||
|
"tree_id": "1",
|
||||||
|
"node_id": id,
|
||||||
|
"client_id": d.ClientID,
|
||||||
|
})
|
||||||
|
}, &getDocResp); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if getDocResp.Data.OriginPath == "" {
|
||||||
|
return nil, errors.New("cannot get link from preview")
|
||||||
|
}
|
||||||
|
return &model.Link{
|
||||||
|
URL: getDocResp.Data.OriginPath,
|
||||||
|
Header: http.Header{
|
||||||
|
"Origin": []string{"https://quqi.com"},
|
||||||
|
"Cookie": []string{d.Cookie},
|
||||||
|
},
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Quqi) linkFromDownload(id string) (*model.Link, error) {
|
||||||
|
var getDownloadResp GetDownloadResp
|
||||||
|
if _, err := d.request("", "/api/doc/getDownload", resty.MethodGet, func(req *resty.Request) {
|
||||||
|
req.SetQueryParams(map[string]string{
|
||||||
|
"quqi_id": d.GroupID,
|
||||||
|
"tree_id": "1",
|
||||||
|
"node_id": id,
|
||||||
|
"url_type": "undefined",
|
||||||
|
"entry_type": "undefined",
|
||||||
|
"client_id": d.ClientID,
|
||||||
|
"no_redirect": "1",
|
||||||
|
})
|
||||||
|
}, &getDownloadResp); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if getDownloadResp.Data.Url == "" {
|
||||||
|
return nil, errors.New("cannot get link from download")
|
||||||
|
}
|
||||||
|
|
||||||
|
return &model.Link{
|
||||||
|
URL: getDownloadResp.Data.Url,
|
||||||
|
Header: http.Header{
|
||||||
|
"Origin": []string{"https://quqi.com"},
|
||||||
|
"Cookie": []string{d.Cookie},
|
||||||
|
},
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// linkFromCDN resolves a CDN download link for node id by exchanging the
// plain download URL at api.quqi.com. Unencrypted content is returned as a
// direct link; encrypted content gets a RangeReadCloser that streams the CDN
// bytes through a sio (DARE) decryptor, translating plaintext byte ranges
// into ciphertext package ranges.
func (d *Quqi) linkFromCDN(id string) (*model.Link, error) {
	downloadLink, err := d.linkFromDownload(id)
	if err != nil {
		return nil, err
	}

	var urlExchangeResp UrlExchangeResp
	if _, err = d.request("api.quqi.com", "/preview/downloadInfo/url/exchange", resty.MethodGet, func(req *resty.Request) {
		req.SetQueryParam("url", downloadLink.URL)
	}, &urlExchangeResp); err != nil {
		return nil, err
	}
	if urlExchangeResp.Data.Url == "" {
		return nil, errors.New("cannot get link from cdn")
	}

	// The exchanged URL may point at unencrypted content; hand it out as-is.
	if !urlExchangeResp.Data.IsEncrypted {
		return &model.Link{
			URL: urlExchangeResp.Data.Url,
			Header: http.Header{
				"Origin": []string{"https://quqi.com"},
				"Cookie": []string{d.Cookie},
			},
		}, nil
	}

	// Per the sio DARE format (https://github.com/minio/sio/blob/master/DARE.md)
	// and empirical testing:
	// 1. encrypted_size - size = (16-byte header + 16-byte auth tag) * N,
	//    where N is the number of encrypted packages.
	// 2. N = (size + 64*1024 - 1) / (64*1024), i.e. each package carries a
	//    64 KiB plaintext payload (except possibly the last).
	remoteClosers := utils.EmptyClosers()
	payloadSize := int64(1 << 16)
	expiration := time.Until(time.Unix(urlExchangeResp.Data.ExpiredTime, 0))
	resultRangeReader := func(ctx context.Context, httpRange http_range.Range) (io.ReadCloser, error) {
		// Map the requested plaintext range onto whole encrypted packages:
		// start at the package containing httpRange.Start, then skip
		// decryptedOffset plaintext bytes after decryption.
		encryptedOffset := httpRange.Start / payloadSize * (payloadSize + 32)
		decryptedOffset := httpRange.Start % payloadSize
		encryptedLength := (httpRange.Length+httpRange.Start+payloadSize-1)/payloadSize*(payloadSize+32) - encryptedOffset
		if httpRange.Length < 0 {
			// Negative length means "to end of file"; pass it through.
			encryptedLength = httpRange.Length
		} else {
			// When the request reaches the end of the plaintext or the
			// computed ciphertext range would overrun, read to the end.
			if httpRange.Length+httpRange.Start >= urlExchangeResp.Data.Size || encryptedLength+encryptedOffset >= urlExchangeResp.Data.EncryptedSize {
				encryptedLength = -1
			}
		}
		//log.Debugf("size: %d\tencrypted_size: %d", urlExchangeResp.Data.Size, urlExchangeResp.Data.EncryptedSize)
		//log.Debugf("http range offset: %d, length: %d", httpRange.Start, httpRange.Length)
		//log.Debugf("encrypted offset: %d, length: %d, decrypted offset: %d", encryptedOffset, encryptedLength, decryptedOffset)

		rrc, err := stream.GetRangeReadCloserFromLink(urlExchangeResp.Data.EncryptedSize, &model.Link{
			URL: urlExchangeResp.Data.Url,
			Header: http.Header{
				"Origin": []string{"https://quqi.com"},
				"Cookie": []string{d.Cookie},
			},
		})
		if err != nil {
			return nil, err
		}

		rc, err := rrc.RangeRead(ctx, http_range.Range{Start: encryptedOffset, Length: encryptedLength})
		// Track the remote closers even on error so the link's Closers can
		// release them later.
		remoteClosers.AddClosers(rrc.GetClosers())
		if err != nil {
			return nil, err
		}

		// SequenceNumber selects the first DARE package of this range so the
		// decryptor's counter lines up with the ciphertext we start at.
		decryptReader, err := sio.DecryptReader(rc, sio.Config{
			MinVersion:     sio.Version10,
			MaxVersion:     sio.Version20,
			CipherSuites:   []byte{sio.CHACHA20_POLY1305, sio.AES_256_GCM},
			Key:            decryptKey(urlExchangeResp.Data.EncryptedKey),
			SequenceNumber: uint32(httpRange.Start / payloadSize),
		})
		if err != nil {
			return nil, err
		}
		// Skip the plaintext bytes before the requested start.
		// NOTE(review): the Discard error is ignored; a short discard would
		// silently shift the returned data — consider checking it.
		bufferReader := bufio.NewReader(decryptReader)
		bufferReader.Discard(int(decryptedOffset))

		// The close func is a no-op; the underlying connections are closed
		// via the Closers attached to the returned Link below.
		return utils.NewReadCloser(bufferReader, func() error {
			return nil
		}), nil
	}

	return &model.Link{
		Header: http.Header{
			"Origin": []string{"https://quqi.com"},
			"Cookie": []string{d.Cookie},
		},
		RangeReadCloser: &model.RangeReadCloser{RangeReader: resultRangeReader, Closers: remoteClosers},
		Expiration:      &expiration,
	}, nil
}
|
62
drivers/s3/doge.go
Normal file
62
drivers/s3/doge.go
Normal file
@ -0,0 +1,62 @@
|
|||||||
|
package s3
|
||||||
|
|
||||||
|
import (
	"crypto/hmac"
	"crypto/sha1"
	"encoding/hex"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"strings"
	"time"
)
|
||||||
|
|
||||||
|
// TmpTokenResponse is the envelope returned by DogeCloud's
// /auth/tmp_token.json endpoint; Code is 200 on success.
type TmpTokenResponse struct {
	Code int                  `json:"code"`
	Msg  string               `json:"msg"`
	Data TmpTokenResponseData `json:"data,omitempty"`
}

// TmpTokenResponseData wraps the temporary credentials in the response.
type TmpTokenResponseData struct {
	Credentials Credentials `json:"Credentials"`
}

// Credentials holds temporary S3-compatible access credentials issued by
// DogeCloud.
type Credentials struct {
	AccessKeyId     string `json:"accessKeyId,omitempty"`
	SecretAccessKey string `json:"secretAccessKey,omitempty"`
	SessionToken    string `json:"sessionToken,omitempty"`
}
|
||||||
|
|
||||||
|
func getCredentials(AccessKey, SecretKey string) (rst Credentials, err error) {
|
||||||
|
apiPath := "/auth/tmp_token.json"
|
||||||
|
reqBody, err := json.Marshal(map[string]interface{}{"channel": "OSS_FULL", "scopes": []string{"*"}})
|
||||||
|
if err != nil {
|
||||||
|
return rst, err
|
||||||
|
}
|
||||||
|
|
||||||
|
signStr := apiPath + "\n" + string(reqBody)
|
||||||
|
hmacObj := hmac.New(sha1.New, []byte(SecretKey))
|
||||||
|
hmacObj.Write([]byte(signStr))
|
||||||
|
sign := hex.EncodeToString(hmacObj.Sum(nil))
|
||||||
|
Authorization := "TOKEN " + AccessKey + ":" + sign
|
||||||
|
|
||||||
|
req, err := http.NewRequest("POST", "https://api.dogecloud.com"+apiPath, strings.NewReader(string(reqBody)))
|
||||||
|
if err != nil {
|
||||||
|
return rst, err
|
||||||
|
}
|
||||||
|
req.Header.Add("Content-Type", "application/json")
|
||||||
|
req.Header.Add("Authorization", Authorization)
|
||||||
|
client := http.Client{}
|
||||||
|
resp, err := client.Do(req)
|
||||||
|
if err != nil {
|
||||||
|
return rst, err
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
ret, err := io.ReadAll(resp.Body)
|
||||||
|
if err != nil {
|
||||||
|
return rst, err
|
||||||
|
}
|
||||||
|
var tmpTokenResp TmpTokenResponse
|
||||||
|
err = json.Unmarshal(ret, &tmpTokenResp)
|
||||||
|
if err != nil {
|
||||||
|
return rst, err
|
||||||
|
}
|
||||||
|
return tmpTokenResp.Data.Credentials, nil
|
||||||
|
}
|
@ -4,13 +4,14 @@ import (
|
|||||||
"bytes"
|
"bytes"
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
"github.com/alist-org/alist/v3/internal/stream"
|
|
||||||
"io"
|
"io"
|
||||||
"net/url"
|
"net/url"
|
||||||
stdpath "path"
|
stdpath "path"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/alist-org/alist/v3/internal/stream"
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/internal/driver"
|
"github.com/alist-org/alist/v3/internal/driver"
|
||||||
"github.com/alist-org/alist/v3/internal/model"
|
"github.com/alist-org/alist/v3/internal/model"
|
||||||
"github.com/aws/aws-sdk-go/aws/session"
|
"github.com/aws/aws-sdk-go/aws/session"
|
||||||
@ -25,10 +26,12 @@ type S3 struct {
|
|||||||
Session *session.Session
|
Session *session.Session
|
||||||
client *s3.S3
|
client *s3.S3
|
||||||
linkClient *s3.S3
|
linkClient *s3.S3
|
||||||
|
|
||||||
|
config driver.Config
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *S3) Config() driver.Config {
|
func (d *S3) Config() driver.Config {
|
||||||
return config
|
return d.config
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *S3) GetAddition() driver.Additional {
|
func (d *S3) GetAddition() driver.Additional {
|
||||||
@ -104,7 +107,7 @@ func (d *S3) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) e
|
|||||||
},
|
},
|
||||||
Reader: io.NopCloser(bytes.NewReader([]byte{})),
|
Reader: io.NopCloser(bytes.NewReader([]byte{})),
|
||||||
Mimetype: "application/octet-stream",
|
Mimetype: "application/octet-stream",
|
||||||
}, func(int) {})
|
}, func(float64) {})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *S3) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
func (d *S3) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||||
|
@ -22,15 +22,25 @@ type Addition struct {
|
|||||||
AddFilenameToDisposition bool `json:"add_filename_to_disposition" help:"Add filename to Content-Disposition header."`
|
AddFilenameToDisposition bool `json:"add_filename_to_disposition" help:"Add filename to Content-Disposition header."`
|
||||||
}
|
}
|
||||||
|
|
||||||
var config = driver.Config{
|
|
||||||
Name: "S3",
|
|
||||||
DefaultRoot: "/",
|
|
||||||
LocalSort: true,
|
|
||||||
CheckStatus: true,
|
|
||||||
}
|
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
op.RegisterDriver(func() driver.Driver {
|
op.RegisterDriver(func() driver.Driver {
|
||||||
return &S3{}
|
return &S3{
|
||||||
|
config: driver.Config{
|
||||||
|
Name: "S3",
|
||||||
|
DefaultRoot: "/",
|
||||||
|
LocalSort: true,
|
||||||
|
CheckStatus: true,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
})
|
||||||
|
op.RegisterDriver(func() driver.Driver {
|
||||||
|
return &S3{
|
||||||
|
config: driver.Config{
|
||||||
|
Name: "Doge",
|
||||||
|
DefaultRoot: "/",
|
||||||
|
LocalSort: true,
|
||||||
|
CheckStatus: true,
|
||||||
|
},
|
||||||
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -21,13 +21,21 @@ import (
|
|||||||
// do others that not defined in Driver interface
|
// do others that not defined in Driver interface
|
||||||
|
|
||||||
func (d *S3) initSession() error {
|
func (d *S3) initSession() error {
|
||||||
|
var err error
|
||||||
|
accessKeyID, secretAccessKey, sessionToken := d.AccessKeyID, d.SecretAccessKey, d.SessionToken
|
||||||
|
if d.config.Name == "Doge" {
|
||||||
|
credentialsTmp, err := getCredentials(d.AccessKeyID, d.SecretAccessKey)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
accessKeyID, secretAccessKey, sessionToken = credentialsTmp.AccessKeyId, credentialsTmp.SecretAccessKey, credentialsTmp.SessionToken
|
||||||
|
}
|
||||||
cfg := &aws.Config{
|
cfg := &aws.Config{
|
||||||
Credentials: credentials.NewStaticCredentials(d.AccessKeyID, d.SecretAccessKey, d.SessionToken),
|
Credentials: credentials.NewStaticCredentials(accessKeyID, secretAccessKey, sessionToken),
|
||||||
Region: &d.Region,
|
Region: &d.Region,
|
||||||
Endpoint: &d.Endpoint,
|
Endpoint: &d.Endpoint,
|
||||||
S3ForcePathStyle: aws.Bool(d.ForcePathStyle),
|
S3ForcePathStyle: aws.Bool(d.ForcePathStyle),
|
||||||
}
|
}
|
||||||
var err error
|
|
||||||
d.Session, err = session.NewSession(cfg)
|
d.Session, err = session.NewSession(cfg)
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
@ -4,7 +4,6 @@ import (
|
|||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
"net/http"
|
"net/http"
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
@ -19,6 +18,7 @@ type Seafile struct {
|
|||||||
Addition
|
Addition
|
||||||
|
|
||||||
authorization string
|
authorization string
|
||||||
|
libraryMap map[string]*LibraryInfo
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Seafile) Config() driver.Config {
|
func (d *Seafile) Config() driver.Config {
|
||||||
@ -31,6 +31,8 @@ func (d *Seafile) GetAddition() driver.Additional {
|
|||||||
|
|
||||||
func (d *Seafile) Init(ctx context.Context) error {
|
func (d *Seafile) Init(ctx context.Context) error {
|
||||||
d.Address = strings.TrimSuffix(d.Address, "/")
|
d.Address = strings.TrimSuffix(d.Address, "/")
|
||||||
|
d.RootFolderPath = utils.FixAndCleanPath(d.RootFolderPath)
|
||||||
|
d.libraryMap = make(map[string]*LibraryInfo)
|
||||||
return d.getToken()
|
return d.getToken()
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -38,10 +40,37 @@ func (d *Seafile) Drop(ctx context.Context) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Seafile) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
func (d *Seafile) List(ctx context.Context, dir model.Obj, args model.ListArgs) (result []model.Obj, err error) {
|
||||||
path := dir.GetPath()
|
path := dir.GetPath()
|
||||||
|
if path == d.RootFolderPath {
|
||||||
|
libraries, err := d.listLibraries()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if path == "/" && d.RepoId == "" {
|
||||||
|
return utils.SliceConvert(libraries, func(f LibraryItemResp) (model.Obj, error) {
|
||||||
|
return &model.Object{
|
||||||
|
Name: f.Name,
|
||||||
|
Modified: time.Unix(f.Modified, 0),
|
||||||
|
Size: f.Size,
|
||||||
|
IsFolder: true,
|
||||||
|
}, nil
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
var repo *LibraryInfo
|
||||||
|
repo, path, err = d.getRepoAndPath(path)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if repo.Encrypted {
|
||||||
|
err = d.decryptLibrary(repo)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
var resp []RepoDirItemResp
|
var resp []RepoDirItemResp
|
||||||
_, err := d.request(http.MethodGet, fmt.Sprintf("/api2/repos/%s/dir/", d.Addition.RepoId), func(req *resty.Request) {
|
_, err = d.request(http.MethodGet, fmt.Sprintf("/api2/repos/%s/dir/", repo.Id), func(req *resty.Request) {
|
||||||
req.SetResult(&resp).SetQueryParams(map[string]string{
|
req.SetResult(&resp).SetQueryParams(map[string]string{
|
||||||
"p": path,
|
"p": path,
|
||||||
})
|
})
|
||||||
@ -63,9 +92,13 @@ func (d *Seafile) List(ctx context.Context, dir model.Obj, args model.ListArgs)
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (d *Seafile) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
func (d *Seafile) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||||
res, err := d.request(http.MethodGet, fmt.Sprintf("/api2/repos/%s/file/", d.Addition.RepoId), func(req *resty.Request) {
|
repo, path, err := d.getRepoAndPath(file.GetPath())
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
res, err := d.request(http.MethodGet, fmt.Sprintf("/api2/repos/%s/file/", repo.Id), func(req *resty.Request) {
|
||||||
req.SetQueryParams(map[string]string{
|
req.SetQueryParams(map[string]string{
|
||||||
"p": file.GetPath(),
|
"p": path,
|
||||||
"reuse": "1",
|
"reuse": "1",
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
@ -78,9 +111,14 @@ func (d *Seafile) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (d *Seafile) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
func (d *Seafile) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||||
_, err := d.request(http.MethodPost, fmt.Sprintf("/api2/repos/%s/dir/", d.Addition.RepoId), func(req *resty.Request) {
|
repo, path, err := d.getRepoAndPath(parentDir.GetPath())
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
path, _ = utils.JoinBasePath(path, dirName)
|
||||||
|
_, err = d.request(http.MethodPost, fmt.Sprintf("/api2/repos/%s/dir/", repo.Id), func(req *resty.Request) {
|
||||||
req.SetQueryParams(map[string]string{
|
req.SetQueryParams(map[string]string{
|
||||||
"p": filepath.Join(parentDir.GetPath(), dirName),
|
"p": path,
|
||||||
}).SetFormData(map[string]string{
|
}).SetFormData(map[string]string{
|
||||||
"operation": "mkdir",
|
"operation": "mkdir",
|
||||||
})
|
})
|
||||||
@ -89,22 +127,34 @@ func (d *Seafile) MakeDir(ctx context.Context, parentDir model.Obj, dirName stri
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (d *Seafile) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
func (d *Seafile) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||||
_, err := d.request(http.MethodPost, fmt.Sprintf("/api2/repos/%s/file/", d.Addition.RepoId), func(req *resty.Request) {
|
repo, path, err := d.getRepoAndPath(srcObj.GetPath())
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
dstRepo, dstPath, err := d.getRepoAndPath(dstDir.GetPath())
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
_, err = d.request(http.MethodPost, fmt.Sprintf("/api2/repos/%s/file/", repo.Id), func(req *resty.Request) {
|
||||||
req.SetQueryParams(map[string]string{
|
req.SetQueryParams(map[string]string{
|
||||||
"p": srcObj.GetPath(),
|
"p": path,
|
||||||
}).SetFormData(map[string]string{
|
}).SetFormData(map[string]string{
|
||||||
"operation": "move",
|
"operation": "move",
|
||||||
"dst_repo": d.Addition.RepoId,
|
"dst_repo": dstRepo.Id,
|
||||||
"dst_dir": dstDir.GetPath(),
|
"dst_dir": dstPath,
|
||||||
})
|
})
|
||||||
}, true)
|
}, true)
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Seafile) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
func (d *Seafile) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||||
_, err := d.request(http.MethodPost, fmt.Sprintf("/api2/repos/%s/file/", d.Addition.RepoId), func(req *resty.Request) {
|
repo, path, err := d.getRepoAndPath(srcObj.GetPath())
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
_, err = d.request(http.MethodPost, fmt.Sprintf("/api2/repos/%s/file/", repo.Id), func(req *resty.Request) {
|
||||||
req.SetQueryParams(map[string]string{
|
req.SetQueryParams(map[string]string{
|
||||||
"p": srcObj.GetPath(),
|
"p": path,
|
||||||
}).SetFormData(map[string]string{
|
}).SetFormData(map[string]string{
|
||||||
"operation": "rename",
|
"operation": "rename",
|
||||||
"newname": newName,
|
"newname": newName,
|
||||||
@ -114,31 +164,47 @@ func (d *Seafile) Rename(ctx context.Context, srcObj model.Obj, newName string)
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (d *Seafile) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
func (d *Seafile) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||||
_, err := d.request(http.MethodPost, fmt.Sprintf("/api2/repos/%s/file/", d.Addition.RepoId), func(req *resty.Request) {
|
repo, path, err := d.getRepoAndPath(srcObj.GetPath())
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
dstRepo, dstPath, err := d.getRepoAndPath(dstDir.GetPath())
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
_, err = d.request(http.MethodPost, fmt.Sprintf("/api2/repos/%s/file/", repo.Id), func(req *resty.Request) {
|
||||||
req.SetQueryParams(map[string]string{
|
req.SetQueryParams(map[string]string{
|
||||||
"p": srcObj.GetPath(),
|
"p": path,
|
||||||
}).SetFormData(map[string]string{
|
}).SetFormData(map[string]string{
|
||||||
"operation": "copy",
|
"operation": "copy",
|
||||||
"dst_repo": d.Addition.RepoId,
|
"dst_repo": dstRepo.Id,
|
||||||
"dst_dir": dstDir.GetPath(),
|
"dst_dir": dstPath,
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Seafile) Remove(ctx context.Context, obj model.Obj) error {
|
func (d *Seafile) Remove(ctx context.Context, obj model.Obj) error {
|
||||||
_, err := d.request(http.MethodDelete, fmt.Sprintf("/api2/repos/%s/file/", d.Addition.RepoId), func(req *resty.Request) {
|
repo, path, err := d.getRepoAndPath(obj.GetPath())
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
_, err = d.request(http.MethodDelete, fmt.Sprintf("/api2/repos/%s/file/", repo.Id), func(req *resty.Request) {
|
||||||
req.SetQueryParams(map[string]string{
|
req.SetQueryParams(map[string]string{
|
||||||
"p": obj.GetPath(),
|
"p": path,
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Seafile) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
func (d *Seafile) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||||
res, err := d.request(http.MethodGet, fmt.Sprintf("/api2/repos/%s/upload-link/", d.Addition.RepoId), func(req *resty.Request) {
|
repo, path, err := d.getRepoAndPath(dstDir.GetPath())
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
res, err := d.request(http.MethodGet, fmt.Sprintf("/api2/repos/%s/upload-link/", repo.Id), func(req *resty.Request) {
|
||||||
req.SetQueryParams(map[string]string{
|
req.SetQueryParams(map[string]string{
|
||||||
"p": dstDir.GetPath(),
|
"p": path,
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@ -150,7 +216,7 @@ func (d *Seafile) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
|
|||||||
_, err = d.request(http.MethodPost, u, func(req *resty.Request) {
|
_, err = d.request(http.MethodPost, u, func(req *resty.Request) {
|
||||||
req.SetFileReader("file", stream.GetName(), stream).
|
req.SetFileReader("file", stream.GetName(), stream).
|
||||||
SetFormData(map[string]string{
|
SetFormData(map[string]string{
|
||||||
"parent_dir": dstDir.GetPath(),
|
"parent_dir": path,
|
||||||
"replace": "1",
|
"replace": "1",
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
@ -11,7 +11,8 @@ type Addition struct {
|
|||||||
Address string `json:"address" required:"true"`
|
Address string `json:"address" required:"true"`
|
||||||
UserName string `json:"username" required:"true"`
|
UserName string `json:"username" required:"true"`
|
||||||
Password string `json:"password" required:"true"`
|
Password string `json:"password" required:"true"`
|
||||||
RepoId string `json:"repoId" required:"true"`
|
RepoId string `json:"repoId" required:"false"`
|
||||||
|
RepoPwd string `json:"repoPwd" required:"false"`
|
||||||
}
|
}
|
||||||
|
|
||||||
var config = driver.Config{
|
var config = driver.Config{
|
||||||
|
@ -1,14 +1,44 @@
|
|||||||
package seafile
|
package seafile
|
||||||
|
|
||||||
|
import "time"
|
||||||
|
|
||||||
type AuthTokenResp struct {
|
type AuthTokenResp struct {
|
||||||
Token string `json:"token"`
|
Token string `json:"token"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type RepoDirItemResp struct {
|
type RepoItemResp struct {
|
||||||
Id string `json:"id"`
|
Id string `json:"id"`
|
||||||
Type string `json:"type"` // dir, file
|
Type string `json:"type"` // repo, dir, file
|
||||||
Name string `json:"name"`
|
Name string `json:"name"`
|
||||||
Size int64 `json:"size"`
|
Size int64 `json:"size"`
|
||||||
Modified int64 `json:"mtime"`
|
Modified int64 `json:"mtime"`
|
||||||
Permission string `json:"permission"`
|
Permission string `json:"permission"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type LibraryItemResp struct {
|
||||||
|
RepoItemResp
|
||||||
|
OwnerContactEmail string `json:"owner_contact_email"`
|
||||||
|
OwnerName string `json:"owner_name"`
|
||||||
|
Owner string `json:"owner"`
|
||||||
|
ModifierEmail string `json:"modifier_email"`
|
||||||
|
ModifierContactEmail string `json:"modifier_contact_email"`
|
||||||
|
ModifierName string `json:"modifier_name"`
|
||||||
|
Virtual bool `json:"virtual"`
|
||||||
|
MtimeRelative string `json:"mtime_relative"`
|
||||||
|
Encrypted bool `json:"encrypted"`
|
||||||
|
Version int `json:"version"`
|
||||||
|
HeadCommitId string `json:"head_commit_id"`
|
||||||
|
Root string `json:"root"`
|
||||||
|
Salt string `json:"salt"`
|
||||||
|
SizeFormatted string `json:"size_formatted"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type RepoDirItemResp struct {
|
||||||
|
RepoItemResp
|
||||||
|
}
|
||||||
|
|
||||||
|
type LibraryInfo struct {
|
||||||
|
LibraryItemResp
|
||||||
|
decryptedTime time.Time
|
||||||
|
decryptedSuccess bool
|
||||||
|
}
|
@ -1,8 +1,13 @@
|
|||||||
package seafile
|
package seafile
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"github.com/alist-org/alist/v3/internal/errs"
|
||||||
|
"github.com/alist-org/alist/v3/pkg/utils"
|
||||||
|
"net/http"
|
||||||
"strings"
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/drivers/base"
|
"github.com/alist-org/alist/v3/drivers/base"
|
||||||
"github.com/go-resty/resty/v2"
|
"github.com/go-resty/resty/v2"
|
||||||
@ -60,3 +65,110 @@ func (d *Seafile) request(method string, pathname string, callback base.ReqCallb
|
|||||||
}
|
}
|
||||||
return res.Body(), nil
|
return res.Body(), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (d *Seafile) getRepoAndPath(fullPath string) (repo *LibraryInfo, path string, err error) {
|
||||||
|
libraryMap := d.libraryMap
|
||||||
|
repoId := d.Addition.RepoId
|
||||||
|
if repoId != "" {
|
||||||
|
if len(repoId) == 36 /* uuid */ {
|
||||||
|
for _, library := range libraryMap {
|
||||||
|
if library.Id == repoId {
|
||||||
|
return library, fullPath, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
var repoName string
|
||||||
|
str := fullPath[1:]
|
||||||
|
pos := strings.IndexRune(str, '/')
|
||||||
|
if pos == -1 {
|
||||||
|
repoName = str
|
||||||
|
} else {
|
||||||
|
repoName = str[:pos]
|
||||||
|
}
|
||||||
|
path = utils.FixAndCleanPath(fullPath[1+len(repoName):])
|
||||||
|
if library, ok := libraryMap[repoName]; ok {
|
||||||
|
return library, path, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil, "", errs.ObjectNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Seafile) listLibraries() (resp []LibraryItemResp, err error) {
|
||||||
|
repoId := d.Addition.RepoId
|
||||||
|
if repoId == "" {
|
||||||
|
_, err = d.request(http.MethodGet, "/api2/repos/", func(req *resty.Request) {
|
||||||
|
req.SetResult(&resp)
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
var oneResp LibraryItemResp
|
||||||
|
_, err = d.request(http.MethodGet, fmt.Sprintf("/api2/repos/%s/", repoId), func(req *resty.Request) {
|
||||||
|
req.SetResult(&oneResp)
|
||||||
|
})
|
||||||
|
if err == nil {
|
||||||
|
resp = append(resp, oneResp)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
libraryMap := make(map[string]*LibraryInfo)
|
||||||
|
var putLibraryMap func(library LibraryItemResp, index int)
|
||||||
|
putLibraryMap = func(library LibraryItemResp, index int) {
|
||||||
|
name := library.Name
|
||||||
|
if index > 0 {
|
||||||
|
name = fmt.Sprintf("%s (%d)", name, index)
|
||||||
|
}
|
||||||
|
if _, exist := libraryMap[name]; exist {
|
||||||
|
putLibraryMap(library, index+1)
|
||||||
|
} else {
|
||||||
|
libraryInfo := LibraryInfo{}
|
||||||
|
data, _ := utils.Json.Marshal(library)
|
||||||
|
_ = utils.Json.Unmarshal(data, &libraryInfo)
|
||||||
|
libraryMap[name] = &libraryInfo
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for _, library := range resp {
|
||||||
|
putLibraryMap(library, 0)
|
||||||
|
}
|
||||||
|
d.libraryMap = libraryMap
|
||||||
|
return resp, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var repoPwdNotConfigured = errors.New("library password not configured")
|
||||||
|
var repoPwdIncorrect = errors.New("library password is incorrect")
|
||||||
|
|
||||||
|
func (d *Seafile) decryptLibrary(repo *LibraryInfo) (err error) {
|
||||||
|
if !repo.Encrypted {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if d.RepoPwd == "" {
|
||||||
|
return repoPwdNotConfigured
|
||||||
|
}
|
||||||
|
now := time.Now()
|
||||||
|
decryptedTime := repo.decryptedTime
|
||||||
|
if repo.decryptedSuccess {
|
||||||
|
if now.Sub(decryptedTime).Minutes() <= 30 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if now.Sub(decryptedTime).Seconds() <= 10 {
|
||||||
|
return repoPwdIncorrect
|
||||||
|
}
|
||||||
|
}
|
||||||
|
var resp string
|
||||||
|
_, err = d.request(http.MethodPost, fmt.Sprintf("/api2/repos/%s/", repo.Id), func(req *resty.Request) {
|
||||||
|
req.SetResult(&resp).SetFormData(map[string]string{
|
||||||
|
"password": d.RepoPwd,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
repo.decryptedTime = time.Now()
|
||||||
|
if err != nil || !strings.Contains(resp, "success") {
|
||||||
|
repo.decryptedSuccess = false
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
repo.decryptedSuccess = true
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@ -11,6 +11,7 @@ type Addition struct {
|
|||||||
PrivateKey string `json:"private_key" type:"text"`
|
PrivateKey string `json:"private_key" type:"text"`
|
||||||
Password string `json:"password"`
|
Password string `json:"password"`
|
||||||
driver.RootPath
|
driver.RootPath
|
||||||
|
IgnoreSymlinkError bool `json:"ignore_symlink_error" default:"false" info:"Ignore symlink error"`
|
||||||
}
|
}
|
||||||
|
|
||||||
var config = driver.Config{
|
var config = driver.Config{
|
||||||
|
@ -30,6 +30,14 @@ func (d *SFTP) fileToObj(f os.FileInfo, dir string) (model.Obj, error) {
|
|||||||
}
|
}
|
||||||
_f, err := d.client.Stat(target)
|
_f, err := d.client.Stat(target)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
if d.IgnoreSymlinkError {
|
||||||
|
return &model.Object{
|
||||||
|
Name: f.Name(),
|
||||||
|
Size: f.Size(),
|
||||||
|
Modified: f.ModTime(),
|
||||||
|
IsFolder: f.IsDir(),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
// set basic info
|
// set basic info
|
||||||
|
@ -3,12 +3,12 @@ package teambition
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"errors"
|
"errors"
|
||||||
|
"github.com/alist-org/alist/v3/pkg/utils"
|
||||||
"net/http"
|
"net/http"
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/drivers/base"
|
"github.com/alist-org/alist/v3/drivers/base"
|
||||||
"github.com/alist-org/alist/v3/internal/driver"
|
"github.com/alist-org/alist/v3/internal/driver"
|
||||||
"github.com/alist-org/alist/v3/internal/model"
|
"github.com/alist-org/alist/v3/internal/model"
|
||||||
"github.com/alist-org/alist/v3/pkg/utils"
|
|
||||||
"github.com/go-resty/resty/v2"
|
"github.com/go-resty/resty/v2"
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -128,11 +128,23 @@ func (d *Teambition) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
|
|||||||
if d.UseS3UploadMethod {
|
if d.UseS3UploadMethod {
|
||||||
return d.newUpload(ctx, dstDir, stream, up)
|
return d.newUpload(ctx, dstDir, stream, up)
|
||||||
}
|
}
|
||||||
res, err := d.request("/api/v2/users/me", http.MethodGet, nil, nil)
|
var (
|
||||||
if err != nil {
|
token string
|
||||||
return err
|
err error
|
||||||
|
)
|
||||||
|
if d.isInternational() {
|
||||||
|
res, err := d.request("/projects", http.MethodGet, nil, nil)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
token = getBetweenStr(string(res), "strikerAuth":"", "","phoneForLogin")
|
||||||
|
} else {
|
||||||
|
res, err := d.request("/api/v2/users/me", http.MethodGet, nil, nil)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
token = utils.Json.Get(res, "strikerAuth").ToString()
|
||||||
}
|
}
|
||||||
token := utils.Json.Get(res, "strikerAuth").ToString()
|
|
||||||
var newFile *FileUpload
|
var newFile *FileUpload
|
||||||
if stream.GetSize() <= 20971520 {
|
if stream.GetSize() <= 20971520 {
|
||||||
// post upload
|
// post upload
|
||||||
|
18
drivers/teambition/help.go
Normal file
18
drivers/teambition/help.go
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
package teambition
|
||||||
|
|
||||||
|
import "strings"
|
||||||
|
|
||||||
|
func getBetweenStr(str, start, end string) string {
|
||||||
|
n := strings.Index(str, start)
|
||||||
|
if n == -1 {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
n = n + len(start)
|
||||||
|
str = string([]byte(str)[n:])
|
||||||
|
m := strings.Index(str, end)
|
||||||
|
if m == -1 {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
str = string([]byte(str)[:m])
|
||||||
|
return str
|
||||||
|
}
|
@ -126,19 +126,20 @@ func (d *Teambition) upload(ctx context.Context, file model.FileStreamer, token
|
|||||||
prefix = "us-tcs"
|
prefix = "us-tcs"
|
||||||
}
|
}
|
||||||
var newFile FileUpload
|
var newFile FileUpload
|
||||||
_, err := base.RestyClient.R().
|
res, err := base.RestyClient.R().
|
||||||
SetContext(ctx).
|
SetContext(ctx).
|
||||||
SetResult(&newFile).SetHeader("Authorization", token).
|
SetResult(&newFile).SetHeader("Authorization", token).
|
||||||
SetMultipartFormData(map[string]string{
|
SetMultipartFormData(map[string]string{
|
||||||
"name": file.GetName(),
|
"name": file.GetName(),
|
||||||
"type": file.GetMimetype(),
|
"type": file.GetMimetype(),
|
||||||
"size": strconv.FormatInt(file.GetSize(), 10),
|
"size": strconv.FormatInt(file.GetSize(), 10),
|
||||||
//"lastModifiedDate": "",
|
"lastModifiedDate": time.Now().Format("Mon Jan 02 2006 15:04:05 GMT+0800 (中国标准时间)"),
|
||||||
}).SetMultipartField("file", file.GetName(), file.GetMimetype(), file).
|
}).SetMultipartField("file", file.GetName(), file.GetMimetype(), file).
|
||||||
Post(fmt.Sprintf("https://%s.teambition.net/upload", prefix))
|
Post(fmt.Sprintf("https://%s.teambition.net/upload", prefix))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
log.Debugf("[teambition] upload response: %s", res.String())
|
||||||
return &newFile, nil
|
return &newFile, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -189,7 +190,7 @@ func (d *Teambition) chunkUpload(ctx context.Context, file model.FileStreamer, t
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
up(i * 100 / newChunk.Chunks)
|
up(float64(i) * 100 / float64(newChunk.Chunks))
|
||||||
}
|
}
|
||||||
_, err = base.RestyClient.R().SetHeader("Authorization", token).Post(
|
_, err = base.RestyClient.R().SetHeader("Authorization", token).Post(
|
||||||
fmt.Sprintf("https://%s.teambition.net/upload/chunk/%s",
|
fmt.Sprintf("https://%s.teambition.net/upload/chunk/%s",
|
||||||
@ -243,6 +244,9 @@ func (d *Teambition) newUpload(ctx context.Context, dstDir model.Obj, stream mod
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
uploader := s3manager.NewUploader(ss)
|
uploader := s3manager.NewUploader(ss)
|
||||||
|
if stream.GetSize() > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
|
||||||
|
uploader.PartSize = stream.GetSize() / (s3manager.MaxUploadParts - 1)
|
||||||
|
}
|
||||||
input := &s3manager.UploadInput{
|
input := &s3manager.UploadInput{
|
||||||
Bucket: &uploadToken.Upload.Bucket,
|
Bucket: &uploadToken.Upload.Bucket,
|
||||||
Key: &uploadToken.Upload.Key,
|
Key: &uploadToken.Upload.Key,
|
||||||
|
@ -213,7 +213,7 @@ func (d *Terabox) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
|
|||||||
}
|
}
|
||||||
log.Debugln(res.String())
|
log.Debugln(res.String())
|
||||||
if len(precreateResp.BlockList) > 0 {
|
if len(precreateResp.BlockList) > 0 {
|
||||||
up(i * 100 / len(precreateResp.BlockList))
|
up(float64(i) * 100 / float64(len(precreateResp.BlockList)))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
_, err = d.create(rawPath, stream.GetSize(), 0, precreateResp.Uploadid, block_list_str)
|
_, err = d.create(rawPath, stream.GetSize(), 0, precreateResp.Uploadid, block_list_str)
|
||||||
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user