Compare commits
257 Commits
SHA1 | Author | Date |
---|---|---|---|
226c34929a | |||
027edcbe53 | |||
fd51f34efa | |||
bdd9774aa7 | |||
258b8f520f | |||
99f39410f2 | |||
267120a8c8 | |||
5eff8cc7bf | |||
d5ec998699 | |||
23f3178f39 | |||
cafdb4d407 | |||
0d4c63e9ff | |||
5c5d8378e5 | |||
2be0c3d1a0 | |||
bdcf450203 | |||
c2633dd443 | |||
11b6a6012f | |||
59e02287b2 | |||
bb40e2e2cd | |||
ab22cf8233 | |||
880cc7abca | |||
b60da9732f | |||
e04114d102 | |||
51bcf83511 | |||
25b4b55ee1 | |||
6812ec9a6d | |||
31a7470865 | |||
687124c81d | |||
e4439e66b9 | |||
7fd4ac7851 | |||
6745dcc139 | |||
aa1082a56c | |||
ed149be84b | |||
040dc14ee6 | |||
4dce53d72b | |||
365fc40dfe | |||
5994c17b4e | |||
42243b1517 | |||
48916cdedf | |||
5ecf5e823c | |||
c218b5701e | |||
77d0c78bfd | |||
db5c601cfe | |||
221cdf3611 | |||
40b0e66efe | |||
b72e85a73a | |||
6aaf5975c6 | |||
bb2aec20e4 | |||
d7aa1608ac | |||
db99224126 | |||
b8bd14f99b | |||
331885ed64 | |||
cf58ab3a78 | |||
33ba7f1521 | |||
201e25c17f | |||
ecefa5e0eb | |||
650b03aeb1 | |||
7341846499 | |||
a3908fd9a6 | |||
2a035302b2 | |||
016e169c41 | |||
088120df82 | |||
aa45a82914 | |||
5084d98398 | |||
fa15c576f0 | |||
2d3605c684 | |||
492b49d77a | |||
94915b2148 | |||
2dec756f23 | |||
4c0cffd29b | |||
25c5e075a9 | |||
398c04386a | |||
12b429584e | |||
150dcc2147 | |||
0ba754fd40 | |||
28d2367a87 | |||
a4ad98ee3e | |||
1c01dc6839 | |||
c3c5843dce | |||
6c38c5972d | |||
0a46979c51 | |||
67c93eed2b | |||
f58de9923a | |||
2671c876f1 | |||
e707fa38f1 | |||
b803b0070e | |||
64ceb5afb6 | |||
10c7ebb1c0 | |||
d0cda62703 | |||
ce0b99a510 | |||
34a148c83d | |||
4955d8cec8 | |||
216e3909f3 | |||
a701432b8b | |||
a2dc45a80b | |||
48ac23c8de | |||
2830575490 | |||
e8538bd215 | |||
c3e43ff605 | |||
5f19d73fcc | |||
bdf4b52885 | |||
6106a2d4cc | |||
b6451451b1 | |||
f06d2c0348 | |||
b7ae56b109 | |||
5d9167d676 | |||
1b42b9627c | |||
bb58b94a10 | |||
ffce61d227 | |||
0310b70d90 | |||
73f0b135b6 | |||
8316f81e41 | |||
cdbfda8921 | |||
9667832b32 | |||
b36d38f63f | |||
c8317250c1 | |||
0242f36e1c | |||
40a68bcee6 | |||
92713ef5c4 | |||
716d33fddd | |||
c9fa3d7cd6 | |||
4874c9e43b | |||
34ada81582 | |||
ba716ae325 | |||
d4f9c4b6af | |||
b910b8917f | |||
d92744e673 | |||
868b0ec25c | |||
e21edf98e2 | |||
d2514d236f | |||
34b6785fab | |||
48f50a2ceb | |||
74887922b4 | |||
bcb24d61ea | |||
db1494455d | |||
d9a1809313 | |||
0715198c7f | |||
ef5e192c3b | |||
489b28bdf7 | |||
18176c659c | |||
4c48a816bf | |||
9af7aaab59 | |||
a54a09314f | |||
e2fcd73720 | |||
e238b90836 | |||
69e5b66b50 | |||
e8e6d71c41 | |||
4ba476e25c | |||
e5fe9ea5f6 | |||
e1906c9312 | |||
51c95ee117 | |||
1f652e2e7d | |||
8e6c1aa78d | |||
6bff5b6107 | |||
94937db491 | |||
3dc250cc37 | |||
9560799175 | |||
8f3c5b1587 | |||
285125d06a | |||
a26185fe05 | |||
a7efa3a676 | |||
d596ef5c38 | |||
34e34ef564 | |||
8032d0afb6 | |||
d3bc8993ee | |||
62ed169a39 | |||
979d0cfeee | |||
29165d8e60 | |||
2d77db6bc2 | |||
74f8295960 | |||
f2727095d9 | |||
d4285b7c6c | |||
2e4265a778 | |||
81258d3e8a | |||
a6bead90d7 | |||
87caaf2459 | |||
af9c6afd25 | |||
8b5727a0aa | |||
aeae47c9bf | |||
1aff758688 | |||
4a42bc5083 | |||
5fa70e4010 | |||
d4e3355f56 | |||
94f257e557 | |||
e5f53d6dee | |||
cbd4bef814 | |||
2d57529e77 | |||
2b74999703 | |||
fe081d0ebc | |||
5ef7a27be3 | |||
c9a18f4de6 | |||
f2a24881d0 | |||
cee00005ab | |||
049575b5a5 | |||
a93937f80d | |||
488ebaa1af | |||
8278d3875b | |||
736ba44031 | |||
a6ff6a94df | |||
17f78b948a | |||
fe1040a367 | |||
83048e6c7c | |||
9128647970 | |||
9629705100 | |||
cd663f78af | |||
3c483ace4f | |||
3e949fcf33 | |||
81b0afc349 | |||
a04da3ec50 | |||
9e0482afbb | |||
9de40f8976 | |||
ba4df55d6e | |||
de8d2d6dc0 | |||
65b423c503 | |||
ff20b5a6fb | |||
37d86ff55c | |||
4e1c67617f | |||
9bc2d340a2 | |||
60fc416d8f | |||
99c9632cdc | |||
2fb772c888 | |||
87192ad07d | |||
3746831384 | |||
80d4fbb870 | |||
92c65b450e | |||
213fc0232e | |||
33be44adad | |||
ca0d66bd01 | |||
3a3d0adfa0 | |||
ca30849e24 | |||
316f3569a5 | |||
2705877235 | |||
432901db5a | |||
227d034db8 | |||
453d7da622 | |||
29fe49fb87 | |||
fcf2683112 | |||
3a996a1a3a | |||
1b14d33b9f | |||
639b7817bf | |||
163af0515f | |||
8e2b9c681a | |||
0a8d710e01 | |||
d781f7127a | |||
85d743c5d2 | |||
5f60b51cf8 | |||
7013d1b7b8 | |||
9eec872637 | |||
037850bbd5 | |||
bbe3d4e19f | |||
78a9676c7c | |||
8bf93562eb | |||
b57afd0a98 | |||
f261ef50cc | |||
7e7b9b9b48 | |||
2313213f59 | |||
5f28532423 |

.github/ISSUE_TEMPLATE/config.yml (vendored, 2 lines changed)

```yaml
@@ -1,5 +1,5 @@
blank_issues_enabled: false
contact_links:
  - name: Questions & Discussions
    url: https://github.com/Xhofe/alist/discussions
    url: https://github.com/alist-org/alist/discussions
    about: Use GitHub discussions for message-board style questions and discussions.
```

.github/workflows/beta_release.yml (vendored, new file, 139 lines)

```yaml
@@ -0,0 +1,139 @@
name: beta release

on:
  push:
    branches: [ 'main' ]

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

permissions:
  contents: write

jobs:
  changelog:
    strategy:
      matrix:
        platform: [ ubuntu-latest ]
        go-version: [ '1.21' ]
    name: Beta Release Changelog
    runs-on: ${{ matrix.platform }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Create or update ref
        id: create-or-update-ref
        uses: ovsds/create-or-update-ref-action@v1
        with:
          ref: tags/beta
          sha: ${{ github.sha }}

      - name: Delete beta tag
        run: git tag -d beta
        continue-on-error: true

      - name: changelog # or changelogithub@0.12 if ensure the stable result
        id: changelog
        run: |
          git tag -l
          npx changelogithub --output CHANGELOG.md
          # npx changelogen@latest --output CHANGELOG.md

      - name: Upload assets
        uses: softprops/action-gh-release@v2
        with:
          body_path: CHANGELOG.md
          files: CHANGELOG.md
          prerelease: true
          tag_name: beta

  release:
    needs:
      - changelog
    strategy:
      matrix:
        include:
          - target: '!(*musl*|*windows-arm64*|*android*|*freebsd*)' # xgo
            hash: "md5"
          - target: 'linux-!(arm*)-musl*' #musl-not-arm
            hash: "md5-linux-musl"
          - target: 'linux-arm*-musl*' #musl-arm
            hash: "md5-linux-musl-arm"
          - target: 'windows-arm64' #win-arm64
            hash: "md5-windows-arm64"
          - target: 'android-*' #android
            hash: "md5-android"
          - target: 'freebsd-*' #freebsd
            hash: "md5-freebsd"

    name: Beta Release
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup Go
        uses: actions/setup-go@v5
        with:
          go-version: '1.22'

      - name: Setup web
        run: bash build.sh dev web

      - name: Build
        uses: go-cross/cgo-actions@v1
        with:
          targets: ${{ matrix.target }}
          musl-target-format: $os-$musl-$arch
          out-dir: build
          x-flags: |
            github.com/alist-org/alist/v3/internal/conf.BuiltAt=$built_at
            github.com/alist-org/alist/v3/internal/conf.GoVersion=$go_version
            github.com/alist-org/alist/v3/internal/conf.GitAuthor=Xhofe
            github.com/alist-org/alist/v3/internal/conf.GitCommit=$git_commit
            github.com/alist-org/alist/v3/internal/conf.Version=$tag
            github.com/alist-org/alist/v3/internal/conf.WebVersion=dev

      - name: Compress
        run: |
          bash build.sh zip ${{ matrix.hash }}

      - name: Upload assets
        uses: softprops/action-gh-release@v2
        with:
          files: build/compress/*
          prerelease: true
          tag_name: beta

  desktop:
    needs:
      - release
    name: Beta Release Desktop
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4
        with:
          repository: alist-org/desktop-release
          ref: main
          persist-credentials: false
          fetch-depth: 0

      - name: Commit
        run: |
          git config --local user.email "bot@nn.ci"
          git config --local user.name "IlaBot"
          git commit --allow-empty -m "Trigger build for ${{ github.sha }}"

      - name: Push commit
        uses: ad-m/github-push-action@master
        with:
          github_token: ${{ secrets.MY_TOKEN }}
          branch: main
          repository: alist-org/desktop-release
```
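The beta workflow reduces to a handful of commands that can also be run by hand. A rough local sketch of one matrix entry, assuming Node and Go toolchains are installed (the binaries themselves are produced by the cgo-actions step in CI, so only the web assets and packaging are shown):

```bash
# Regenerate the beta changelog the way the changelog job does.
git fetch --tags
git tag -d beta || true              # drop the floating beta tag first
npx changelogithub --output CHANGELOG.md

# Assemble web assets, then package whatever is under build/ for upload.
bash build.sh dev web
bash build.sh zip md5                # same call as the Compress step, hash name from the matrix
```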

.github/workflows/build.yml (vendored, 2 lines changed)

```yaml
@@ -27,7 +27,7 @@ jobs:
      - name: Checkout
        uses: actions/checkout@v4

      - uses: benjlevesque/short-sha@v2.2
      - uses: benjlevesque/short-sha@v3.0
        id: short-sha

      - name: Install dependencies
```

.github/workflows/build_docker.yml (vendored, 117 lines, file removed)

```yaml
@@ -1,117 +0,0 @@
name: build_docker

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  build_docker:
    name: Build Docker
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: xhofe/alist

      - name: Docker meta with ffmpeg
        id: meta-ffmpeg
        uses: docker/metadata-action@v5
        with:
          images: xhofe/alist
          flavor: |
            suffix=-ffmpeg,onlatest=true

      - uses: actions/setup-go@v5
        with:
          go-version: 'stable'

      - name: Cache Musl
        id: cache-musl
        uses: actions/cache@v4
        with:
          path: build/musl-libs
          key: docker-musl-libs

      - name: Download Musl Library
        if: steps.cache-musl.outputs.cache-hit != 'true'
        run: bash build.sh prepare docker-multiplatform

      - name: Build go binary
        run: bash build.sh dev docker-multiplatform

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to DockerHub
        if: github.event_name == 'push'
        uses: docker/login-action@v3
        with:
          username: xhofe
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Build and push
        id: docker_build
        uses: docker/build-push-action@v5
        with:
          context: .
          file: Dockerfile.ci
          push: ${{ github.event_name == 'push' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x

      - name: Replace dockerfile tag
        run: |
          sed -i -e "s/latest/main/g" Dockerfile.ffmpeg

      - name: Build and push with ffmpeg
        id: docker_build_ffmpeg
        uses: docker/build-push-action@v5
        with:
          context: .
          file: Dockerfile.ffmpeg
          push: ${{ github.event_name == 'push' }}
          tags: ${{ steps.meta-ffmpeg.outputs.tags }}
          labels: ${{ steps.meta-ffmpeg.outputs.labels }}
          platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x

  build_docker_with_aria2:
    needs: build_docker
    name: Build docker with aria2
    runs-on: ubuntu-latest
    if: github.event_name == 'push'
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4
        with:
          repository: alist-org/with_aria2
          ref: main
          persist-credentials: false
          fetch-depth: 0

      - name: Commit
        run: |
          git config --local user.email "bot@nn.ci"
          git config --local user.name "IlaBot"
          git commit --allow-empty -m "Trigger build for ${{ github.sha }}"

      - name: Push commit
        uses: ad-m/github-push-action@master
        with:
          github_token: ${{ secrets.MY_TOKEN }}
          branch: main
          repository: alist-org/with_aria2
```

.github/workflows/changelog.yml (vendored, 7 lines changed)

```yaml
@@ -3,7 +3,7 @@ name: auto changelog
on:
  push:
    tags:
      - '*'
      - 'v*'

jobs:
  changelog:
@@ -14,6 +14,11 @@ jobs:
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Delete beta tag
        run: git tag -d beta
        continue-on-error: true

      - run: npx changelogithub # or changelogithub@0.12 if ensure the stable result
        env:
          GITHUB_TOKEN: ${{secrets.MY_TOKEN}}
```
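The added "Delete beta tag" step matters because changelogithub derives the release range from the most recent tags; if the floating `beta` tag were still present in the checkout it could be picked up as the previous release. A minimal sketch of the same sequence run by hand:

```bash
git tag -d beta || true   # ignore the error if the beta tag does not exist locally
npx changelogithub        # GITHUB_TOKEN must be exported, as in the workflow env
```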

.github/workflows/issue_question.yml (vendored, 2 lines changed)

```yaml
@@ -10,7 +10,7 @@ jobs:
    if: github.event.label.name == 'question'
    steps:
      - name: Create comment
        uses: actions-cool/issues-helper@v3.5.2
        uses: actions-cool/issues-helper@v3.6.0
        with:
          actions: 'create-comment'
          token: ${{ secrets.GITHUB_TOKEN }}
```

.github/workflows/release.yml (vendored, 19 lines changed)

```yaml
@@ -13,6 +13,23 @@ jobs:
    name: Release
    runs-on: ${{ matrix.platform }}
    steps:

      - name: Free Disk Space (Ubuntu)
        uses: jlumbroso/free-disk-space@main
        with:
          # this might remove tools that are actually needed,
          # if set to "true" but frees about 6 GB
          tool-cache: false

          # all of these default to true, but feel free to set to
          # "false" if necessary for your workflow
          android: true
          dotnet: true
          haskell: true
          large-packages: true
          docker-images: true
          swap-storage: true

      - name: Prerelease
        uses: irongut/EditRelease@v1.2.0
        with:
@@ -42,7 +59,7 @@ jobs:
          bash build.sh release

      - name: Upload assets
        uses: softprops/action-gh-release@v1
        uses: softprops/action-gh-release@v2
        with:
          files: build/compress/*
          prerelease: false
```
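The free-disk-space step is there because cross-building every release target can run a hosted runner out of disk. Its options map to roughly this kind of manual cleanup; the exact commands are internal to the action, so the sketch below is only illustrative:

```bash
sudo rm -rf /usr/local/lib/android            # android: true  (preinstalled Android SDK)
sudo docker image prune --all --force         # docker-images: true
sudo swapoff -a && sudo rm -f /mnt/swapfile   # swap-storage: true
```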

.github/workflows/release_android.yml (vendored, 2 lines changed)

```yaml
@@ -29,6 +29,6 @@ jobs:
          bash build.sh release android

      - name: Upload assets
        uses: softprops/action-gh-release@v1
        uses: softprops/action-gh-release@v2
        with:
          files: build/compress/*
```

.github/workflows/release_docker.yml (vendored, 150 lines changed)

```yaml
@@ -3,11 +3,35 @@ name: release_docker
on:
  push:
    tags:
      - '*'
      - 'v*'
    branches:
      - main
  pull_request:
    branches:
      - main

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

env:
  REGISTRY: 'xhofe/alist'
  REGISTRY_USERNAME: 'xhofe'
  REGISTRY_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
  ARTIFACT_NAME: 'binaries_docker_release'
  RELEASE_PLATFORMS: 'linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x,linux/ppc64le,linux/riscv64'
  IMAGE_PUSH: ${{ github.event_name == 'push' }}
  IMAGE_IS_PROD: ${{ github.ref_type == 'tag' }}
  IMAGE_TAGS_BETA: |
    type=schedule
    type=ref,event=branch
    type=ref,event=tag
    type=ref,event=pr
    type=raw,value=beta,enable={{is_default_branch}}

jobs:
  release_docker:
    name: Release Docker
  build_binary:
    name: Build Binaries for Docker Release
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
@@ -22,20 +46,59 @@ jobs:
        uses: actions/cache@v4
        with:
          path: build/musl-libs
          key: docker-musl-libs
          key: docker-musl-libs-v2

      - name: Download Musl Library
        if: steps.cache-musl.outputs.cache-hit != 'true'
        run: bash build.sh prepare docker-multiplatform

      - name: Build go binary
      - name: Build go binary (beta)
        if: env.IMAGE_IS_PROD != 'true'
        run: bash build.sh beta docker-multiplatform

      - name: Build go binary (release)
        if: env.IMAGE_IS_PROD == 'true'
        run: bash build.sh release docker-multiplatform

      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          images: xhofe/alist
          name: ${{ env.ARTIFACT_NAME }}
          overwrite: true
          path: |
            build/
            !build/*.tgz
            !build/musl-libs/**

  release_docker:
    needs: build_binary
    name: Release Docker image
    runs-on: ubuntu-latest
    strategy:
      matrix:
        image: ["latest", "ffmpeg", "aria2", "aio"]
        include:
          - image: "latest"
            build_arg: ""
            tag_favor: ""
          - image: "ffmpeg"
            build_arg: INSTALL_FFMPEG=true
            tag_favor: "suffix=-ffmpeg,onlatest=true"
          - image: "aria2"
            build_arg: INSTALL_ARIA2=true
            tag_favor: "suffix=-aria2,onlatest=true"
          - image: "aio"
            build_arg: |
              INSTALL_FFMPEG=true
              INSTALL_ARIA2=true
            tag_favor: "suffix=-aio,onlatest=true"
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - uses: actions/download-artifact@v4
        with:
          name: ${{ env.ARTIFACT_NAME }}
          path: 'build/'

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
@@ -44,64 +107,31 @@ jobs:
        uses: docker/setup-buildx-action@v3

      - name: Login to DockerHub
        if: env.IMAGE_PUSH == 'true'
        uses: docker/login-action@v3
        with:
          username: xhofe
          password: ${{ secrets.DOCKERHUB_TOKEN }}
          logout: true
          username: ${{ env.REGISTRY_USERNAME }}
          password: ${{ env.REGISTRY_PASSWORD }}

      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}
          tags: ${{ env.IMAGE_IS_PROD == 'true' && '' || env.IMAGE_TAGS_BETA }}
          flavor: |
            ${{ env.IMAGE_IS_PROD == 'true' && 'latest=true' || '' }}
            ${{ matrix.tag_favor }}

      - name: Build and push
        id: docker_build
        uses: docker/build-push-action@v5
        uses: docker/build-push-action@v6
        with:
          context: .
          file: Dockerfile.ci
          push: true
          push: ${{ env.IMAGE_PUSH == 'true' }}
          build-args: ${{ matrix.build_arg }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x

      - name: Docker meta with ffmpeg
        id: meta-ffmpeg
        uses: docker/metadata-action@v5
        with:
          images: xhofe/alist
          flavor: |
            latest=true
            suffix=-ffmpeg,onlatest=true

      - name: Build and push with ffmpeg
        id: docker_build_ffmpeg
        uses: docker/build-push-action@v5
        with:
          context: .
          file: Dockerfile.ffmpeg
          push: true
          tags: ${{ steps.meta-ffmpeg.outputs.tags }}
          labels: ${{ steps.meta-ffmpeg.outputs.labels }}
          platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x

  release_docker_with_aria2:
    needs: release_docker
    name: Release docker with aria2
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4
        with:
          repository: alist-org/with_aria2
          ref: main
          persist-credentials: false
          fetch-depth: 0

      - name: Add tag
        run: |
          git config --local user.email "bot@nn.ci"
          git config --local user.name "IlaBot"
          git tag -a ${{ github.ref_name }} -m "release ${{ github.ref_name }}"

      - name: Push tags
        uses: ad-m/github-push-action@master
        with:
          github_token: ${{ secrets.MY_TOKEN }}
          branch: main
          repository: alist-org/with_aria2
          platforms: ${{ env.RELEASE_PLATFORMS }}
```
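Each matrix entry is the same Dockerfile.ci built with a different set of build arguments. A hedged sketch of reproducing the `aio` variant locally, assuming the Go binaries already sit under `build/` as the `build_binary` job lays them out (image tag and platform are illustrative):

```bash
# Prepare toolchains and cross-compile the binaries the image will copy in.
bash build.sh prepare docker-multiplatform
bash build.sh beta docker-multiplatform

# Build one platform of the all-in-one image from the CI Dockerfile.
docker buildx build -f Dockerfile.ci \
  --platform linux/amd64 \
  --build-arg INSTALL_FFMPEG=true \
  --build-arg INSTALL_ARIA2=true \
  -t alist:beta-aio --load .
```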

.github/workflows/release_freebsd.yml (vendored, new file, 34 lines)

```yaml
@@ -0,0 +1,34 @@
name: release_freebsd

on:
  release:
    types: [ published ]

jobs:
  release_freebsd:
    strategy:
      matrix:
        platform: [ ubuntu-latest ]
        go-version: [ '1.21' ]
    name: Release
    runs-on: ${{ matrix.platform }}
    steps:

      - name: Setup Go
        uses: actions/setup-go@v5
        with:
          go-version: ${{ matrix.go-version }}

      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Build
        run: |
          bash build.sh release freebsd

      - name: Upload assets
        uses: softprops/action-gh-release@v2
        with:
          files: build/compress/*
```
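This workflow only runs when a release is published and delegates all FreeBSD specifics to build.sh; the local equivalent is a single command (it cross-compiles with clang against an extracted FreeBSD 14.1 base system, as the build.sh diff further down shows):

```bash
bash build.sh release freebsd   # builds alist-freebsd-{amd64,arm64,i386} and packages them under build/compress/
```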

.github/workflows/release_linux_musl.yml (vendored, 2 lines changed)

```yaml
@@ -29,6 +29,6 @@ jobs:
          bash build.sh release linux_musl

      - name: Upload assets
        uses: softprops/action-gh-release@v1
        uses: softprops/action-gh-release@v2
        with:
          files: build/compress/*
```

.github/workflows/release_linux_musl_arm.yml (vendored, 2 lines changed)

```yaml
@@ -29,6 +29,6 @@ jobs:
          bash build.sh release linux_musl_arm

      - name: Upload assets
        uses: softprops/action-gh-release@v1
        uses: softprops/action-gh-release@v2
        with:
          files: build/compress/*
```

Dockerfile (30 lines changed)

```dockerfile
@@ -8,16 +8,36 @@ COPY ./ ./
RUN bash build.sh release docker

FROM alpine:edge

ARG INSTALL_FFMPEG=false
ARG INSTALL_ARIA2=false
LABEL MAINTAINER="i@nn.ci"
VOLUME /opt/alist/data/

WORKDIR /opt/alist/
COPY --from=builder /app/bin/alist ./
COPY entrypoint.sh /entrypoint.sh

RUN apk update && \
    apk upgrade --no-cache && \
    apk add --no-cache bash ca-certificates su-exec tzdata; \
    chmod +x /entrypoint.sh && \
    [ "$INSTALL_FFMPEG" = "true" ] && apk add --no-cache ffmpeg; \
    [ "$INSTALL_ARIA2" = "true" ] && apk add --no-cache curl aria2 && \
    mkdir -p /opt/aria2/.aria2 && \
    wget https://github.com/P3TERX/aria2.conf/archive/refs/heads/master.tar.gz -O /tmp/aria-conf.tar.gz && \
    tar -zxvf /tmp/aria-conf.tar.gz -C /opt/aria2/.aria2 --strip-components=1 && rm -f /tmp/aria-conf.tar.gz && \
    sed -i 's|rpc-secret|#rpc-secret|g' /opt/aria2/.aria2/aria2.conf && \
    sed -i 's|/root/.aria2|/opt/aria2/.aria2|g' /opt/aria2/.aria2/aria2.conf && \
    sed -i 's|/root/.aria2|/opt/aria2/.aria2|g' /opt/aria2/.aria2/script.conf && \
    sed -i 's|/root|/opt/aria2|g' /opt/aria2/.aria2/aria2.conf && \
    sed -i 's|/root|/opt/aria2|g' /opt/aria2/.aria2/script.conf && \
    touch /opt/aria2/.aria2/aria2.session && \
    /opt/aria2/.aria2/tracker.sh ; \
    rm -rf /var/cache/apk/*
ENV PUID=0 PGID=0 UMASK=022

COPY --from=builder /app/bin/alist ./
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /opt/alist/alist && \
    chmod +x /entrypoint.sh && /entrypoint.sh version

ENV PUID=0 PGID=0 UMASK=022 RUN_ARIA2=${INSTALL_ARIA2}
VOLUME /opt/alist/data/
EXPOSE 5244 5245
CMD [ "/entrypoint.sh" ]
```

```dockerfile
@@ -1,16 +1,35 @@
FROM alpine:edge

ARG TARGETPLATFORM
ARG INSTALL_FFMPEG=false
ARG INSTALL_ARIA2=false
LABEL MAINTAINER="i@nn.ci"
VOLUME /opt/alist/data/

WORKDIR /opt/alist/
COPY /build/${TARGETPLATFORM}/alist ./
COPY entrypoint.sh /entrypoint.sh

RUN apk update && \
    apk upgrade --no-cache && \
    apk add --no-cache bash ca-certificates su-exec tzdata; \
    chmod +x /entrypoint.sh && \
    rm -rf /var/cache/apk/* && \
    /entrypoint.sh version
ENV PUID=0 PGID=0 UMASK=022
    [ "$INSTALL_FFMPEG" = "true" ] && apk add --no-cache ffmpeg; \
    [ "$INSTALL_ARIA2" = "true" ] && apk add --no-cache curl aria2 && \
    mkdir -p /opt/aria2/.aria2 && \
    wget https://github.com/P3TERX/aria2.conf/archive/refs/heads/master.tar.gz -O /tmp/aria-conf.tar.gz && \
    tar -zxvf /tmp/aria-conf.tar.gz -C /opt/aria2/.aria2 --strip-components=1 && rm -f /tmp/aria-conf.tar.gz && \
    sed -i 's|rpc-secret|#rpc-secret|g' /opt/aria2/.aria2/aria2.conf && \
    sed -i 's|/root/.aria2|/opt/aria2/.aria2|g' /opt/aria2/.aria2/aria2.conf && \
    sed -i 's|/root/.aria2|/opt/aria2/.aria2|g' /opt/aria2/.aria2/script.conf && \
    sed -i 's|/root|/opt/aria2|g' /opt/aria2/.aria2/aria2.conf && \
    sed -i 's|/root|/opt/aria2|g' /opt/aria2/.aria2/script.conf && \
    touch /opt/aria2/.aria2/aria2.session && \
    /opt/aria2/.aria2/tracker.sh ; \
    rm -rf /var/cache/apk/*

COPY /build/${TARGETPLATFORM}/alist ./
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /opt/alist/alist && \
    chmod +x /entrypoint.sh && /entrypoint.sh version

ENV PUID=0 PGID=0 UMASK=022 RUN_ARIA2=${INSTALL_ARIA2}
VOLUME /opt/alist/data/
EXPOSE 5244 5245
CMD [ "/entrypoint.sh" ]
```

```dockerfile
@@ -1,4 +0,0 @@
FROM xhofe/alist:latest
RUN apk update && \
    apk add --no-cache ffmpeg \
    rm -rf /var/cache/apk/*
```
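With ffmpeg and aria2 folded into the main Dockerfiles as `INSTALL_FFMPEG` / `INSTALL_ARIA2` build arguments, the separate ffmpeg-only Dockerfile above becomes redundant. A hedged sketch of building and running the variants locally (image tag and host data path are illustrative):

```bash
# Plain image from the main Dockerfile.
docker build -t alist:latest .

# ffmpeg + aria2 variant; RUN_ARIA2 in the image defaults to the INSTALL_ARIA2 build arg.
docker build --build-arg INSTALL_FFMPEG=true --build-arg INSTALL_ARIA2=true -t alist:aio .

docker run -d --name alist \
  -p 5244:5244 \
  -v /etc/alist:/opt/alist/data \
  -e PUID=0 -e PGID=0 -e UMASK=022 \
  alist:aio
```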

README.md (22 lines changed)

```markdown
@@ -5,13 +5,13 @@
    <a href="https://goreportcard.com/report/github.com/alist-org/alist/v3">
      <img src="https://goreportcard.com/badge/github.com/alist-org/alist/v3" alt="latest version" />
    </a>
    <a href="https://github.com/Xhofe/alist/blob/main/LICENSE">
    <a href="https://github.com/alist-org/alist/blob/main/LICENSE">
      <img src="https://img.shields.io/github/license/Xhofe/alist" alt="License" />
    </a>
    <a href="https://github.com/Xhofe/alist/actions?query=workflow%3ABuild">
    <a href="https://github.com/alist-org/alist/actions?query=workflow%3ABuild">
      <img src="https://img.shields.io/github/actions/workflow/status/Xhofe/alist/build.yml?branch=main" alt="Build status" />
    </a>
    <a href="https://github.com/Xhofe/alist/releases">
    <a href="https://github.com/alist-org/alist/releases">
      <img src="https://img.shields.io/github/release/Xhofe/alist" alt="latest version" />
    </a>
    <a title="Crowdin" target="_blank" href="https://crwd.in/alist">
@@ -19,13 +19,13 @@
    </a>
  </div>
  <div>
    <a href="https://github.com/Xhofe/alist/discussions">
    <a href="https://github.com/alist-org/alist/discussions">
      <img src="https://img.shields.io/github/discussions/Xhofe/alist?color=%23ED8936" alt="discussions" />
    </a>
    <a href="https://discord.gg/F4ymsH4xv2">
      <img src="https://img.shields.io/discord/1018870125102895134?logo=discord" alt="discussions" />
    </a>
    <a href="https://github.com/Xhofe/alist/releases">
    <a href="https://github.com/alist-org/alist/releases">
      <img src="https://img.shields.io/github/downloads/Xhofe/alist/total?color=%239F7AEA&logo=github" alt="Downloads" />
    </a>
    <a href="https://hub.docker.com/r/xhofe/alist">
@@ -39,7 +39,7 @@

---

English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing](./CONTRIBUTING.md) | [CODE_OF_CONDUCT](./CODE_OF_CONDUCT.md)
English | [中文](./README_cn.md) | [日本語](./README_ja.md) | [Contributing](./CONTRIBUTING.md) | [CODE_OF_CONDUCT](./CODE_OF_CONDUCT.md)

## Features

@@ -58,7 +58,7 @@ English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing]
- [x] WebDav(Support OneDrive/SharePoint without API)
- [x] Teambition([China](https://www.teambition.com/ ),[International](https://us.teambition.com/ ))
- [x] [Mediatrack](https://www.mediatrack.cn/)
- [x] [139yun](https://yun.139.com/) (Personal, Family)
- [x] [139yun](https://yun.139.com/) (Personal, Family, Group)
- [x] [YandexDisk](https://disk.yandex.com/)
- [x] [BaiduNetdisk](http://pan.baidu.com/)
- [x] [Terabox](https://www.terabox.com/main)
@@ -98,7 +98,7 @@ English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing]

## Document

<https://alist.nn.ci/>
<https://alistgo.com/>

## Demo

@@ -106,7 +106,7 @@ English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing]

## Discussion

Please go to our [discussion forum](https://github.com/Xhofe/alist/discussions) for general questions, **issues are for bug reports and feature requests only.**
Please go to our [discussion forum](https://github.com/alist-org/alist/discussions) for general questions, **issues are for bug reports and feature requests only.**

## Sponsor

@@ -117,7 +117,7 @@ https://alist.nn.ci/guide/sponsor.html

- [VidHub](https://apps.apple.com/app/apple-store/id1659622164?pt=118612019&ct=alist&mt=8) - An elegant cloud video player within the Apple ecosystem. Support for iPhone, iPad, Mac, and Apple TV.
- [亚洲云](https://www.asiayun.com/aff/QQCOOQKZ) - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商 (sponsored Chinese API server)
- [找资源](https://zhaoziyuan.pw/) - 阿里云盘资源搜索引擎
- [找资源](http://zhaoziyuan2.cc/) - 阿里云盘资源搜索引擎

## Contributors

@@ -138,4 +138,4 @@ The `AList` is open-source software licensed under the AGPL-3.0 license.

---

> [@Blog](https://nn.ci/) · [@GitHub](https://github.com/Xhofe) · [@TelegramGroup](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)
> [@GitHub](https://github.com/alist-org) · [@TelegramGroup](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)
```

README_cn.md (18 lines changed)

```markdown
@@ -5,13 +5,13 @@
    <a href="https://goreportcard.com/report/github.com/alist-org/alist/v3">
      <img src="https://goreportcard.com/badge/github.com/alist-org/alist/v3" alt="latest version" />
    </a>
    <a href="https://github.com/Xhofe/alist/blob/main/LICENSE">
    <a href="https://github.com/alist-org/alist/blob/main/LICENSE">
      <img src="https://img.shields.io/github/license/Xhofe/alist" alt="License" />
    </a>
    <a href="https://github.com/Xhofe/alist/actions?query=workflow%3ABuild">
    <a href="https://github.com/alist-org/alist/actions?query=workflow%3ABuild">
      <img src="https://img.shields.io/github/actions/workflow/status/Xhofe/alist/build.yml?branch=main" alt="Build status" />
    </a>
    <a href="https://github.com/Xhofe/alist/releases">
    <a href="https://github.com/alist-org/alist/releases">
      <img src="https://img.shields.io/github/release/Xhofe/alist" alt="latest version" />
    </a>
    <a title="Crowdin" target="_blank" href="https://crwd.in/alist">
@@ -19,13 +19,13 @@
    </a>
  </div>
  <div>
    <a href="https://github.com/Xhofe/alist/discussions">
    <a href="https://github.com/alist-org/alist/discussions">
      <img src="https://img.shields.io/github/discussions/Xhofe/alist?color=%23ED8936" alt="discussions" />
    </a>
    <a href="https://discord.gg/F4ymsH4xv2">
      <img src="https://img.shields.io/discord/1018870125102895134?logo=discord" alt="discussions" />
    </a>
    <a href="https://github.com/Xhofe/alist/releases">
    <a href="https://github.com/alist-org/alist/releases">
      <img src="https://img.shields.io/github/downloads/Xhofe/alist/total?color=%239F7AEA&logo=github" alt="Downloads" />
    </a>
    <a href="https://hub.docker.com/r/xhofe/alist">
@@ -58,7 +58,7 @@
- [x] WebDav(支持无API的OneDrive/SharePoint)
- [x] Teambition([中国](https://www.teambition.com/ ),[国际](https://us.teambition.com/ ))
- [x] [分秒帧](https://www.mediatrack.cn/)
- [x] [和彩云](https://yun.139.com/) (个人云, 家庭云)
- [x] [和彩云](https://yun.139.com/) (个人云, 家庭云,共享群组)
- [x] [Yandex.Disk](https://disk.yandex.com/)
- [x] [百度网盘](http://pan.baidu.com/)
- [x] [UC网盘](https://drive.uc.cn)
@@ -105,7 +105,7 @@

## 讨论

一般问题请到[讨论论坛](https://github.com/Xhofe/alist/discussions) ,**issue仅针对错误报告和功能请求。**
一般问题请到[讨论论坛](https://github.com/alist-org/alist/discussions) ,**issue仅针对错误报告和功能请求。**

## 赞助

@@ -115,7 +115,7 @@ AList 是一个开源软件,如果你碰巧喜欢这个项目,并希望我

- [VidHub](https://apps.apple.com/app/apple-store/id1659622164?pt=118612019&ct=alist&mt=8) - 苹果生态下优雅的网盘视频播放器,iPhone,iPad,Mac,Apple TV全平台支持。
- [亚洲云](https://www.asiayun.com/aff/QQCOOQKZ) - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商 (国内API服务器赞助)
- [找资源](https://zhaoziyuan.pw/) - 阿里云盘资源搜索引擎
- [找资源](http://zhaoziyuan2.cc/) - 阿里云盘资源搜索引擎

## 贡献者

@@ -136,4 +136,4 @@ Thanks goes to these wonderful people:

---

> [@博客](https://nn.ci/) · [@GitHub](https://github.com/Xhofe) · [@Telegram群](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)
> [@博客](https://nn.ci/) · [@GitHub](https://github.com/alist-org) · [@Telegram群](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)
```

README_ja.md (18 lines changed)

```markdown
@@ -5,13 +5,13 @@
    <a href="https://goreportcard.com/report/github.com/alist-org/alist/v3">
      <img src="https://goreportcard.com/badge/github.com/alist-org/alist/v3" alt="latest version" />
    </a>
    <a href="https://github.com/Xhofe/alist/blob/main/LICENSE">
    <a href="https://github.com/alist-org/alist/blob/main/LICENSE">
      <img src="https://img.shields.io/github/license/Xhofe/alist" alt="License" />
    </a>
    <a href="https://github.com/Xhofe/alist/actions?query=workflow%3ABuild">
    <a href="https://github.com/alist-org/alist/actions?query=workflow%3ABuild">
      <img src="https://img.shields.io/github/actions/workflow/status/Xhofe/alist/build.yml?branch=main" alt="Build status" />
    </a>
    <a href="https://github.com/Xhofe/alist/releases">
    <a href="https://github.com/alist-org/alist/releases">
      <img src="https://img.shields.io/github/release/Xhofe/alist" alt="latest version" />
    </a>
    <a title="Crowdin" target="_blank" href="https://crwd.in/alist">
@@ -19,13 +19,13 @@
    </a>
  </div>
  <div>
    <a href="https://github.com/Xhofe/alist/discussions">
    <a href="https://github.com/alist-org/alist/discussions">
      <img src="https://img.shields.io/github/discussions/Xhofe/alist?color=%23ED8936" alt="discussions" />
    </a>
    <a href="https://discord.gg/F4ymsH4xv2">
      <img src="https://img.shields.io/discord/1018870125102895134?logo=discord" alt="discussions" />
    </a>
    <a href="https://github.com/Xhofe/alist/releases">
    <a href="https://github.com/alist-org/alist/releases">
      <img src="https://img.shields.io/github/downloads/Xhofe/alist/total?color=%239F7AEA&logo=github" alt="Downloads" />
    </a>
    <a href="https://hub.docker.com/r/xhofe/alist">
@@ -58,7 +58,7 @@
- [x] WebDav(Support OneDrive/SharePoint without API)
- [x] Teambition([China](https://www.teambition.com/ ),[International](https://us.teambition.com/ ))
- [x] [Mediatrack](https://www.mediatrack.cn/)
- [x] [139yun](https://yun.139.com/) (Personal, Family)
- [x] [139yun](https://yun.139.com/) (Personal, Family, Group)
- [x] [YandexDisk](https://disk.yandex.com/)
- [x] [BaiduNetdisk](http://pan.baidu.com/)
- [x] [Terabox](https://www.terabox.com/main)
@@ -106,7 +106,7 @@

## ディスカッション

一般的なご質問は[ディスカッションフォーラム](https://github.com/Xhofe/alist/discussions)をご利用ください。**問題はバグレポートと機能リクエストのみです。**
一般的なご質問は[ディスカッションフォーラム](https://github.com/alist-org/alist/discussions)をご利用ください。**問題はバグレポートと機能リクエストのみです。**

## スポンサー

@@ -117,7 +117,7 @@ https://alist.nn.ci/guide/sponsor.html

- [VidHub](https://apps.apple.com/app/apple-store/id1659622164?pt=118612019&ct=alist&mt=8) - An elegant cloud video player within the Apple ecosystem. Support for iPhone, iPad, Mac, and Apple TV.
- [亚洲云](https://www.asiayun.com/aff/QQCOOQKZ) - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商 (sponsored Chinese API server)
- [找资源](https://zhaoziyuan.pw/) - 阿里云盘资源搜索引擎
- [找资源](http://zhaoziyuan2.cc/) - 阿里云盘资源搜索引擎

## コントリビューター

@@ -138,4 +138,4 @@ https://alist.nn.ci/guide/sponsor.html

---

> [@Blog](https://nn.ci/) · [@GitHub](https://github.com/Xhofe) · [@TelegramGroup](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)
> [@Blog](https://nn.ci/) · [@GitHub](https://github.com/alist-org) · [@TelegramGroup](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)
```

build.sh (64 lines changed)

```bash
@@ -7,7 +7,11 @@ gitCommit=$(git log --pretty=format:"%h" -1)
if [ "$1" = "dev" ]; then
  version="dev"
  webVersion="dev"
elif [ "$1" = "beta" ]; then
  version="beta"
  webVersion="dev"
else
  git tag -d beta
  version=$(git describe --abbrev=0 --tags)
  webVersion=$(wget -qO- -t1 -T2 "https://api.github.com/repos/alist-org/alist-web/releases/latest" | grep "tag_name" | head -n 1 | awk -F ":" '{print $2}' | sed 's/\"//g;s/,//g;s/ //g')
fi
@@ -85,21 +89,14 @@ BuildDev() {
  cat md5.txt
}

PrepareBuildDocker() {
  echo "replace github.com/mattn/go-sqlite3 => github.com/leso-kn/go-sqlite3 v0.0.0-20230710125852-03158dc838ed" >>go.mod
  go get gorm.io/driver/sqlite@v1.4.4
  go mod download
}

BuildDocker() {
  PrepareBuildDocker
  go build -o ./bin/alist -ldflags="$ldflags" -tags=jsoniter .
}

PrepareBuildDockerMusl() {
  mkdir -p build/musl-libs
  BASE="https://musl.cc/"
  FILES=(x86_64-linux-musl-cross aarch64-linux-musl-cross i486-linux-musl-cross s390x-linux-musl-cross armv6-linux-musleabihf-cross armv7l-linux-musleabihf-cross)
  FILES=(x86_64-linux-musl-cross aarch64-linux-musl-cross i486-linux-musl-cross s390x-linux-musl-cross armv6-linux-musleabihf-cross armv7l-linux-musleabihf-cross riscv64-linux-musl-cross powerpc64le-linux-musl-cross)
  for i in "${FILES[@]}"; do
    url="${BASE}${i}.tgz"
    lib_tgz="build/${i}.tgz"
@@ -110,7 +107,7 @@ PrepareBuildDockerMusl() {
}

BuildDockerMultiplatform() {
  PrepareBuildDocker
  go mod download

  # run PrepareBuildDockerMusl before build
  export PATH=$PATH:$PWD/build/musl-libs/bin
@@ -118,8 +115,8 @@ BuildDockerMultiplatform() {
  docker_lflags="--extldflags '-static -fpic' $ldflags"
  export CGO_ENABLED=1

  OS_ARCHES=(linux-amd64 linux-arm64 linux-386 linux-s390x)
  CGO_ARGS=(x86_64-linux-musl-gcc aarch64-linux-musl-gcc i486-linux-musl-gcc s390x-linux-musl-gcc)
  OS_ARCHES=(linux-amd64 linux-arm64 linux-386 linux-s390x linux-riscv64 linux-ppc64le)
  CGO_ARGS=(x86_64-linux-musl-gcc aarch64-linux-musl-gcc i486-linux-musl-gcc s390x-linux-musl-gcc riscv64-linux-musl-gcc powerpc64le-linux-musl-gcc)
  for i in "${!OS_ARCHES[@]}"; do
    os_arch=${OS_ARCHES[$i]}
    cgo_cc=${CGO_ARGS[$i]}
@@ -239,6 +236,29 @@ BuildReleaseAndroid() {
  done
}

BuildReleaseFreeBSD() {
  rm -rf .git/
  mkdir -p "build/freebsd"
  OS_ARCHES=(amd64 arm64 i386)
  GO_ARCHES=(amd64 arm64 386)
  CGO_ARGS=(x86_64-unknown-freebsd14.1 aarch64-unknown-freebsd14.1 i386-unknown-freebsd14.1)
  for i in "${!OS_ARCHES[@]}"; do
    os_arch=${OS_ARCHES[$i]}
    cgo_cc="clang --target=${CGO_ARGS[$i]} --sysroot=/opt/freebsd/${os_arch}"
    echo building for freebsd-${os_arch}
    sudo mkdir -p "/opt/freebsd/${os_arch}"
    wget -q https://download.freebsd.org/releases/${os_arch}/14.1-RELEASE/base.txz
    sudo tar -xf ./base.txz -C /opt/freebsd/${os_arch}
    rm base.txz
    export GOOS=freebsd
    export GOARCH=${GO_ARCHES[$i]}
    export CC=${cgo_cc}
    export CGO_ENABLED=1
    export CGO_LDFLAGS="-fuse-ld=lld"
    go build -o ./build/$appName-freebsd-$os_arch -ldflags="$ldflags" -tags=jsoniter .
  done
}

MakeRelease() {
  cd build
  mkdir compress
@@ -257,6 +277,11 @@ MakeRelease() {
    tar -czvf compress/"$i".tar.gz alist
    rm -f alist
  done
  for i in $(find . -type f -name "$appName-freebsd-*"); do
    cp "$i" alist
    tar -czvf compress/"$i".tar.gz alist
    rm -f alist
  done
  for i in $(find . -type f -name "$appName-windows-*"); do
    cp "$i" alist.exe
    zip compress/$(echo $i | sed 's/\.[^.]*$//').zip alist.exe
@@ -274,11 +299,17 @@ if [ "$1" = "dev" ]; then
    BuildDocker
  elif [ "$2" = "docker-multiplatform" ]; then
    BuildDockerMultiplatform
  elif [ "$2" = "web" ]; then
    echo "web only"
  else
    BuildDev
  fi
elif [ "$1" = "release" ]; then
  FetchWebRelease
elif [ "$1" = "release" -o "$1" = "beta" ]; then
  if [ "$1" = "beta" ]; then
    FetchWebDev
  else
    FetchWebRelease
  fi
  if [ "$2" = "docker" ]; then
    BuildDocker
  elif [ "$2" = "docker-multiplatform" ]; then
@@ -292,6 +323,11 @@ elif [ "$1" = "release" ]; then
  elif [ "$2" = "android" ]; then
    BuildReleaseAndroid
    MakeRelease "md5-android.txt"
  elif [ "$2" = "freebsd" ]; then
    BuildReleaseFreeBSD
    MakeRelease "md5-freebsd.txt"
  elif [ "$2" = "web" ]; then
    echo "web only"
  else
    BuildRelease
    MakeRelease "md5.txt"
@@ -300,6 +336,8 @@ elif [ "$1" = "prepare" ]; then
  if [ "$2" = "docker-multiplatform" ]; then
    PrepareBuildDockerMusl
  fi
elif [ "$1" = "zip" ]; then
  MakeRelease "$2".txt
else
  echo -e "Parameter error"
fi
```
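The dispatcher at the bottom of build.sh gains three entry points: a `beta` mode that pairs release-style binaries with the dev web assets, a `freebsd` release target, and a standalone `zip` step used by the beta workflow. Typical invocations, all taken from the script and the workflows above:

```bash
bash build.sh prepare docker-multiplatform   # download the musl cross toolchains into build/musl-libs
bash build.sh beta docker-multiplatform      # beta binaries (dev web UI) for the Docker matrix
bash build.sh release freebsd                # clang cross-build against the FreeBSD 14.1 base.txz sysroots
bash build.sh zip md5-freebsd                # package existing build/ output via MakeRelease "md5-freebsd.txt"
```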

```go
@@ -18,6 +18,7 @@ func Init() {
    bootstrap.InitDB()
    data.InitData()
    bootstrap.InitIndex()
    bootstrap.InitUpgradePatch()
}

func Release() {
```

cmd/kill.go (new file, 54 lines)

```go
@@ -0,0 +1,54 @@
package cmd

import (
    log "github.com/sirupsen/logrus"
    "github.com/spf13/cobra"
    "os"
)

// KillCmd represents the kill command
var KillCmd = &cobra.Command{
    Use:   "kill",
    Short: "Force kill alist server process by daemon/pid file",
    Run: func(cmd *cobra.Command, args []string) {
        kill()
    },
}

func kill() {
    initDaemon()
    if pid == -1 {
        log.Info("Seems not have been started. Try use `alist start` to start server.")
        return
    }
    process, err := os.FindProcess(pid)
    if err != nil {
        log.Errorf("failed to find process by pid: %d, reason: %v", pid, process)
        return
    }
    err = process.Kill()
    if err != nil {
        log.Errorf("failed to kill process %d: %v", pid, err)
    } else {
        log.Info("killed process: ", pid)
    }
    err = os.Remove(pidFile)
    if err != nil {
        log.Errorf("failed to remove pid file")
    }
    pid = -1
}

func init() {
    RootCmd.AddCommand(KillCmd)

    // Here you will define your flags and configuration settings.

    // Cobra supports Persistent Flags which will work for this command
    // and all subcommands, e.g.:
    // stopCmd.PersistentFlags().String("foo", "", "A help for foo")

    // Cobra supports local flags which will only run when this command
    // is called directly, e.g.:
    // stopCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
}
```
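cmd/kill.go gives the daemon a hard-stop counterpart to `alist start`: it reads the pid recorded in the pid file and kills that process outright. Typical usage, assuming the server was started in daemon mode so the pid file exists:

```bash
./alist start    # daemonize; the recorded pid is what initDaemon() later reads
./alist kill     # force-kill the recorded process and remove the pid file
```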

```go
@@ -139,7 +139,7 @@ var LangCmd = &cobra.Command{
    Run: func(cmd *cobra.Command, args []string) {
        err := os.MkdirAll("lang", 0777)
        if err != nil {
            utils.Log.Fatal("failed create folder: %s", err.Error())
            utils.Log.Fatalf("failed create folder: %s", err.Error())
        }
        generateDriversJson()
        generateSettingsJson()
```

```go
@@ -6,6 +6,7 @@ import (

    "github.com/alist-org/alist/v3/cmd/flags"
    _ "github.com/alist-org/alist/v3/drivers"
    _ "github.com/alist-org/alist/v3/internal/archive"
    _ "github.com/alist-org/alist/v3/internal/offline_download"
    "github.com/spf13/cobra"
)
```

```go
@@ -4,6 +4,9 @@ import (
    "context"
    "errors"
    "fmt"
    ftpserver "github.com/KirCute/ftpserverlib-pasvportmap"
    "github.com/KirCute/sftpd-alist"
    "github.com/alist-org/alist/v3/internal/fs"
    "net"
    "net/http"
    "os"
@@ -112,6 +115,42 @@ the address is defined in config file`,
            }
        }()
    }
    var ftpDriver *server.FtpMainDriver
    var ftpServer *ftpserver.FtpServer
    if conf.Conf.FTP.Listen != "" && conf.Conf.FTP.Enable {
        var err error
        ftpDriver, err = server.NewMainDriver()
        if err != nil {
            utils.Log.Fatalf("failed to start ftp driver: %s", err.Error())
        } else {
            utils.Log.Infof("start ftp server on %s", conf.Conf.FTP.Listen)
            go func() {
                ftpServer = ftpserver.NewFtpServer(ftpDriver)
                err = ftpServer.ListenAndServe()
                if err != nil {
                    utils.Log.Fatalf("problem ftp server listening: %s", err.Error())
                }
            }()
        }
    }
    var sftpDriver *server.SftpDriver
    var sftpServer *sftpd.SftpServer
    if conf.Conf.SFTP.Listen != "" && conf.Conf.SFTP.Enable {
        var err error
        sftpDriver, err = server.NewSftpDriver()
        if err != nil {
            utils.Log.Fatalf("failed to start sftp driver: %s", err.Error())
        } else {
            utils.Log.Infof("start sftp server on %s", conf.Conf.SFTP.Listen)
            go func() {
                sftpServer = sftpd.NewSftpServer(sftpDriver)
                err = sftpServer.RunServer()
                if err != nil {
                    utils.Log.Fatalf("problem sftp server listening: %s", err.Error())
                }
            }()
        }
    }
    // Wait for interrupt signal to gracefully shutdown the server with
    // a timeout of 1 second.
    quit := make(chan os.Signal, 1)
@@ -121,6 +160,7 @@ the address is defined in config file`,
    signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM)
    <-quit
    utils.Log.Println("Shutdown server...")
    fs.ArchiveContentUploadTaskManager.RemoveAll()
    Release()
    ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second)
    defer cancel()
@@ -152,6 +192,25 @@ the address is defined in config file`,
            }
        }()
    }
    if conf.Conf.FTP.Listen != "" && conf.Conf.FTP.Enable && ftpServer != nil && ftpDriver != nil {
        wg.Add(1)
        go func() {
            defer wg.Done()
            ftpDriver.Stop()
            if err := ftpServer.Stop(); err != nil {
                utils.Log.Fatal("FTP server shutdown err: ", err)
            }
        }()
    }
    if conf.Conf.SFTP.Listen != "" && conf.Conf.SFTP.Enable && sftpServer != nil && sftpDriver != nil {
        wg.Add(1)
        go func() {
            defer wg.Done()
            if err := sftpServer.Close(); err != nil {
                utils.Log.Fatal("SFTP server shutdown err: ", err)
            }
        }()
    }
    wg.Wait()
    utils.Log.Println("Server exit")
},
```

```go
@@ -1,10 +1,10 @@
/*
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
*/
//go:build !windows

package cmd

import (
    "os"
    "syscall"

    log "github.com/sirupsen/logrus"
    "github.com/spf13/cobra"
@@ -30,11 +30,11 @@ func stop() {
        log.Errorf("failed to find process by pid: %d, reason: %v", pid, process)
        return
    }
    err = process.Kill()
    err = process.Signal(syscall.SIGTERM)
    if err != nil {
        log.Errorf("failed to kill process %d: %v", pid, err)
        log.Errorf("failed to terminate process %d: %v", pid, err)
    } else {
        log.Info("killed process: ", pid)
        log.Info("terminated process: ", pid)
    }
    err = os.Remove(pidFile)
    if err != nil {
```
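On non-Windows platforms `alist stop` now sends SIGTERM instead of killing the process, which lets the server run its graceful-shutdown path (including the FTP/SFTP teardown and archive-task cleanup added in the server command above), while `alist kill` stays the forceful option. Roughly:

```bash
./alist stop    # SIGTERM: graceful shutdown, shutdown handlers get to run
./alist kill    # process.Kill(): immediate termination, no cleanup
```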

cmd/stop_windows.go (new file, 34 lines)

```go
@@ -0,0 +1,34 @@
//go:build windows

package cmd

import (
    "github.com/spf13/cobra"
)

// StopCmd represents the stop command
var StopCmd = &cobra.Command{
    Use:   "stop",
    Short: "Same as the kill command",
    Run: func(cmd *cobra.Command, args []string) {
        stop()
    },
}

func stop() {
    kill()
}

func init() {
    RootCmd.AddCommand(StopCmd)

    // Here you will define your flags and configuration settings.

    // Cobra supports Persistent Flags which will work for this command
    // and all subcommands, e.g.:
    // stopCmd.PersistentFlags().String("foo", "", "A help for foo")

    // Cobra supports local flags which will only run when this command
    // is called directly, e.g.:
    // stopCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
}
```

drivers/115/appver.go (new file, 43 lines)

```go
@@ -0,0 +1,43 @@
package _115

import (
    driver115 "github.com/SheltonZhu/115driver/pkg/driver"
    "github.com/alist-org/alist/v3/drivers/base"
    log "github.com/sirupsen/logrus"
)

var (
    md5Salt = "Qclm8MGWUv59TnrR0XPg"
    appVer  = "27.0.5.7"
)

func (d *Pan115) getAppVersion() ([]driver115.AppVersion, error) {
    result := driver115.VersionResp{}
    resp, err := base.RestyClient.R().Get(driver115.ApiGetVersion)

    err = driver115.CheckErr(err, &result, resp)
    if err != nil {
        return nil, err
    }

    return result.Data.GetAppVersions(), nil
}

func (d *Pan115) getAppVer() string {
    // todo add some cache?
    vers, err := d.getAppVersion()
    if err != nil {
        log.Warnf("[115] get app version failed: %v", err)
        return appVer
    }
    for _, ver := range vers {
        if ver.AppName == "win" {
            return ver.Version
        }
    }
    return appVer
}

func (d *Pan115) initAppVer() {
    appVer = d.getAppVer()
}
```
@ -3,6 +3,7 @@ package _115
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
driver115 "github.com/SheltonZhu/115driver/pkg/driver"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
@ -16,8 +17,9 @@ import (
|
||||
type Pan115 struct {
|
||||
model.Storage
|
||||
Addition
|
||||
client *driver115.Pan115Client
|
||||
limiter *rate.Limiter
|
||||
client *driver115.Pan115Client
|
||||
limiter *rate.Limiter
|
||||
appVerOnce sync.Once
|
||||
}
|
||||
|
||||
func (d *Pan115) Config() driver.Config {
|
||||
@ -29,6 +31,7 @@ func (d *Pan115) GetAddition() driver.Additional {
|
||||
}
|
||||
|
||||
func (d *Pan115) Init(ctx context.Context) error {
|
||||
d.appVerOnce.Do(d.initAppVer)
|
||||
if d.LimitRate > 0 {
|
||||
d.limiter = rate.NewLimiter(rate.Limit(d.LimitRate), 1)
|
||||
}
|
||||
@ -63,7 +66,7 @@ func (d *Pan115) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
|
||||
if err := d.WaitLimit(ctx); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var userAgent = args.Header.Get("User-Agent")
|
||||
userAgent := args.Header.Get("User-Agent")
|
||||
downloadInfo, err := d.
|
||||
DownloadWithUA(file.(*FileObj).PickCode, userAgent)
|
||||
if err != nil {
|
||||
@ -76,28 +79,60 @@ func (d *Pan115) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
|
||||
return link, nil
|
||||
}
|
||||
|
||||
func (d *Pan115) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||
func (d *Pan115) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
|
||||
if err := d.WaitLimit(ctx); err != nil {
|
||||
return err
|
||||
return nil, err
|
||||
}
|
||||
if _, err := d.client.Mkdir(parentDir.GetID(), dirName); err != nil {
|
||||
return err
|
||||
|
||||
result := driver115.MkdirResp{}
|
||||
form := map[string]string{
|
||||
"pid": parentDir.GetID(),
|
||||
"cname": dirName,
|
||||
}
|
||||
return nil
|
||||
req := d.client.NewRequest().
|
||||
SetFormData(form).
|
||||
SetResult(&result).
|
||||
ForceContentType("application/json;charset=UTF-8")
|
||||
|
||||
resp, err := req.Post(driver115.ApiDirAdd)
|
||||
|
||||
err = driver115.CheckErr(err, &result, resp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
f, err := d.getNewFile(result.FileID)
|
||||
if err != nil {
|
||||
return nil, nil
|
||||
}
|
||||
return f, nil
|
||||
}
|
||||
|
||||
func (d *Pan115) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
func (d *Pan115) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
|
||||
if err := d.WaitLimit(ctx); err != nil {
|
||||
return err
|
||||
return nil, err
|
||||
}
|
||||
return d.client.Move(dstDir.GetID(), srcObj.GetID())
|
||||
if err := d.client.Move(dstDir.GetID(), srcObj.GetID()); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
f, err := d.getNewFile(srcObj.GetID())
|
||||
if err != nil {
|
||||
return nil, nil
|
||||
}
|
||||
return f, nil
|
||||
}
|
||||
|
||||
func (d *Pan115) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||
func (d *Pan115) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
|
||||
if err := d.WaitLimit(ctx); err != nil {
|
||||
return err
|
||||
return nil, err
|
||||
}
|
||||
return d.client.Rename(srcObj.GetID(), newName)
|
||||
if err := d.client.Rename(srcObj.GetID(), newName); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
f, err := d.getNewFile((srcObj.GetID()))
|
||||
if err != nil {
|
||||
return nil, nil
|
||||
}
|
||||
return f, nil
|
||||
}
|
||||
|
||||
func (d *Pan115) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
@ -114,9 +149,9 @@ func (d *Pan115) Remove(ctx context.Context, obj model.Obj) error {
|
||||
return d.client.Delete(obj.GetID())
|
||||
}
|
||||
|
||||
func (d *Pan115) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
func (d *Pan115) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
    if err := d.WaitLimit(ctx); err != nil {
        return err
        return nil, err
    }

    var (
@@ -125,10 +160,10 @@ func (d *Pan115) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
    )

    if ok, err := d.client.UploadAvailable(); err != nil || !ok {
        return err
        return nil, err
    }
    if stream.GetSize() > d.client.UploadMetaInfo.SizeLimit {
        return driver115.ErrUploadTooLarge
        return nil, driver115.ErrUploadTooLarge
    }
    //if digest, err = d.client.GetDigestResult(stream); err != nil {
    //	return err
@@ -141,22 +176,22 @@ func (d *Pan115) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
    }
    reader, err := stream.RangeRead(http_range.Range{Start: 0, Length: hashSize})
    if err != nil {
        return err
        return nil, err
    }
    preHash, err := utils.HashReader(utils.SHA1, reader)
    if err != nil {
        return err
        return nil, err
    }
    preHash = strings.ToUpper(preHash)
    fullHash := stream.GetHash().GetHash(utils.SHA1)
    if len(fullHash) <= 0 {
        tmpF, err := stream.CacheFullInTempFile()
        if err != nil {
            return err
            return nil, err
        }
        fullHash, err = utils.HashFile(utils.SHA1, tmpF)
        if err != nil {
            return err
            return nil, err
        }
    }
    fullHash = strings.ToUpper(fullHash)
@@ -165,21 +200,52 @@ func (d *Pan115) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
    // note that 115 add timeout for rapid-upload,
    // and "sig invalid" err is thrown even when the hash is correct after timeout.
    if fastInfo, err = d.rapidUpload(stream.GetSize(), stream.GetName(), dirID, preHash, fullHash, stream); err != nil {
        return err
        return nil, err
    }
    if matched, err := fastInfo.Ok(); err != nil {
        return err
        return nil, err
    } else if matched {
        return nil
        f, err := d.getNewFileByPickCode(fastInfo.PickCode)
        if err != nil {
            return nil, nil
        }
        return f, nil
    }

    var uploadResult *UploadResult
    // rapid upload failed, fall back to a regular upload
    if stream.GetSize() <= utils.KB { // files smaller than 1KB are uploaded in normal mode
        return d.client.UploadByOSS(&fastInfo.UploadOSSParams, stream, dirID)
    if stream.GetSize() <= 10*utils.MB { // files smaller than 10MB are uploaded in normal mode
        if uploadResult, err = d.UploadByOSS(&fastInfo.UploadOSSParams, stream, dirID); err != nil {
            return nil, err
        }
    } else {
        // multipart upload
        if uploadResult, err = d.UploadByMultipart(&fastInfo.UploadOSSParams, stream.GetSize(), stream, dirID); err != nil {
            return nil, err
        }
    }
    // multipart upload
    return d.UploadByMultipart(&fastInfo.UploadOSSParams, stream.GetSize(), stream, dirID)

    file, err := d.getNewFile(uploadResult.Data.FileID)
    if err != nil {
        return nil, nil
    }
    return file, nil
}

func (d *Pan115) OfflineList(ctx context.Context) ([]*driver115.OfflineTask, error) {
    resp, err := d.client.ListOfflineTask(0)
    if err != nil {
        return nil, err
    }
    return resp.Tasks, nil
}

func (d *Pan115) OfflineDownload(ctx context.Context, uris []string, dstDir model.Obj) ([]string, error) {
    return d.client.AddOfflineTaskURIs(uris, dstDir.GetID(), driver115.WithAppVer(appVer))
}

func (d *Pan115) DeleteOfflineTasks(ctx context.Context, hashes []string, deleteFiles bool) error {
    return d.client.DeleteOfflineTasks(hashes, deleteFiles)
}

var _ driver.Driver = (*Pan115)(nil)

@@ -8,18 +8,18 @@ import (
type Addition struct {
    Cookie      string  `json:"cookie" type:"text" help:"one of QR code token and cookie required"`
    QRCodeToken string  `json:"qrcode_token" type:"text" help:"one of QR code token and cookie required"`
    QRCodeSource string `json:"qrcode_source" type:"select" options:"web,android,ios,linux,mac,windows,tv" default:"linux" help:"select the QR code device, default linux"`
    PageSize int64 `json:"page_size" type:"number" default:"56" help:"list api per page size of 115 driver"`
    LimitRate float64 `json:"limit_rate" type:"number" default:"2" help:"limit all api request rate (1r/[limit_rate]s)"`
    QRCodeSource string `json:"qrcode_source" type:"select" options:"web,android,ios,tv,alipaymini,wechatmini,qandroid" default:"linux" help:"select the QR code device, default linux"`
    PageSize int64 `json:"page_size" type:"number" default:"1000" help:"list api per page size of 115 driver"`
    LimitRate float64 `json:"limit_rate" type:"float" default:"2" help:"limit all api request rate ([limit]r/1s)"`
    driver.RootID
}

var config = driver.Config{
    Name:        "115 Cloud",
    DefaultRoot: "0",
    //OnlyProxy:         true,
    //OnlyLocal:         true,
    //NoOverwriteUpload: true,
    // OnlyProxy:         true,
    // OnlyLocal:         true,
    // NoOverwriteUpload: true,
}

func init() {

@@ -1,10 +1,11 @@
package _115

import (
    "time"

    "github.com/SheltonZhu/115driver/pkg/driver"
    "github.com/alist-org/alist/v3/internal/model"
    "github.com/alist-org/alist/v3/pkg/utils"
    "time"
)

var _ model.Obj = (*FileObj)(nil)
@@ -20,3 +21,18 @@ func (f *FileObj) CreateTime() time.Time {
func (f *FileObj) GetHash() utils.HashInfo {
    return utils.NewHashInfo(utils.SHA1, f.Sha1)
}

type UploadResult struct {
    driver.BasicResp
    Data struct {
        PickCode string `json:"pick_code"`
        FileSize int    `json:"file_size"`
        FileID   string `json:"file_id"`
        ThumbURL string `json:"thumb_url"`
        Sha1     string `json:"sha1"`
        Aid      int    `json:"aid"`
        FileName string `json:"file_name"`
        Cid      string `json:"cid"`
        IsVideo  int    `json:"is_video"`
    } `json:"data"`
}

@@ -2,13 +2,14 @@ package _115

import (
    "bytes"
    "crypto/md5"
    "crypto/tls"
    "encoding/hex"
    "encoding/json"
    "fmt"
    "io"
    "net/http"
    "net/url"
    "path/filepath"
    "strconv"
    "strings"
    "sync"
@@ -20,35 +21,34 @@ import (
    "github.com/alist-org/alist/v3/pkg/utils"
    "github.com/aliyun/aliyun-oss-go-sdk/oss"

    cipher "github.com/SheltonZhu/115driver/pkg/crypto/ec115"
    crypto "github.com/SheltonZhu/115driver/pkg/crypto/m115"
    driver115 "github.com/SheltonZhu/115driver/pkg/driver"
    crypto "github.com/gaoyb7/115drive-webdav/115"
    "github.com/orzogc/fake115uploader/cipher"
    "github.com/pkg/errors"
)

var UserAgent = driver115.UA115Desktop

// var UserAgent = driver115.UA115Browser
func (d *Pan115) login() error {
    var err error
    opts := []driver115.Option{
        driver115.UA(UserAgent),
        driver115.UA(d.getUA()),
        func(c *driver115.Pan115Client) {
            c.Client.SetTLSClientConfig(&tls.Config{InsecureSkipVerify: conf.Conf.TlsInsecureSkipVerify})
        },
    }
    d.client = driver115.New(opts...)
    cr := &driver115.Credential{}
    if d.Addition.QRCodeToken != "" {
    if d.QRCodeToken != "" {
        s := &driver115.QRCodeSession{
            UID: d.Addition.QRCodeToken,
            UID: d.QRCodeToken,
        }
        if cr, err = d.client.QRCodeLoginWithApp(s, driver115.LoginApp(d.QRCodeSource)); err != nil {
            return errors.Wrap(err, "failed to login by qrcode")
        }
        d.Addition.Cookie = fmt.Sprintf("UID=%s;CID=%s;SEID=%s", cr.UID, cr.CID, cr.SEID)
        d.Addition.QRCodeToken = ""
    } else if d.Addition.Cookie != "" {
        if err = cr.FromCookie(d.Addition.Cookie); err != nil {
        d.Cookie = fmt.Sprintf("UID=%s;CID=%s;SEID=%s;KID=%s", cr.UID, cr.CID, cr.SEID, cr.KID)
        d.QRCodeToken = ""
    } else if d.Cookie != "" {
        if err = cr.FromCookie(d.Cookie); err != nil {
            return errors.Wrap(err, "failed to login by cookies")
        }
        d.client.ImportCredential(cr)
@@ -63,7 +63,7 @@ func (d *Pan115) getFiles(fileId string) ([]FileObj, error) {
    if d.PageSize <= 0 {
        d.PageSize = driver115.FileListLimit
    }
    files, err := d.client.ListWithLimit(fileId, d.PageSize)
    files, err := d.client.ListWithLimit(fileId, d.PageSize, driver115.WithMultiUrls())
    if err != nil {
        return nil, err
    }
@@ -73,14 +73,42 @@ func (d *Pan115) getFiles(fileId string) ([]FileObj, error) {
    return res, nil
}

const (
    appVer = "2.0.3.6"
)
func (d *Pan115) getNewFile(fileId string) (*FileObj, error) {
    file, err := d.client.GetFile(fileId)
    if err != nil {
        return nil, err
    }
    return &FileObj{*file}, nil
}

func (c *Pan115) DownloadWithUA(pickCode, ua string) (*driver115.DownloadInfo, error) {
func (d *Pan115) getNewFileByPickCode(pickCode string) (*FileObj, error) {
    result := driver115.GetFileInfoResponse{}
    req := d.client.NewRequest().
        SetQueryParam("pick_code", pickCode).
        ForceContentType("application/json;charset=UTF-8").
        SetResult(&result)
    resp, err := req.Get(driver115.ApiFileInfo)
    if err := driver115.CheckErr(err, &result, resp); err != nil {
        return nil, err
    }
    if len(result.Files) == 0 {
        return nil, errors.New("not get file info")
    }
    fileInfo := result.Files[0]

    f := &FileObj{}
    f.From(fileInfo)
    return f, nil
}

func (d *Pan115) getUA() string {
    return fmt.Sprintf("Mozilla/5.0 115Browser/%s", appVer)
}

func (d *Pan115) DownloadWithUA(pickCode, ua string) (*driver115.DownloadInfo, error) {
    key := crypto.GenerateKey()
    result := driver115.DownloadResp{}
    params, err := utils.Json.Marshal(map[string]string{"pickcode": pickCode})
    params, err := utils.Json.Marshal(map[string]string{"pick_code": pickCode})
    if err != nil {
        return nil, err
    }
@@ -88,13 +116,13 @@ func (c *Pan115) DownloadWithUA(pickCode, ua string) (*driver115.DownloadInfo, e
    data := crypto.Encode(params, key)

    bodyReader := strings.NewReader(url.Values{"data": []string{data}}.Encode())
    reqUrl := fmt.Sprintf("%s?t=%s", driver115.ApiDownloadGetUrl, driver115.Now().String())
    reqUrl := fmt.Sprintf("%s?t=%s", driver115.AndroidApiDownloadGetUrl, driver115.Now().String())
    req, _ := http.NewRequest(http.MethodPost, reqUrl, bodyReader)
    req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
    req.Header.Set("Cookie", c.Cookie)
    req.Header.Set("Cookie", d.Cookie)
    req.Header.Set("User-Agent", ua)

    resp, err := c.client.Client.GetClient().Do(req)
    resp, err := d.client.Client.GetClient().Do(req)
    if err != nil {
        return nil, err
    }
@@ -117,19 +145,25 @@ func (c *Pan115) DownloadWithUA(pickCode, ua string) (*driver115.DownloadInfo, e
        return nil, err
    }

    downloadInfo := driver115.DownloadData{}
    downloadInfo := struct {
        Url string `json:"url"`
    }{}
    if err := utils.Json.Unmarshal(bytes, &downloadInfo); err != nil {
        return nil, err
    }

    for _, info := range downloadInfo {
        if info.FileSize < 0 {
            return nil, driver115.ErrDownloadEmpty
        }
        info.Header = resp.Request.Header
        return info, nil
    }
    return nil, driver115.ErrUnexpected
    info := &driver115.DownloadInfo{}
    info.PickCode = pickCode
    info.Header = resp.Request.Header
    info.Url.Url = downloadInfo.Url
    return info, nil
}

func (c *Pan115) GenerateToken(fileID, preID, timeStamp, fileSize, signKey, signVal string) string {
    userID := strconv.FormatInt(c.client.UserID, 10)
    userIDMd5 := md5.Sum([]byte(userID))
    tokenMd5 := md5.Sum([]byte(md5Salt + fileID + fileSize + signKey + signVal + userID + timeStamp + hex.EncodeToString(userIDMd5[:]) + appVer))
    return hex.EncodeToString(tokenMd5[:])
}

func (d *Pan115) rapidUpload(fileSize int64, fileName, dirID, preID, fileID string, stream model.FileStreamer) (*driver115.UploadInitResp, error) {
@@ -161,7 +195,7 @@ func (d *Pan115) rapidUpload(fileSize int64, fileName, dirID, preID, fileID stri

    signKey, signVal := "", ""
    for retry := true; retry; {
        t := driver115.Now()
        t := driver115.NowMilli()

        if encodedToken, err = ecdhCipher.EncodeToken(t.ToInt64()); err != nil {
            return nil, err
@@ -172,7 +206,7 @@ func (d *Pan115) rapidUpload(fileSize int64, fileName, dirID, preID, fileID stri
        }

        form.Set("t", t.String())
        form.Set("token", d.client.GenerateToken(fileID, preID, t.String(), fileSizeStr, signKey, signVal))
        form.Set("token", d.GenerateToken(fileID, preID, t.String(), fileSizeStr, signKey, signVal))
        if signKey != "" && signVal != "" {
            form.Set("sign_key", signKey)
            form.Set("sign_val", signVal)
@@ -225,6 +259,9 @@ func UploadDigestRange(stream model.FileStreamer, rangeSpec string) (result stri

    length := end - start + 1
    reader, err := stream.RangeRead(http_range.Range{Start: start, Length: length})
    if err != nil {
        return "", err
    }
    hashStr, err := utils.HashReader(utils.SHA1, reader)
    if err != nil {
        return "", err
@@ -233,8 +270,38 @@ func UploadDigestRange(stream model.FileStreamer, rangeSpec string) (result stri
    return
}

// UploadByOSS use aliyun sdk to upload
func (c *Pan115) UploadByOSS(params *driver115.UploadOSSParams, r io.Reader, dirID string) (*UploadResult, error) {
    ossToken, err := c.client.GetOSSToken()
    if err != nil {
        return nil, err
    }
    ossClient, err := oss.New(driver115.OSSEndpoint, ossToken.AccessKeyID, ossToken.AccessKeySecret)
    if err != nil {
        return nil, err
    }
    bucket, err := ossClient.Bucket(params.Bucket)
    if err != nil {
        return nil, err
    }

    var bodyBytes []byte
    if err = bucket.PutObject(params.Object, r, append(
        driver115.OssOption(params, ossToken),
        oss.CallbackResult(&bodyBytes),
    )...); err != nil {
        return nil, err
    }

    var uploadResult UploadResult
    if err = json.Unmarshal(bodyBytes, &uploadResult); err != nil {
        return nil, err
    }
    return &uploadResult, uploadResult.Err(string(bodyBytes))
}

// UploadByMultipart upload by multipart blocks
func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize int64, stream model.FileStreamer, dirID string, opts ...driver115.UploadMultipartOption) error {
func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize int64, stream model.FileStreamer, dirID string, opts ...driver115.UploadMultipartOption) (*UploadResult, error) {
    var (
        chunks    []oss.FileChunk
        parts     []oss.UploadPart
@@ -242,12 +309,13 @@ func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize i
        ossClient *oss.Client
        bucket    *oss.Bucket
        ossToken  *driver115.UploadOSSTokenResp
        bodyBytes []byte
        err       error
    )

    tmpF, err := stream.CacheFullInTempFile()
    if err != nil {
        return err
        return nil, err
    }

    options := driver115.DefalutUploadMultipartOptions()
@@ -256,17 +324,19 @@ func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize i
            f(options)
        }
    }
    // with Sequential enabled, OSS requires parts to be uploaded in order
    options.ThreadsNum = 1

    if ossToken, err = d.client.GetOSSToken(); err != nil {
        return err
        return nil, err
    }

    if ossClient, err = oss.New(driver115.OSSEndpoint, ossToken.AccessKeyID, ossToken.AccessKeySecret); err != nil {
        return err
    if ossClient, err = oss.New(driver115.OSSEndpoint, ossToken.AccessKeyID, ossToken.AccessKeySecret, oss.EnableMD5(true), oss.EnableCRC(true)); err != nil {
        return nil, err
    }

    if bucket, err = ossClient.Bucket(params.Bucket); err != nil {
        return err
        return nil, err
    }

    // the ossToken expires after one hour, so fetch a new one every 50 minutes
@@ -276,14 +346,15 @@ func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize i
    timeout := time.NewTimer(options.Timeout)

    if chunks, err = SplitFile(fileSize); err != nil {
        return err
        return nil, err
    }

    if imur, err = bucket.InitiateMultipartUpload(params.Object,
        oss.SetHeader(driver115.OssSecurityTokenHeaderName, ossToken.SecurityToken),
        oss.UserAgentHeader(driver115.OSSUserAgent),
        oss.EnableSha1(), oss.Sequential(),
    ); err != nil {
        return err
        return nil, err
    }

    wg := sync.WaitGroup{}
@@ -325,8 +396,7 @@ func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize i
                    continue
                }

                b := bytes.NewBuffer(buf)
                if part, err = bucket.UploadPart(imur, b, chunk.Size, chunk.Number, driver115.OssOption(params, ossToken)...); err == nil {
                if part, err = bucket.UploadPart(imur, bytes.NewBuffer(buf), chunk.Size, chunk.Number, driver115.OssOption(params, ossToken)...); err == nil {
                    break
                }
            }
@@ -350,51 +420,38 @@ LOOP:
        case <-ticker.C:
            // time to fetch a fresh ossToken
            if ossToken, err = d.client.GetOSSToken(); err != nil {
                return err
                return nil, err
            }
        case <-quit:
            break LOOP
        case <-errCh:
            return err
            return nil, err
        case <-timeout.C:
            return fmt.Errorf("time out")
            return nil, fmt.Errorf("time out")
        }
    }

    // the EOF error comes from the xml Unmarshal; the response is actually JSON, so the upload did in fact succeed
    if _, err = bucket.CompleteMultipartUpload(imur, parts, driver115.OssOption(params, ossToken)...); err != nil && !errors.Is(err, io.EOF) {
        // when the file name contains either of the characters & or <, parsing the xml response fails, but the upload did in fact succeed
        if filename := filepath.Base(stream.GetName()); !strings.ContainsAny(filename, "&<") {
            return err
        }
    // for some reason OSS does not compute the sha1 for multipart uploads, which makes the 115 server-side check fail
    // params.Callback.Callback = strings.ReplaceAll(params.Callback.Callback, "${sha1}", params.SHA1)
    if _, err := bucket.CompleteMultipartUpload(imur, parts, append(
        driver115.OssOption(params, ossToken),
        oss.CallbackResult(&bodyBytes),
    )...); err != nil {
        return nil, err
    }
    return d.checkUploadStatus(dirID, params.SHA1)

    var uploadResult UploadResult
    if err = json.Unmarshal(bodyBytes, &uploadResult); err != nil {
        return nil, err
    }
    return &uploadResult, uploadResult.Err(string(bodyBytes))
}

func chunksProducer(ch chan oss.FileChunk, chunks []oss.FileChunk) {
    for _, chunk := range chunks {
        ch <- chunk
    }
}
func (d *Pan115) checkUploadStatus(dirID, sha1 string) error {
    // verify that the upload succeeded
    req := d.client.NewRequest().ForceContentType("application/json;charset=UTF-8")
    opts := []driver115.GetFileOptions{
        driver115.WithOrder(driver115.FileOrderByTime),
        driver115.WithShowDirEnable(false),
        driver115.WithAsc(false),
        driver115.WithLimit(500),
    }
    fResp, err := driver115.GetFiles(req, dirID, opts...)
    if err != nil {
        return err
    }
    for _, fileInfo := range fResp.Files {
        if fileInfo.Sha1 == sha1 {
            return nil
        }
    }
    return driver115.ErrUploadFailed
}

func SplitFile(fileSize int64) (chunks []oss.FileChunk, err error) {
    for i := int64(1); i < 10; i++ {
@@ -431,8 +488,8 @@ func SplitFileByPartNum(fileSize int64, chunkNum int) ([]oss.FileChunk, error) {
    }

    var chunks []oss.FileChunk
    var chunk = oss.FileChunk{}
    var chunkN = (int64)(chunkNum)
    chunk := oss.FileChunk{}
    chunkN := (int64)(chunkNum)
    for i := int64(0); i < chunkN; i++ {
        chunk.Number = int(i + 1)
        chunk.Offset = i * (fileSize / chunkN)
@@ -454,13 +511,13 @@ func SplitFileByPartSize(fileSize int64, chunkSize int64) ([]oss.FileChunk, erro
        return nil, errors.New("chunkSize invalid")
    }

    var chunkN = fileSize / chunkSize
    chunkN := fileSize / chunkSize
    if chunkN >= 10000 {
        return nil, errors.New("Too many parts, please increase part size")
    }

    var chunks []oss.FileChunk
    var chunk = oss.FileChunk{}
    chunk := oss.FileChunk{}
    for i := int64(0); i < chunkN; i++ {
        chunk.Number = int(i + 1)
        chunk.Offset = i * chunkSize

@@ -8,9 +8,9 @@ import (
type Addition struct {
    Cookie      string  `json:"cookie" type:"text" help:"one of QR code token and cookie required"`
    QRCodeToken string  `json:"qrcode_token" type:"text" help:"one of QR code token and cookie required"`
    QRCodeSource string `json:"qrcode_source" type:"select" options:"web,android,ios,linux,mac,windows,tv" default:"linux" help:"select the QR code device, default linux"`
    PageSize int64 `json:"page_size" type:"number" default:"20" help:"list api per page size of 115 driver"`
    LimitRate float64 `json:"limit_rate" type:"number" default:"2" help:"limit all api request rate (1r/[limit_rate]s)"`
    QRCodeSource string `json:"qrcode_source" type:"select" options:"web,android,ios,tv,alipaymini,wechatmini,qandroid" default:"linux" help:"select the QR code device, default linux"`
    PageSize int64 `json:"page_size" type:"number" default:"1000" help:"list api per page size of 115 driver"`
    LimitRate float64 `json:"limit_rate" type:"float" default:"2" help:"limit all api request rate (1r/[limit_rate]s)"`
    ShareCode string `json:"share_code" type:"text" required:"true" help:"share code of 115 share link"`
    ReceiveCode string `json:"receive_code" type:"text" required:"true" help:"receive code of 115 share link"`
    driver.RootID
@@ -18,7 +18,7 @@ type Addition struct {

var config = driver.Config{
    Name:        "115 Share",
    DefaultRoot: "",
    DefaultRoot: "0",
    // OnlyProxy:         true,
    // OnlyLocal:         true,
    CheckStatus: false,

@@ -96,7 +96,7 @@ func (d *Pan115Share) login() error {
    if cr, err = d.client.QRCodeLoginWithApp(s, driver115.LoginApp(d.QRCodeSource)); err != nil {
        return errors.Wrap(err, "failed to login by qrcode")
    }
    d.Cookie = fmt.Sprintf("UID=%s;CID=%s;SEID=%s", cr.UID, cr.CID, cr.SEID)
    d.Cookie = fmt.Sprintf("UID=%s;CID=%s;SEID=%s;KID=%s", cr.UID, cr.CID, cr.SEID, cr.KID)
    d.QRCodeToken = ""
} else if d.Cookie != "" {
    if err = cr.FromCookie(d.Cookie); err != nil {
@ -6,13 +6,14 @@ import (
|
||||
"encoding/base64"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"golang.org/x/time/rate"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"golang.org/x/time/rate"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/errs"
|
||||
@ -41,19 +42,19 @@ func (d *Pan123) GetAddition() driver.Additional {
|
||||
}
|
||||
|
||||
func (d *Pan123) Init(ctx context.Context) error {
|
||||
_, err := d.request(UserInfo, http.MethodGet, nil, nil)
|
||||
_, err := d.Request(UserInfo, http.MethodGet, nil, nil)
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *Pan123) Drop(ctx context.Context) error {
|
||||
_, _ = d.request(Logout, http.MethodPost, func(req *resty.Request) {
|
||||
_, _ = d.Request(Logout, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{})
|
||||
}, nil)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Pan123) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||
files, err := d.getFiles(dir.GetID())
|
||||
files, err := d.getFiles(ctx, dir.GetID(), dir.GetName())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -81,7 +82,8 @@ func (d *Pan123) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
|
||||
"size": f.Size,
|
||||
"type": f.Type,
|
||||
}
|
||||
resp, err := d.request(DownloadInfo, http.MethodPost, func(req *resty.Request) {
|
||||
resp, err := d.Request(DownloadInfo, http.MethodPost, func(req *resty.Request) {
|
||||
|
||||
req.SetBody(data).SetHeaders(headers)
|
||||
}, nil)
|
||||
if err != nil {
|
||||
@ -134,7 +136,7 @@ func (d *Pan123) MakeDir(ctx context.Context, parentDir model.Obj, dirName strin
|
||||
"size": 0,
|
||||
"type": 1,
|
||||
}
|
||||
_, err := d.request(Mkdir, http.MethodPost, func(req *resty.Request) {
|
||||
_, err := d.Request(Mkdir, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data)
|
||||
}, nil)
|
||||
return err
|
||||
@ -145,7 +147,7 @@ func (d *Pan123) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
"fileIdList": []base.Json{{"FileId": srcObj.GetID()}},
|
||||
"parentFileId": dstDir.GetID(),
|
||||
}
|
||||
_, err := d.request(Move, http.MethodPost, func(req *resty.Request) {
|
||||
_, err := d.Request(Move, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data)
|
||||
}, nil)
|
||||
return err
|
||||
@ -157,7 +159,7 @@ func (d *Pan123) Rename(ctx context.Context, srcObj model.Obj, newName string) e
|
||||
"fileId": srcObj.GetID(),
|
||||
"fileName": newName,
|
||||
}
|
||||
_, err := d.request(Rename, http.MethodPost, func(req *resty.Request) {
|
||||
_, err := d.Request(Rename, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data)
|
||||
}, nil)
|
||||
return err
|
||||
@ -174,7 +176,7 @@ func (d *Pan123) Remove(ctx context.Context, obj model.Obj) error {
|
||||
"operation": true,
|
||||
"fileTrashInfoList": []File{f},
|
||||
}
|
||||
_, err := d.request(Trash, http.MethodPost, func(req *resty.Request) {
|
||||
_, err := d.Request(Trash, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data)
|
||||
}, nil)
|
||||
return err
|
||||
@ -212,7 +214,7 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
|
||||
"type": 0,
|
||||
}
|
||||
var resp UploadResp
|
||||
res, err := d.request(UploadRequest, http.MethodPost, func(req *resty.Request) {
|
||||
res, err := d.Request(UploadRequest, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data).SetContext(ctx)
|
||||
}, &resp)
|
||||
if err != nil {
|
||||
@ -247,10 +249,7 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
|
||||
}
|
||||
_, err = uploader.UploadWithContext(ctx, input)
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = d.request(UploadComplete, http.MethodPost, func(req *resty.Request) {
|
||||
_, err = d.Request(UploadComplete, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"fileId": resp.Data.FileId,
|
||||
}).SetContext(ctx)
|
||||
@ -258,11 +257,12 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *Pan123) APIRateLimit(api string) bool {
|
||||
limiter, _ := d.apiRateLimit.LoadOrStore(api,
|
||||
rate.NewLimiter(rate.Every(time.Millisecond*700), 1))
|
||||
ins := limiter.(*rate.Limiter)
|
||||
return ins.Allow()
|
||||
func (d *Pan123) APIRateLimit(ctx context.Context, api string) error {
|
||||
value, _ := d.apiRateLimit.LoadOrStore(api,
|
||||
rate.NewLimiter(rate.Every(700*time.Millisecond), 1))
|
||||
limiter := value.(*rate.Limiter)
|
||||
|
||||
return limiter.Wait(ctx)
|
||||
}
|
||||
|
||||
var _ driver.Driver = (*Pan123)(nil)
|
||||
|
@ -9,14 +9,15 @@ type Addition struct {
|
||||
Username string `json:"username" required:"true"`
|
||||
Password string `json:"password" required:"true"`
|
||||
driver.RootID
|
||||
OrderBy string `json:"order_by" type:"select" options:"file_name,size,update_at" default:"file_name"`
|
||||
OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
|
||||
AccessToken string
|
||||
//OrderBy string `json:"order_by" type:"select" options:"file_id,file_name,size,update_at" default:"file_name"`
|
||||
//OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
|
||||
AccessToken string
|
||||
}
|
||||
|
||||
var config = driver.Config{
|
||||
Name: "123Pan",
|
||||
DefaultRoot: "0",
|
||||
LocalSort: true,
|
||||
}
|
||||
|
||||
func init() {
|
||||
|
@ -87,8 +87,9 @@ var _ model.Thumb = (*File)(nil)
|
||||
type Files struct {
|
||||
//BaseResp
|
||||
Data struct {
|
||||
InfoList []File `json:"InfoList"`
|
||||
Next string `json:"Next"`
|
||||
Total int `json:"Total"`
|
||||
InfoList []File `json:"InfoList"`
|
||||
} `json:"data"`
|
||||
}
|
||||
|
||||
|
@ -25,7 +25,7 @@ func (d *Pan123) getS3PreSignedUrls(ctx context.Context, upReq *UploadResp, star
|
||||
"StorageNode": upReq.Data.StorageNode,
|
||||
}
|
||||
var s3PreSignedUrls S3PreSignedURLs
|
||||
_, err := d.request(S3PreSignedUrls, http.MethodPost, func(req *resty.Request) {
|
||||
_, err := d.Request(S3PreSignedUrls, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data).SetContext(ctx)
|
||||
}, &s3PreSignedUrls)
|
||||
if err != nil {
|
||||
@ -44,7 +44,7 @@ func (d *Pan123) getS3Auth(ctx context.Context, upReq *UploadResp, start, end in
|
||||
"uploadId": upReq.Data.UploadId,
|
||||
}
|
||||
var s3PreSignedUrls S3PreSignedURLs
|
||||
_, err := d.request(S3Auth, http.MethodPost, func(req *resty.Request) {
|
||||
_, err := d.Request(S3Auth, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data).SetContext(ctx)
|
||||
}, &s3PreSignedUrls)
|
||||
if err != nil {
|
||||
@ -63,7 +63,7 @@ func (d *Pan123) completeS3(ctx context.Context, upReq *UploadResp, file model.F
|
||||
"key": upReq.Data.Key,
|
||||
"uploadId": upReq.Data.UploadId,
|
||||
}
|
||||
_, err := d.request(UploadCompleteV2, http.MethodPost, func(req *resty.Request) {
|
||||
_, err := d.Request(UploadCompleteV2, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data).SetContext(ctx)
|
||||
}, nil)
|
||||
return err
|
||||
|
@ -1,6 +1,7 @@
|
||||
package _123
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"hash/crc32"
|
||||
@ -14,8 +15,9 @@ import (
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
resty "github.com/go-resty/resty/v2"
|
||||
"github.com/go-resty/resty/v2"
|
||||
jsoniter "github.com/json-iterator/go"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
// do others that not defined in Driver interface
|
||||
@ -24,8 +26,9 @@ const (
|
||||
Api = "https://www.123pan.com/api"
|
||||
AApi = "https://www.123pan.com/a/api"
|
||||
BApi = "https://www.123pan.com/b/api"
|
||||
LoginApi = "https://login.123pan.com/api"
|
||||
MainApi = BApi
|
||||
SignIn = MainApi + "/user/sign_in"
|
||||
SignIn = LoginApi + "/user/sign_in"
|
||||
Logout = MainApi + "/user/logout"
|
||||
UserInfo = MainApi + "/user/info"
|
||||
FileList = MainApi + "/file/list/new"
|
||||
@ -191,7 +194,9 @@ func (d *Pan123) login() error {
|
||||
// return &authKey, nil
|
||||
//}
|
||||
|
||||
func (d *Pan123) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
||||
func (d *Pan123) Request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
||||
isRetry := false
|
||||
do:
|
||||
req := base.RestyClient.R()
|
||||
req.SetHeaders(map[string]string{
|
||||
"origin": "https://www.123pan.com",
|
||||
@ -220,34 +225,35 @@ func (d *Pan123) request(url string, method string, callback base.ReqCallback, r
|
||||
body := res.Body()
|
||||
code := utils.Json.Get(body, "code").ToInt()
|
||||
if code != 0 {
|
||||
if code == 401 {
|
||||
if !isRetry && code == 401 {
|
||||
err := d.login()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return d.request(url, method, callback, resp)
|
||||
isRetry = true
|
||||
goto do
|
||||
}
|
||||
return nil, errors.New(jsoniter.Get(body, "message").ToString())
|
||||
}
|
||||
return body, nil
|
||||
}
|
||||
|
||||
func (d *Pan123) getFiles(parentId string) ([]File, error) {
|
||||
func (d *Pan123) getFiles(ctx context.Context, parentId string, name string) ([]File, error) {
|
||||
page := 1
|
||||
total := 0
|
||||
res := make([]File, 0)
|
||||
// 2024-02-06 fix concurrency by 123pan
|
||||
for {
|
||||
if !d.APIRateLimit(FileList) {
|
||||
time.Sleep(time.Millisecond * 200)
|
||||
continue
|
||||
if err := d.APIRateLimit(ctx, FileList); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var resp Files
|
||||
query := map[string]string{
|
||||
"driveId": "0",
|
||||
"limit": "100",
|
||||
"next": "0",
|
||||
"orderBy": d.OrderBy,
|
||||
"orderDirection": d.OrderDirection,
|
||||
"orderBy": "file_id",
|
||||
"orderDirection": "desc",
|
||||
"parentFileId": parentId,
|
||||
"trashed": "false",
|
||||
"SearchData": "",
|
||||
@ -257,17 +263,22 @@ func (d *Pan123) getFiles(parentId string) ([]File, error) {
|
||||
"operateType": "4",
|
||||
"inDirectSpace": "false",
|
||||
}
|
||||
_, err := d.request(FileList, http.MethodGet, func(req *resty.Request) {
|
||||
_res, err := d.Request(FileList, http.MethodGet, func(req *resty.Request) {
|
||||
req.SetQueryParams(query)
|
||||
}, &resp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
log.Debug(string(_res))
|
||||
page++
|
||||
res = append(res, resp.Data.InfoList...)
|
||||
total = resp.Data.Total
|
||||
if len(resp.Data.InfoList) == 0 || resp.Data.Next == "-1" {
|
||||
break
|
||||
}
|
||||
}
|
||||
if len(res) != total {
|
||||
log.Warnf("incorrect file count from remote at %s: expected %d, got %d", name, total, len(res))
|
||||
}
|
||||
return res, nil
|
||||
}
|
||||
|
@ -4,12 +4,14 @@ import (
|
||||
"context"
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"golang.org/x/time/rate"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"golang.org/x/time/rate"
|
||||
|
||||
_123 "github.com/alist-org/alist/v3/drivers/123"
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/errs"
|
||||
@ -23,6 +25,7 @@ type Pan123Share struct {
|
||||
model.Storage
|
||||
Addition
|
||||
apiRateLimit sync.Map
|
||||
ref *_123.Pan123
|
||||
}
|
||||
|
||||
func (d *Pan123Share) Config() driver.Config {
|
||||
@ -39,13 +42,23 @@ func (d *Pan123Share) Init(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Pan123Share) InitReference(storage driver.Driver) error {
|
||||
refStorage, ok := storage.(*_123.Pan123)
|
||||
if ok {
|
||||
d.ref = refStorage
|
||||
return nil
|
||||
}
|
||||
return fmt.Errorf("ref: storage is not 123Pan")
|
||||
}
|
||||
|
||||
func (d *Pan123Share) Drop(ctx context.Context) error {
|
||||
d.ref = nil
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Pan123Share) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||
// TODO return the files list, required
|
||||
files, err := d.getFiles(dir.GetID())
|
||||
files, err := d.getFiles(ctx, dir.GetID())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -150,11 +163,12 @@ func (d *Pan123Share) Put(ctx context.Context, dstDir model.Obj, stream model.Fi
|
||||
// return nil, errs.NotSupport
|
||||
//}
|
||||
|
||||
func (d *Pan123Share) APIRateLimit(api string) bool {
|
||||
limiter, _ := d.apiRateLimit.LoadOrStore(api,
|
||||
rate.NewLimiter(rate.Every(time.Millisecond*700), 1))
|
||||
ins := limiter.(*rate.Limiter)
|
||||
return ins.Allow()
|
||||
func (d *Pan123Share) APIRateLimit(ctx context.Context, api string) error {
|
||||
value, _ := d.apiRateLimit.LoadOrStore(api,
|
||||
rate.NewLimiter(rate.Every(700*time.Millisecond), 1))
|
||||
limiter := value.(*rate.Limiter)
|
||||
|
||||
return limiter.Wait(ctx)
|
||||
}
|
||||
|
||||
var _ driver.Driver = (*Pan123Share)(nil)
|
||||
|
@ -9,9 +9,9 @@ type Addition struct {
|
||||
ShareKey string `json:"sharekey" required:"true"`
|
||||
SharePwd string `json:"sharepassword"`
|
||||
driver.RootID
|
||||
OrderBy string `json:"order_by" type:"select" options:"file_name,size,update_at" default:"file_name"`
|
||||
OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
|
||||
AccessToken string `json:"accesstoken" type:"text"`
|
||||
//OrderBy string `json:"order_by" type:"select" options:"file_name,size,update_at" default:"file_name"`
|
||||
//OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
|
||||
AccessToken string `json:"accesstoken" type:"text"`
|
||||
}
|
||||
|
||||
var config = driver.Config{
|
||||
|
@ -1,6 +1,7 @@
|
||||
package _123Share
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"hash/crc32"
|
||||
@ -52,6 +53,9 @@ func GetApi(rawUrl string) string {
|
||||
}
|
||||
|
||||
func (d *Pan123Share) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
||||
if d.ref != nil {
|
||||
return d.ref.Request(url, method, callback, resp)
|
||||
}
|
||||
req := base.RestyClient.R()
|
||||
req.SetHeaders(map[string]string{
|
||||
"origin": "https://www.123pan.com",
|
||||
@ -80,20 +84,19 @@ func (d *Pan123Share) request(url string, method string, callback base.ReqCallba
|
||||
return body, nil
|
||||
}
|
||||
|
||||
func (d *Pan123Share) getFiles(parentId string) ([]File, error) {
|
||||
func (d *Pan123Share) getFiles(ctx context.Context, parentId string) ([]File, error) {
|
||||
page := 1
|
||||
res := make([]File, 0)
|
||||
for {
|
||||
if !d.APIRateLimit(FileList) {
|
||||
time.Sleep(time.Millisecond * 200)
|
||||
continue
|
||||
if err := d.APIRateLimit(ctx, FileList); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var resp Files
|
||||
query := map[string]string{
|
||||
"limit": "100",
|
||||
"next": "0",
|
||||
"orderBy": d.OrderBy,
|
||||
"orderDirection": d.OrderDirection,
|
||||
"orderBy": "file_id",
|
||||
"orderDirection": "desc",
|
||||
"parentFileId": parentId,
|
||||
"Page": strconv.Itoa(page),
|
||||
"shareKey": d.ShareKey,
|
||||
|
@ -2,28 +2,29 @@ package _139
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"path"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/errs"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/alist-org/alist/v3/pkg/cron"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/alist-org/alist/v3/pkg/utils/random"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
type Yun139 struct {
|
||||
model.Storage
|
||||
Addition
|
||||
cron *cron.Cron
|
||||
cron *cron.Cron
|
||||
Account string
|
||||
ref *Yun139
|
||||
}
|
||||
|
||||
func (d *Yun139) Config() driver.Config {
|
||||
@ -35,56 +36,78 @@ func (d *Yun139) GetAddition() driver.Additional {
|
||||
}
|
||||
|
||||
func (d *Yun139) Init(ctx context.Context) error {
|
||||
if d.Authorization == "" {
|
||||
return fmt.Errorf("authorization is empty")
|
||||
}
|
||||
d.cron = cron.NewCron(time.Hour * 24 * 7)
|
||||
d.cron.Do(func() {
|
||||
if d.ref == nil {
|
||||
if d.Authorization == "" {
|
||||
return fmt.Errorf("authorization is empty")
|
||||
}
|
||||
err := d.refreshToken()
|
||||
if err != nil {
|
||||
log.Errorf("%+v", err)
|
||||
return err
|
||||
}
|
||||
})
|
||||
d.cron = cron.NewCron(time.Hour * 12)
|
||||
d.cron.Do(func() {
|
||||
err := d.refreshToken()
|
||||
if err != nil {
|
||||
log.Errorf("%+v", err)
|
||||
}
|
||||
})
|
||||
}
|
||||
switch d.Addition.Type {
|
||||
case MetaPersonalNew:
|
||||
if len(d.Addition.RootFolderID) == 0 {
|
||||
d.RootFolderID = "/"
|
||||
}
|
||||
return nil
|
||||
case MetaPersonal:
|
||||
if len(d.Addition.RootFolderID) == 0 {
|
||||
d.RootFolderID = "root"
|
||||
}
|
||||
fallthrough
|
||||
case MetaGroup:
|
||||
if len(d.Addition.RootFolderID) == 0 {
|
||||
d.RootFolderID = d.CloudID
|
||||
}
|
||||
case MetaFamily:
|
||||
decode, err := base64.StdEncoding.DecodeString(d.Authorization)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
decodeStr := string(decode)
|
||||
splits := strings.Split(decodeStr, ":")
|
||||
if len(splits) < 2 {
|
||||
return fmt.Errorf("authorization is invalid, splits < 2")
|
||||
}
|
||||
d.Account = splits[1]
|
||||
_, err = d.post("/orchestration/personalCloud/user/v1.0/qryUserExternInfo", base.Json{
|
||||
"qryUserExternInfoReq": base.Json{
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"accountType": 1,
|
||||
},
|
||||
},
|
||||
}, nil)
|
||||
return err
|
||||
default:
|
||||
return errs.NotImplement
|
||||
}
|
||||
// if d.ref != nil {
|
||||
// return nil
|
||||
// }
|
||||
// decode, err := base64.StdEncoding.DecodeString(d.Authorization)
|
||||
// if err != nil {
|
||||
// return err
|
||||
// }
|
||||
// decodeStr := string(decode)
|
||||
// splits := strings.Split(decodeStr, ":")
|
||||
// if len(splits) < 2 {
|
||||
// return fmt.Errorf("authorization is invalid, splits < 2")
|
||||
// }
|
||||
// d.Account = splits[1]
|
||||
// _, err = d.post("/orchestration/personalCloud/user/v1.0/qryUserExternInfo", base.Json{
|
||||
// "qryUserExternInfoReq": base.Json{
|
||||
// "commonAccountInfo": base.Json{
|
||||
// "account": d.getAccount(),
|
||||
// "accountType": 1,
|
||||
// },
|
||||
// },
|
||||
// }, nil)
|
||||
// return err
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Yun139) InitReference(storage driver.Driver) error {
|
||||
refStorage, ok := storage.(*Yun139)
|
||||
if ok {
|
||||
d.ref = refStorage
|
||||
return nil
|
||||
}
|
||||
return errs.NotSupport
|
||||
}
|
||||
|
||||
func (d *Yun139) Drop(ctx context.Context) error {
|
||||
if d.cron != nil {
|
||||
d.cron.Stop()
|
||||
}
|
||||
d.ref = nil
|
||||
return nil
|
||||
}
|
||||
|
||||
@ -96,6 +119,8 @@ func (d *Yun139) List(ctx context.Context, dir model.Obj, args model.ListArgs) (
|
||||
return d.getFiles(dir.GetID())
|
||||
case MetaFamily:
|
||||
return d.familyGetFiles(dir.GetID())
|
||||
case MetaGroup:
|
||||
return d.groupGetFiles(dir.GetID())
|
||||
default:
|
||||
return nil, errs.NotImplement
|
||||
}
|
||||
@ -108,9 +133,11 @@ func (d *Yun139) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
|
||||
case MetaPersonalNew:
|
||||
url, err = d.personalGetLink(file.GetID())
|
||||
case MetaPersonal:
|
||||
fallthrough
|
||||
case MetaFamily:
|
||||
url, err = d.getLink(file.GetID())
|
||||
case MetaFamily:
|
||||
url, err = d.familyGetLink(file.GetID(), file.GetPath())
|
||||
case MetaGroup:
|
||||
url, err = d.groupGetLink(file.GetID(), file.GetPath())
|
||||
default:
|
||||
return nil, errs.NotImplement
|
||||
}
|
||||
@ -139,7 +166,7 @@ func (d *Yun139) MakeDir(ctx context.Context, parentDir model.Obj, dirName strin
|
||||
"parentCatalogID": parentDir.GetID(),
|
||||
"newCatalogName": dirName,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
},
|
||||
@ -150,12 +177,26 @@ func (d *Yun139) MakeDir(ctx context.Context, parentDir model.Obj, dirName strin
|
||||
data := base.Json{
|
||||
"cloudID": d.CloudID,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
"docLibName": dirName,
|
||||
"path": path.Join(parentDir.GetPath(), parentDir.GetID()),
|
||||
}
|
||||
pathname := "/orchestration/familyCloud/cloudCatalog/v1.0/createCloudDoc"
|
||||
pathname := "/orchestration/familyCloud-rebuild/cloudCatalog/v1.0/createCloudDoc"
|
||||
_, err = d.post(pathname, data, nil)
|
||||
case MetaGroup:
|
||||
data := base.Json{
|
||||
"catalogName": dirName,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
"groupID": d.CloudID,
|
||||
"parentFileId": parentDir.GetID(),
|
||||
"path": path.Join(parentDir.GetPath(), parentDir.GetID()),
|
||||
}
|
||||
pathname := "/orchestration/group-rebuild/catalog/v1.0/createGroupCatalog"
|
||||
_, err = d.post(pathname, data, nil)
|
||||
default:
|
||||
err = errs.NotImplement
|
||||
@ -176,6 +217,34 @@ func (d *Yun139) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj,
|
||||
return nil, err
|
||||
}
|
||||
return srcObj, nil
|
||||
case MetaGroup:
|
||||
var contentList []string
|
||||
var catalogList []string
|
||||
if srcObj.IsDir() {
|
||||
catalogList = append(catalogList, srcObj.GetID())
|
||||
} else {
|
||||
contentList = append(contentList, srcObj.GetID())
|
||||
}
|
||||
data := base.Json{
|
||||
"taskType": 3,
|
||||
"srcType": 2,
|
||||
"srcGroupID": d.CloudID,
|
||||
"destType": 2,
|
||||
"destGroupID": d.CloudID,
|
||||
"destPath": dstDir.GetPath(),
|
||||
"contentList": contentList,
|
||||
"catalogList": catalogList,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
}
|
||||
pathname := "/orchestration/group-rebuild/task/v1.0/createBatchOprTask"
|
||||
_, err := d.post(pathname, data, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return srcObj, nil
|
||||
case MetaPersonal:
|
||||
var contentInfoList []string
|
||||
var catalogInfoList []string
|
||||
@ -194,7 +263,7 @@ func (d *Yun139) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj,
|
||||
"newCatalogID": dstDir.GetID(),
|
||||
},
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
},
|
||||
@ -229,7 +298,7 @@ func (d *Yun139) Rename(ctx context.Context, srcObj model.Obj, newName string) e
|
||||
"catalogID": srcObj.GetID(),
|
||||
"catalogName": newName,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
}
|
||||
@ -239,13 +308,72 @@ func (d *Yun139) Rename(ctx context.Context, srcObj model.Obj, newName string) e
|
||||
"contentID": srcObj.GetID(),
|
||||
"contentName": newName,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
}
|
||||
pathname = "/orchestration/personalCloud/content/v1.0/updateContentInfo"
|
||||
}
|
||||
_, err = d.post(pathname, data, nil)
|
||||
case MetaGroup:
|
||||
var data base.Json
|
||||
var pathname string
|
||||
if srcObj.IsDir() {
|
||||
data = base.Json{
|
||||
"groupID": d.CloudID,
|
||||
"modifyCatalogID": srcObj.GetID(),
|
||||
"modifyCatalogName": newName,
|
||||
"path": srcObj.GetPath(),
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
}
|
||||
pathname = "/orchestration/group-rebuild/catalog/v1.0/modifyGroupCatalog"
|
||||
} else {
|
||||
data = base.Json{
|
||||
"groupID": d.CloudID,
|
||||
"contentID": srcObj.GetID(),
|
||||
"contentName": newName,
|
||||
"path": srcObj.GetPath(),
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
}
|
||||
pathname = "/orchestration/group-rebuild/content/v1.0/modifyGroupContent"
|
||||
}
|
||||
_, err = d.post(pathname, data, nil)
|
||||
case MetaFamily:
|
||||
var data base.Json
|
||||
var pathname string
|
||||
if srcObj.IsDir() {
|
||||
// 网页接口不支持重命名家庭云文件夹
|
||||
// data = base.Json{
|
||||
// "catalogType": 3,
|
||||
// "catalogID": srcObj.GetID(),
|
||||
// "catalogName": newName,
|
||||
// "commonAccountInfo": base.Json{
|
||||
// "account": d.getAccount(),
|
||||
// "accountType": 1,
|
||||
// },
|
||||
// "path": srcObj.GetPath(),
|
||||
// }
|
||||
// pathname = "/orchestration/familyCloud-rebuild/photoContent/v1.0/modifyCatalogInfo"
|
||||
return errs.NotImplement
|
||||
} else {
|
||||
data = base.Json{
|
||||
"contentID": srcObj.GetID(),
|
||||
"contentName": newName,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
"path": srcObj.GetPath(),
|
||||
}
|
||||
pathname = "/orchestration/familyCloud-rebuild/photoContent/v1.0/modifyContentInfo"
|
||||
}
|
||||
_, err = d.post(pathname, data, nil)
|
||||
default:
|
||||
err = errs.NotImplement
|
||||
}
|
||||
@ -281,7 +409,7 @@ func (d *Yun139) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
"newCatalogID": dstDir.GetID(),
|
||||
},
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
},
|
||||
@ -303,6 +431,28 @@ func (d *Yun139) Remove(ctx context.Context, obj model.Obj) error {
|
||||
pathname := "/hcy/recyclebin/batchTrash"
|
||||
_, err := d.personalPost(pathname, data, nil)
|
||||
return err
|
||||
case MetaGroup:
|
||||
var contentList []string
|
||||
var catalogList []string
|
||||
// 必须使用完整路径删除
|
||||
if obj.IsDir() {
|
||||
catalogList = append(catalogList, obj.GetPath())
|
||||
} else {
|
||||
contentList = append(contentList, path.Join(obj.GetPath(), obj.GetID()))
|
||||
}
|
||||
data := base.Json{
|
||||
"taskType": 2,
|
||||
"srcGroupID": d.CloudID,
|
||||
"contentList": contentList,
|
||||
"catalogList": catalogList,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
}
|
||||
pathname := "/orchestration/group-rebuild/task/v1.0/createBatchOprTask"
|
||||
_, err := d.post(pathname, data, nil)
|
||||
return err
|
||||
case MetaPersonal:
|
||||
fallthrough
|
||||
case MetaFamily:
|
||||
@ -323,7 +473,7 @@ func (d *Yun139) Remove(ctx context.Context, obj model.Obj) error {
|
||||
"catalogInfoList": catalogInfoList,
|
||||
},
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
},
|
||||
@ -334,13 +484,15 @@ func (d *Yun139) Remove(ctx context.Context, obj model.Obj) error {
|
||||
"catalogList": catalogInfoList,
|
||||
"contentList": contentInfoList,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
"sourceCloudID": d.CloudID,
|
||||
"sourceCatalogType": 1002,
|
||||
"taskType": 2,
|
||||
"path": obj.GetPath(),
|
||||
}
|
||||
pathname = "/orchestration/familyCloud/batchOprTask/v1.0/createBatchOprTask"
|
||||
pathname = "/orchestration/familyCloud-rebuild/batchOprTask/v1.0/createBatchOprTask"
|
||||
}
|
||||
_, err := d.post(pathname, data, nil)
|
||||
return err
|
||||
@ -357,7 +509,10 @@ const (
|
||||
TB
|
||||
)
|
||||
|
||||
func getPartSize(size int64) int64 {
|
||||
func (d *Yun139) getPartSize(size int64) int64 {
|
||||
if d.CustomUploadPartSize != 0 {
|
||||
return d.CustomUploadPartSize
|
||||
}
|
||||
// 网盘对于分片数量存在上限
|
||||
if size/GB > 30 {
|
||||
return 512 * MB
|
||||
@ -380,24 +535,51 @@ func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
|
||||
return err
|
||||
}
|
||||
}
|
||||
// return errs.NotImplement
|
||||
|
||||
partInfos := []PartInfo{}
|
||||
var partSize = d.getPartSize(stream.GetSize())
|
||||
part := (stream.GetSize() + partSize - 1) / partSize
|
||||
if part == 0 {
|
||||
part = 1
|
||||
}
|
||||
for i := int64(0); i < part; i++ {
|
||||
if utils.IsCanceled(ctx) {
|
||||
return ctx.Err()
|
||||
}
|
||||
start := i * partSize
|
||||
byteSize := stream.GetSize() - start
|
||||
if byteSize > partSize {
|
||||
byteSize = partSize
|
||||
}
|
||||
partNumber := i + 1
|
||||
partInfo := PartInfo{
|
||||
PartNumber: partNumber,
|
||||
PartSize: byteSize,
|
||||
ParallelHashCtx: ParallelHashCtx{
|
||||
PartOffset: start,
|
||||
},
|
||||
}
|
||||
partInfos = append(partInfos, partInfo)
|
||||
}
|
||||
|
||||
// 筛选出前 100 个 partInfos
|
||||
firstPartInfos := partInfos
|
||||
if len(firstPartInfos) > 100 {
|
||||
firstPartInfos = firstPartInfos[:100]
|
||||
}
|
||||
|
||||
// 创建任务,获取上传信息和前100个分片的上传地址
|
||||
data := base.Json{
|
||||
"contentHash": fullHash,
|
||||
"contentHashAlgorithm": "SHA256",
|
||||
"contentType": "application/octet-stream",
|
||||
"parallelUpload": false,
|
||||
"partInfos": []base.Json{{
|
||||
"parallelHashCtx": base.Json{
|
||||
"partOffset": 0,
|
||||
},
|
||||
"partNumber": 1,
|
||||
"partSize": stream.GetSize(),
|
||||
}},
|
||||
"size": stream.GetSize(),
|
||||
"parentFileId": dstDir.GetID(),
|
||||
"name": stream.GetName(),
|
||||
"type": "file",
|
||||
"fileRenameMode": "auto_rename",
|
||||
"partInfos": firstPartInfos,
|
||||
"size": stream.GetSize(),
|
||||
"parentFileId": dstDir.GetID(),
|
||||
"name": stream.GetName(),
|
||||
"type": "file",
|
||||
"fileRenameMode": "auto_rename",
|
||||
}
|
||||
pathname := "/hcy/file/create"
|
||||
var resp PersonalUploadResp
|
||||
@ -406,52 +588,156 @@ func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
|
||||
return err
|
||||
}
|
||||
|
||||
if resp.Data.Exist || resp.Data.RapidUpload {
|
||||
// 判断文件是否已存在
|
||||
// resp.Data.Exist: true 已存在同名文件且校验相同,云端不会重复增加文件,无需手动处理冲突
|
||||
if resp.Data.Exist {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Progress
|
||||
p := driver.NewProgress(stream.GetSize(), up)
|
||||
// 判断文件是否支持快传
|
||||
// resp.Data.RapidUpload: true 支持快传,但此处直接检测是否返回分片的上传地址
|
||||
// 快传的情况下同样需要手动处理冲突
|
||||
if resp.Data.PartInfos != nil {
|
||||
// 读取前100个分片的上传地址
|
||||
uploadPartInfos := resp.Data.PartInfos
|
||||
|
||||
// Update Progress
|
||||
r := io.TeeReader(stream, p)
|
||||
// 获取后续分片的上传地址
|
||||
for i := 101; i < len(partInfos); i += 100 {
|
||||
end := i + 100
|
||||
if end > len(partInfos) {
|
||||
end = len(partInfos)
|
||||
}
|
||||
batchPartInfos := partInfos[i:end]
|
||||
|
||||
req, err := http.NewRequest("PUT", resp.Data.PartInfos[0].UploadUrl, r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req = req.WithContext(ctx)
|
||||
req.Header.Set("Content-Type", "application/octet-stream")
|
||||
req.Header.Set("Content-Length", fmt.Sprint(stream.GetSize()))
|
||||
req.Header.Set("Origin", "https://yun.139.com")
|
||||
req.Header.Set("Referer", "https://yun.139.com/")
|
||||
req.ContentLength = stream.GetSize()
|
||||
moredata := base.Json{
|
||||
"fileId": resp.Data.FileId,
|
||||
"uploadId": resp.Data.UploadId,
|
||||
"partInfos": batchPartInfos,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
}
|
||||
pathname := "/hcy/file/getUploadUrl"
|
||||
var moreresp PersonalUploadUrlResp
|
||||
_, err = d.personalPost(pathname, moredata, &moreresp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
uploadPartInfos = append(uploadPartInfos, moreresp.Data.PartInfos...)
|
||||
}
|
||||
|
||||
res, err := base.HttpClient.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
// Progress
|
||||
p := driver.NewProgress(stream.GetSize(), up)
|
||||
|
||||
// 上传所有分片
|
||||
for _, uploadPartInfo := range uploadPartInfos {
|
||||
index := uploadPartInfo.PartNumber - 1
|
||||
partSize := partInfos[index].PartSize
|
||||
log.Debugf("[139] uploading part %+v/%+v", index, len(uploadPartInfos))
|
||||
limitReader := io.LimitReader(stream, partSize)
|
||||
|
||||
// Update Progress
|
||||
r := io.TeeReader(limitReader, p)
|
||||
|
||||
req, err := http.NewRequest("PUT", uploadPartInfo.UploadUrl, r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req = req.WithContext(ctx)
|
||||
req.Header.Set("Content-Type", "application/octet-stream")
|
||||
req.Header.Set("Content-Length", fmt.Sprint(partSize))
|
||||
req.Header.Set("Origin", "https://yun.139.com")
|
||||
req.Header.Set("Referer", "https://yun.139.com/")
|
||||
req.ContentLength = partSize
|
||||
|
||||
res, err := base.HttpClient.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_ = res.Body.Close()
|
||||
log.Debugf("[139] uploaded: %+v", res)
|
||||
if res.StatusCode != http.StatusOK {
|
||||
return fmt.Errorf("unexpected status code: %d", res.StatusCode)
|
||||
}
|
||||
}
|
||||
|
||||
data = base.Json{
|
||||
"contentHash": fullHash,
|
||||
"contentHashAlgorithm": "SHA256",
|
||||
"fileId": resp.Data.FileId,
|
||||
"uploadId": resp.Data.UploadId,
|
||||
}
|
||||
_, err = d.personalPost("/hcy/file/complete", data, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
_ = res.Body.Close()
|
||||
log.Debugf("%+v", res)
|
||||
if res.StatusCode != http.StatusOK {
|
||||
return fmt.Errorf("unexpected status code: %d", res.StatusCode)
|
||||
}
data = base.Json{
"contentHash": fullHash,
"contentHashAlgorithm": "SHA256",
"fileId": resp.Data.FileId,
"uploadId": resp.Data.UploadId,
}
_, err = d.personalPost("/hcy/file/complete", data, nil)
if err != nil {
return err
// handle conflicts
if resp.Data.FileName != stream.GetName() {
log.Debugf("[139] conflict detected: %s != %s", resp.Data.FileName, stream.GetName())
// give the server some time to process the data; otherwise the file list may fail to refresh
time.Sleep(time.Millisecond * 500)
// refresh and fetch the file list
files, err := d.List(ctx, dstDir, model.ListArgs{Refresh: true})
if err != nil {
return err
}
// delete the old file
for _, file := range files {
if file.GetName() == stream.GetName() {
log.Debugf("[139] conflict: removing old: %s", file.GetName())
// rename the old file before deleting it, to avoid a lingering conflict
err = d.Rename(ctx, file, stream.GetName()+random.String(4))
if err != nil {
return err
}
err = d.Remove(ctx, file)
if err != nil {
return err
}
break
}
}
// rename the new file
for _, file := range files {
if file.GetName() == resp.Data.FileName {
log.Debugf("[139] conflict: renaming new: %s => %s", file.GetName(), stream.GetName())
err = d.Rename(ctx, file, stream.GetName())
if err != nil {
return err
}
break
}
}
}
return nil
case MetaPersonal:
fallthrough
case MetaFamily:
// handle conflicts
// fetch the file list
files, err := d.List(ctx, dstDir, model.ListArgs{})
if err != nil {
return err
}
// delete the old file
for _, file := range files {
if file.GetName() == stream.GetName() {
log.Debugf("[139] conflict: removing old: %s", file.GetName())
// rename the old file before deleting it, to avoid a lingering conflict
err = d.Rename(ctx, file, stream.GetName()+random.String(4))
if err != nil {
return err
}
err = d.Remove(ctx, file)
if err != nil {
return err
}
break
}
}
|
||||
data := base.Json{
|
||||
"manualRename": 2,
|
||||
"operation": 0,
|
||||
@ -465,30 +751,29 @@ func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
|
||||
"parentCatalogID": dstDir.GetID(),
|
||||
"newCatalogName": "",
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
}
|
||||
pathname := "/orchestration/personalCloud/uploadAndDownload/v1.0/pcUploadFileRequest"
|
||||
if d.isFamily() {
|
||||
// data = d.newJson(base.Json{
|
||||
// "fileCount": 1,
|
||||
// "manualRename": 2,
|
||||
// "operation": 0,
|
||||
// "path": "",
|
||||
// "seqNo": "",
|
||||
// "totalSize": 0,
|
||||
// "uploadContentList": []base.Json{{
|
||||
// "contentName": stream.GetName(),
|
||||
// "contentSize": 0,
|
||||
// // "digest": "5a3231986ce7a6b46e408612d385bafa"
|
||||
// }},
|
||||
// })
|
||||
// pathname = "/orchestration/familyCloud/content/v1.0/getFileUploadURL"
|
||||
return errs.NotImplement
|
||||
data = d.newJson(base.Json{
|
||||
"fileCount": 1,
|
||||
"manualRename": 2,
|
||||
"operation": 0,
|
||||
"path": path.Join(dstDir.GetPath(), dstDir.GetID()),
|
||||
"seqNo": random.String(32), //序列号不能为空
|
||||
"totalSize": 0,
|
||||
"uploadContentList": []base.Json{{
|
||||
"contentName": stream.GetName(),
|
||||
"contentSize": 0,
|
||||
// "digest": "5a3231986ce7a6b46e408612d385bafa"
|
||||
}},
|
||||
})
|
||||
pathname = "/orchestration/familyCloud-rebuild/content/v1.0/getFileUploadURL"
|
||||
}
|
||||
var resp UploadResp
|
||||
_, err := d.post(pathname, data, &resp)
|
||||
_, err = d.post(pathname, data, &resp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@ -496,7 +781,7 @@ func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
|
||||
// Progress
|
||||
p := driver.NewProgress(stream.GetSize(), up)
|
||||
|
||||
var partSize = getPartSize(stream.GetSize())
|
||||
var partSize = d.getPartSize(stream.GetSize())
|
||||
part := (stream.GetSize() + partSize - 1) / partSize
|
||||
if part == 0 {
|
||||
part = 1
|
||||
|
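The part count above is a ceiling division with a floor of one part for empty files; a small standalone Go sketch of the same arithmetic (the sizes in the closing comment are illustrative):

// partCount mirrors the calculation above: ceil(size/partSize), but never zero.
func partCount(size, partSize int64) int64 {
	part := (size + partSize - 1) / partSize
	if part == 0 {
		part = 1
	}
	return part
}

// partCount(10<<20, 4<<20) == 3, partCount(0, 4<<20) == 1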
@ -9,17 +9,21 @@ type Addition struct {
|
||||
//Account string `json:"account" required:"true"`
|
||||
Authorization string `json:"authorization" type:"text" required:"true"`
|
||||
driver.RootID
|
||||
Type string `json:"type" type:"select" options:"personal,family,personal_new" default:"personal"`
|
||||
CloudID string `json:"cloud_id"`
|
||||
Type string `json:"type" type:"select" options:"personal_new,family,group,personal" default:"personal_new"`
|
||||
CloudID string `json:"cloud_id"`
|
||||
CustomUploadPartSize int64 `json:"custom_upload_part_size" type:"number" default:"0" help:"0 for auto"`
|
||||
}
|
||||
|
||||
var config = driver.Config{
|
||||
Name: "139Yun",
|
||||
LocalSort: true,
|
||||
Name: "139Yun",
|
||||
LocalSort: true,
|
||||
ProxyRangeOption: true,
|
||||
}
|
||||
|
||||
func init() {
|
||||
op.RegisterDriver(func() driver.Driver {
|
||||
return &Yun139{}
|
||||
d := &Yun139{}
|
||||
d.ProxyRange = true
|
||||
return d
|
||||
})
|
||||
}
|
||||
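The new custom_upload_part_size option is consumed by d.getPartSize, whose body is not part of this compare view; purely as a hedged guess at its shape (the fallback value is an assumption, not taken from the source):

// getPartSizeSketch: prefer the user-supplied size when positive, otherwise
// fall back to a driver default (16 MiB here is only illustrative).
func getPartSizeSketch(custom int64) int64 {
	if custom > 0 {
		return custom
	}
	return 16 << 20
}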
|
@ -7,6 +7,7 @@ import (
|
||||
const (
|
||||
MetaPersonal string = "personal"
|
||||
MetaFamily string = "family"
|
||||
MetaGroup string = "group"
|
||||
MetaPersonalNew string = "personal_new"
|
||||
)
|
||||
|
||||
@ -54,6 +55,7 @@ type Content struct {
|
||||
//ContentDesc string `json:"contentDesc"`
|
||||
//ContentType int `json:"contentType"`
|
||||
//ContentOrigin int `json:"contentOrigin"`
|
||||
CreateTime string `json:"createTime"`
|
||||
UpdateTime string `json:"updateTime"`
|
||||
//CommentCount int `json:"commentCount"`
|
||||
ThumbnailURL string `json:"thumbnailURL"`
|
||||
@ -196,6 +198,37 @@ type QueryContentListResp struct {
|
||||
} `json:"data"`
|
||||
}
|
||||
|
||||
type QueryGroupContentListResp struct {
|
||||
BaseResp
|
||||
Data struct {
|
||||
Result struct {
|
||||
ResultCode string `json:"resultCode"`
|
||||
ResultDesc string `json:"resultDesc"`
|
||||
} `json:"result"`
|
||||
GetGroupContentResult struct {
|
||||
ParentCatalogID string `json:"parentCatalogID"` // the root directory is "0"
|
||||
CatalogList []struct {
|
||||
Catalog
|
||||
Path string `json:"path"`
|
||||
} `json:"catalogList"`
|
||||
ContentList []Content `json:"contentList"`
|
||||
NodeCount int `json:"nodeCount"` // number of files + folders
CtlgCnt int `json:"ctlgCnt"` // number of folders
ContCnt int `json:"contCnt"` // number of files
|
||||
} `json:"getGroupContentResult"`
|
||||
} `json:"data"`
|
||||
}
|
||||
|
||||
type ParallelHashCtx struct {
|
||||
PartOffset int64 `json:"partOffset"`
|
||||
}
|
||||
|
||||
type PartInfo struct {
|
||||
PartNumber int64 `json:"partNumber"`
|
||||
PartSize int64 `json:"partSize"`
|
||||
ParallelHashCtx ParallelHashCtx `json:"parallelHashCtx"`
|
||||
}
|
||||
|
||||
type PersonalThumbnail struct {
|
||||
Style string `json:"style"`
|
||||
Url string `json:"url"`
|
||||
@ -228,6 +261,7 @@ type PersonalUploadResp struct {
|
||||
BaseResp
|
||||
Data struct {
|
||||
FileId string `json:"fileId"`
|
||||
FileName string `json:"fileName"`
|
||||
PartInfos []PersonalPartInfo `json:"partInfos"`
|
||||
Exist bool `json:"exist"`
|
||||
RapidUpload bool `json:"rapidUpload"`
|
||||
@ -235,6 +269,15 @@ type PersonalUploadResp struct {
|
||||
}
|
||||
}
|
||||
|
||||
type PersonalUploadUrlResp struct {
|
||||
BaseResp
|
||||
Data struct {
|
||||
FileId string `json:"fileId"`
|
||||
UploadId string `json:"uploadId"`
|
||||
PartInfos []PersonalPartInfo `json:"partInfos"`
|
||||
}
|
||||
}
|
||||
|
||||
type RefreshTokenResp struct {
|
||||
XMLName xml.Name `xml:"root"`
|
||||
Return string `xml:"return"`
|
||||
|
@ -6,6 +6,7 @@ import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"path"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
@ -13,9 +14,9 @@ import (
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/alist-org/alist/v3/pkg/utils/random"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
"github.com/go-resty/resty/v2"
|
||||
jsoniter "github.com/json-iterator/go"
|
||||
log "github.com/sirupsen/logrus"
|
||||
@ -54,14 +55,37 @@ func getTime(t string) time.Time {
|
||||
}
|
||||
|
||||
func (d *Yun139) refreshToken() error {
|
||||
url := "https://aas.caiyun.feixin.10086.cn:443/tellin/authTokenRefresh.do"
|
||||
var resp RefreshTokenResp
|
||||
if d.ref != nil {
|
||||
return d.ref.refreshToken()
|
||||
}
|
||||
decode, err := base64.StdEncoding.DecodeString(d.Authorization)
|
||||
if err != nil {
|
||||
return err
|
||||
return fmt.Errorf("authorization decode failed: %s", err)
|
||||
}
|
||||
decodeStr := string(decode)
|
||||
splits := strings.Split(decodeStr, ":")
|
||||
if len(splits) < 3 {
|
||||
return fmt.Errorf("authorization is invalid, splits < 3")
|
||||
}
|
||||
strs := strings.Split(splits[2], "|")
|
||||
if len(strs) < 4 {
|
||||
return fmt.Errorf("authorization is invalid, strs < 4")
|
||||
}
|
||||
expiration, err := strconv.ParseInt(strs[3], 10, 64)
|
||||
if err != nil {
|
||||
return fmt.Errorf("authorization is invalid")
|
||||
}
|
||||
expiration -= time.Now().UnixMilli()
|
||||
if expiration > 1000*60*60*24*15 {
|
||||
// no refresh needed while the Authorization is valid for more than 15 days
|
||||
return nil
|
||||
}
|
||||
if expiration < 0 {
|
||||
return fmt.Errorf("authorization has expired")
|
||||
}
|
||||
|
||||
url := "https://aas.caiyun.feixin.10086.cn:443/tellin/authTokenRefresh.do"
|
||||
var resp RefreshTokenResp
|
||||
reqBody := "<root><token>" + splits[2] + "</token><account>" + splits[1] + "</account><clienttype>656</clienttype></root>"
|
||||
_, err = base.RestyClient.R().
|
||||
ForceContentType("application/xml").
|
||||
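The validation above base64-decodes the Authorization into at least three colon-separated fields (the account is the second, the token the third) and reads the token's fourth |-separated field as an expiry in Unix milliseconds; a standalone sketch mirroring that parsing (requires encoding/base64, fmt, strconv and strings):

// tokenExpiryMillis extracts the millisecond expiry embedded in the
// authorization string, mirroring the checks above.
func tokenExpiryMillis(authorization string) (int64, error) {
	raw, err := base64.StdEncoding.DecodeString(authorization)
	if err != nil {
		return 0, fmt.Errorf("authorization decode failed: %w", err)
	}
	splits := strings.Split(string(raw), ":")
	if len(splits) < 3 {
		return 0, fmt.Errorf("authorization is invalid, splits < 3")
	}
	strs := strings.Split(splits[2], "|")
	if len(strs) < 4 {
		return 0, fmt.Errorf("authorization is invalid, strs < 4")
	}
	return strconv.ParseInt(strs[3], 10, 64)
}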
@ -99,21 +123,22 @@ func (d *Yun139) request(pathname string, method string, callback base.ReqCallba
|
||||
req.SetHeaders(map[string]string{
|
||||
"Accept": "application/json, text/plain, */*",
|
||||
"CMS-DEVICE": "default",
|
||||
"Authorization": "Basic " + d.Authorization,
|
||||
"Authorization": "Basic " + d.getAuthorization(),
|
||||
"mcloud-channel": "1000101",
|
||||
"mcloud-client": "10701",
|
||||
//"mcloud-route": "001",
|
||||
"mcloud-sign": fmt.Sprintf("%s,%s,%s", ts, randStr, sign),
|
||||
//"mcloud-skey":"",
|
||||
"mcloud-version": "6.6.0",
|
||||
"Origin": "https://yun.139.com",
|
||||
"Referer": "https://yun.139.com/w/",
|
||||
"x-DeviceInfo": "||9|6.6.0|chrome|95.0.4638.69|uwIy75obnsRPIwlJSd7D9GhUvFwG96ce||macos 10.15.2||zh-CN|||",
|
||||
"x-huawei-channelSrc": "10000034",
|
||||
"x-inner-ntwk": "2",
|
||||
"x-m4c-caller": "PC",
|
||||
"x-m4c-src": "10002",
|
||||
"x-SvcType": svcType,
|
||||
"mcloud-version": "7.14.0",
|
||||
"Origin": "https://yun.139.com",
|
||||
"Referer": "https://yun.139.com/w/",
|
||||
"x-DeviceInfo": "||9|7.14.0|chrome|120.0.0.0|||windows 10||zh-CN|||",
|
||||
"x-huawei-channelSrc": "10000034",
|
||||
"x-inner-ntwk": "2",
|
||||
"x-m4c-caller": "PC",
|
||||
"x-m4c-src": "10002",
|
||||
"x-SvcType": svcType,
|
||||
"Inner-Hcy-Router-Https": "1",
|
||||
})
|
||||
|
||||
var e BaseResp
|
||||
@ -151,7 +176,7 @@ func (d *Yun139) getFiles(catalogID string) ([]model.Obj, error) {
|
||||
"catalogSortType": 0,
|
||||
"contentSortType": 0,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
}
|
||||
@ -199,7 +224,7 @@ func (d *Yun139) newJson(data map[string]interface{}) base.Json {
|
||||
"cloudID": d.CloudID,
|
||||
"cloudType": 1,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
}
|
||||
@ -220,10 +245,11 @@ func (d *Yun139) familyGetFiles(catalogID string) ([]model.Obj, error) {
|
||||
"sortDirection": 1,
|
||||
})
|
||||
var resp QueryContentListResp
|
||||
_, err := d.post("/orchestration/familyCloud/content/v1.0/queryContentList", data, &resp)
|
||||
_, err := d.post("/orchestration/familyCloud-rebuild/content/v1.2/queryContentList", data, &resp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
path := resp.Data.Path
|
||||
for _, catalog := range resp.Data.CloudCatalogList {
|
||||
f := model.Object{
|
||||
ID: catalog.CatalogID,
|
||||
@ -232,6 +258,7 @@ func (d *Yun139) familyGetFiles(catalogID string) ([]model.Obj, error) {
|
||||
IsFolder: true,
|
||||
Modified: getTime(catalog.LastUpdateTime),
|
||||
Ctime: getTime(catalog.CreateTime),
|
||||
Path: path, // 文件夹上一级的Path
|
||||
}
|
||||
files = append(files, &f)
|
||||
}
|
||||
@ -243,13 +270,14 @@ func (d *Yun139) familyGetFiles(catalogID string) ([]model.Obj, error) {
|
||||
Size: content.ContentSize,
|
||||
Modified: getTime(content.LastUpdateTime),
|
||||
Ctime: getTime(content.CreateTime),
|
||||
Path: path, // 文件所在目录的Path
|
||||
},
|
||||
Thumbnail: model.Thumbnail{Thumbnail: content.ThumbnailURL},
|
||||
//Thumbnail: content.BigthumbnailURL,
|
||||
}
|
||||
files = append(files, &f)
|
||||
}
|
||||
if 100*pageNum > resp.Data.TotalCount {
|
||||
if resp.Data.TotalCount == 0 {
|
||||
break
|
||||
}
|
||||
pageNum++
|
||||
@ -257,12 +285,67 @@ func (d *Yun139) familyGetFiles(catalogID string) ([]model.Obj, error) {
|
||||
return files, nil
|
||||
}
|
||||
|
||||
func (d *Yun139) groupGetFiles(catalogID string) ([]model.Obj, error) {
|
||||
pageNum := 1
|
||||
files := make([]model.Obj, 0)
|
||||
for {
|
||||
data := d.newJson(base.Json{
|
||||
"groupID": d.CloudID,
|
||||
"catalogID": path.Base(catalogID),
|
||||
"contentSortType": 0,
|
||||
"sortDirection": 1,
|
||||
"startNumber": pageNum,
|
||||
"endNumber": pageNum + 99,
|
||||
"path": path.Join(d.RootFolderID, catalogID),
|
||||
})
|
||||
|
||||
var resp QueryGroupContentListResp
|
||||
_, err := d.post("/orchestration/group-rebuild/content/v1.0/queryGroupContentList", data, &resp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
path := resp.Data.GetGroupContentResult.ParentCatalogID
|
||||
for _, catalog := range resp.Data.GetGroupContentResult.CatalogList {
|
||||
f := model.Object{
|
||||
ID: catalog.CatalogID,
|
||||
Name: catalog.CatalogName,
|
||||
Size: 0,
|
||||
IsFolder: true,
|
||||
Modified: getTime(catalog.UpdateTime),
|
||||
Ctime: getTime(catalog.CreateTime),
|
||||
Path: catalog.Path, // 文件夹的真实Path, root:/开头
|
||||
}
|
||||
files = append(files, &f)
|
||||
}
|
||||
for _, content := range resp.Data.GetGroupContentResult.ContentList {
|
||||
f := model.ObjThumb{
|
||||
Object: model.Object{
|
||||
ID: content.ContentID,
|
||||
Name: content.ContentName,
|
||||
Size: content.ContentSize,
|
||||
Modified: getTime(content.UpdateTime),
|
||||
Ctime: getTime(content.CreateTime),
|
||||
Path: path, // 文件所在目录的Path
|
||||
},
|
||||
Thumbnail: model.Thumbnail{Thumbnail: content.ThumbnailURL},
|
||||
//Thumbnail: content.BigthumbnailURL,
|
||||
}
|
||||
files = append(files, &f)
|
||||
}
|
||||
if (pageNum + 99) > resp.Data.GetGroupContentResult.NodeCount {
|
||||
break
|
||||
}
|
||||
pageNum = pageNum + 100
|
||||
}
|
||||
return files, nil
|
||||
}
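groupGetFiles above pages through the group listing in fixed windows of 100 entries (startNumber/endNumber) and stops once a window reaches past the reported nodeCount; a small standalone sketch of that windowing:

// pageWindows mirrors the loop above: request [start, start+99] until the
// window extends beyond the total node count.
func pageWindows(total int) [][2]int {
	var windows [][2]int
	for start := 1; ; start += 100 {
		windows = append(windows, [2]int{start, start + 99})
		if start+99 > total {
			break
		}
	}
	return windows
}

// pageWindows(250) yields [1 100], [101 200], [201 300]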
|
||||
|
||||
func (d *Yun139) getLink(contentId string) (string, error) {
|
||||
data := base.Json{
|
||||
"appName": "",
|
||||
"contentID": contentId,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"account": d.getAccount(),
|
||||
"accountType": 1,
|
||||
},
|
||||
}
|
||||
@ -273,6 +356,32 @@ func (d *Yun139) getLink(contentId string) (string, error) {
|
||||
}
|
||||
return jsoniter.Get(res, "data", "downloadURL").ToString(), nil
|
||||
}
|
||||
func (d *Yun139) familyGetLink(contentId string, path string) (string, error) {
|
||||
data := d.newJson(base.Json{
|
||||
"contentID": contentId,
|
||||
"path": path,
|
||||
})
|
||||
res, err := d.post("/orchestration/familyCloud-rebuild/content/v1.0/getFileDownLoadURL",
|
||||
data, nil)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return jsoniter.Get(res, "data", "downloadURL").ToString(), nil
|
||||
}
|
||||
|
||||
func (d *Yun139) groupGetLink(contentId string, path string) (string, error) {
|
||||
data := d.newJson(base.Json{
|
||||
"contentID": contentId,
|
||||
"groupID": d.CloudID,
|
||||
"path": path,
|
||||
})
|
||||
res, err := d.post("/orchestration/group-rebuild/groupManage/v1.0/getGroupFileDownLoadURL",
|
||||
data, nil)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return jsoniter.Get(res, "data", "downloadURL").ToString(), nil
|
||||
}
|
||||
|
||||
func unicode(str string) string {
|
||||
textQuoted := strconv.QuoteToASCII(str)
|
||||
@ -299,17 +408,17 @@ func (d *Yun139) personalRequest(pathname string, method string, callback base.R
|
||||
}
|
||||
req.SetHeaders(map[string]string{
|
||||
"Accept": "application/json, text/plain, */*",
|
||||
"Authorization": "Basic " + d.Authorization,
|
||||
"Authorization": "Basic " + d.getAuthorization(),
|
||||
"Caller": "web",
|
||||
"Cms-Device": "default",
|
||||
"Mcloud-Channel": "1000101",
|
||||
"Mcloud-Client": "10701",
|
||||
"Mcloud-Route": "001",
|
||||
"Mcloud-Sign": fmt.Sprintf("%s,%s,%s", ts, randStr, sign),
|
||||
"Mcloud-Version": "7.13.0",
|
||||
"Mcloud-Version": "7.14.0",
|
||||
"Origin": "https://yun.139.com",
|
||||
"Referer": "https://yun.139.com/w/",
|
||||
"x-DeviceInfo": "||9|7.13.0|chrome|120.0.0.0|||windows 10||zh-CN|||",
|
||||
"x-DeviceInfo": "||9|7.14.0|chrome|120.0.0.0|||windows 10||zh-CN|||",
|
||||
"x-huawei-channelSrc": "10000034",
|
||||
"x-inner-ntwk": "2",
|
||||
"x-m4c-caller": "PC",
|
||||
@ -318,7 +427,7 @@ func (d *Yun139) personalRequest(pathname string, method string, callback base.R
|
||||
"X-Yun-Api-Version": "v1",
|
||||
"X-Yun-App-Channel": "10000034",
|
||||
"X-Yun-Channel-Source": "10000034",
|
||||
"X-Yun-Client-Info": "||9|7.13.0|chrome|120.0.0.0|||windows 10||zh-CN|||dW5kZWZpbmVk||",
|
||||
"X-Yun-Client-Info": "||9|7.14.0|chrome|120.0.0.0|||windows 10||zh-CN|||dW5kZWZpbmVk||",
|
||||
"X-Yun-Module-Type": "100",
|
||||
"X-Yun-Svc-Type": "1",
|
||||
})
|
||||
@ -430,3 +539,16 @@ func (d *Yun139) personalGetLink(fileId string) (string, error) {
|
||||
return jsoniter.Get(res, "data", "url").ToString(), nil
|
||||
}
|
||||
}
|
||||
|
||||
func (d *Yun139) getAuthorization() string {
|
||||
if d.ref != nil {
|
||||
return d.ref.getAuthorization()
|
||||
}
|
||||
return d.Authorization
|
||||
}
|
||||
func (d *Yun139) getAccount() string {
|
||||
if d.ref != nil {
|
||||
return d.ref.getAccount()
|
||||
}
|
||||
return d.Account
|
||||
}
|
||||
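getAuthorization and getAccount implement the same delegation pattern added to the other drivers in this change set: when ref points at another storage instance (bound via the InitReference hooks below), credential reads are forwarded to it so both storages share one login. A generic sketch of the idea (the type is illustrative, not the project's):

// Cred holds a credential and an optional reference to another instance whose
// credential should be reused when set.
type Cred struct {
	token string
	ref   *Cred
}

func (c *Cred) Token() string {
	if c.ref != nil {
		return c.ref.Token()
	}
	return c.token
}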
|
@ -33,6 +33,7 @@ type Cloud189PC struct {
|
||||
cleanFamilyTransferFile func()
|
||||
|
||||
storageConfig driver.Config
|
||||
ref *Cloud189PC
|
||||
}
|
||||
|
||||
func (y *Cloud189PC) Config() driver.Config {
|
||||
@ -64,20 +65,22 @@ func (y *Cloud189PC) Init(ctx context.Context) (err error) {
|
||||
y.uploadThread, y.UploadThread = 3, "3"
|
||||
}
|
||||
|
||||
// initialize the request client
|
||||
if y.client == nil {
|
||||
y.client = base.NewRestyClient().SetHeaders(map[string]string{
|
||||
"Accept": "application/json;charset=UTF-8",
|
||||
"Referer": WEB_URL,
|
||||
})
|
||||
}
|
||||
if y.ref == nil {
|
||||
// initialize the request client
|
||||
if y.client == nil {
|
||||
y.client = base.NewRestyClient().SetHeaders(map[string]string{
|
||||
"Accept": "application/json;charset=UTF-8",
|
||||
"Referer": WEB_URL,
|
||||
})
|
||||
}
|
||||
|
||||
// avoid logging in repeatedly
|
||||
identity := utils.GetMD5EncodeStr(y.Username + y.Password)
|
||||
if !y.isLogin() || y.identity != identity {
|
||||
y.identity = identity
|
||||
if err = y.login(); err != nil {
|
||||
return
|
||||
// avoid logging in repeatedly
|
||||
identity := utils.GetMD5EncodeStr(y.Username + y.Password)
|
||||
if !y.isLogin() || y.identity != identity {
|
||||
y.identity = identity
|
||||
if err = y.login(); err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -103,7 +106,17 @@ func (y *Cloud189PC) Init(ctx context.Context) (err error) {
|
||||
return
|
||||
}
|
||||
|
||||
func (d *Cloud189PC) InitReference(storage driver.Driver) error {
|
||||
refStorage, ok := storage.(*Cloud189PC)
|
||||
if ok {
|
||||
d.ref = refStorage
|
||||
return nil
|
||||
}
|
||||
return errs.NotSupport
|
||||
}
|
||||
|
||||
func (y *Cloud189PC) Drop(ctx context.Context) error {
|
||||
y.ref = nil
|
||||
return nil
|
||||
}
|
||||
|
||||
|
@ -143,7 +143,7 @@ type FamilyInfoListResp struct {
|
||||
type FamilyInfoResp struct {
|
||||
Count int `json:"count"`
|
||||
CreateTime string `json:"createTime"`
|
||||
FamilyID int `json:"familyId"`
|
||||
FamilyID int64 `json:"familyId"`
|
||||
RemarkName string `json:"remarkName"`
|
||||
Type int `json:"type"`
|
||||
UseFlag int `json:"useFlag"`
|
||||
|
@ -57,11 +57,11 @@ const (
|
||||
|
||||
func (y *Cloud189PC) SignatureHeader(url, method, params string, isFamily bool) map[string]string {
|
||||
dateOfGmt := getHttpDateStr()
|
||||
sessionKey := y.tokenInfo.SessionKey
|
||||
sessionSecret := y.tokenInfo.SessionSecret
|
||||
sessionKey := y.getTokenInfo().SessionKey
|
||||
sessionSecret := y.getTokenInfo().SessionSecret
|
||||
if isFamily {
|
||||
sessionKey = y.tokenInfo.FamilySessionKey
|
||||
sessionSecret = y.tokenInfo.FamilySessionSecret
|
||||
sessionKey = y.getTokenInfo().FamilySessionKey
|
||||
sessionSecret = y.getTokenInfo().FamilySessionSecret
|
||||
}
|
||||
|
||||
header := map[string]string{
|
||||
@ -74,9 +74,9 @@ func (y *Cloud189PC) SignatureHeader(url, method, params string, isFamily bool)
|
||||
}
|
||||
|
||||
func (y *Cloud189PC) EncryptParams(params Params, isFamily bool) string {
|
||||
sessionSecret := y.tokenInfo.SessionSecret
|
||||
sessionSecret := y.getTokenInfo().SessionSecret
|
||||
if isFamily {
|
||||
sessionSecret = y.tokenInfo.FamilySessionSecret
|
||||
sessionSecret = y.getTokenInfo().FamilySessionSecret
|
||||
}
|
||||
if params != nil {
|
||||
return AesECBEncrypt(params.Encode(), sessionSecret[:16])
|
||||
@ -85,7 +85,7 @@ func (y *Cloud189PC) EncryptParams(params Params, isFamily bool) string {
|
||||
}
|
||||
|
||||
func (y *Cloud189PC) request(url, method string, callback base.ReqCallback, params Params, resp interface{}, isFamily ...bool) ([]byte, error) {
|
||||
req := y.client.R().SetQueryParams(clientSuffix())
|
||||
req := y.getClient().R().SetQueryParams(clientSuffix())
|
||||
|
||||
// set params
|
||||
paramsData := y.EncryptParams(params, isBool(isFamily...))
|
||||
@ -114,17 +114,19 @@ func (y *Cloud189PC) request(url, method string, callback base.ReqCallback, para
|
||||
if err = y.refreshSession(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return y.request(url, method, callback, params, resp)
|
||||
return y.request(url, method, callback, params, resp, isFamily...)
|
||||
}
|
||||
|
||||
// if erron.ErrorCode == "InvalidSessionKey" || erron.Code == "InvalidSessionKey" {
|
||||
if strings.Contains(res.String(), "InvalidSessionKey") {
|
||||
if err = y.refreshSession(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return y.request(url, method, callback, params, resp, isFamily...)
|
||||
}
|
||||
|
||||
// handle errors
|
||||
if erron.HasError() {
|
||||
if erron.ErrorCode == "InvalidSessionKey" {
|
||||
if err = y.refreshSession(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return y.request(url, method, callback, params, resp)
|
||||
}
|
||||
return nil, &erron
|
||||
}
|
||||
return res.Body(), nil
|
||||
@ -401,6 +403,9 @@ func (y *Cloud189PC) initLoginParam() error {
|
||||
|
||||
// refresh the session
|
||||
func (y *Cloud189PC) refreshSession() (err error) {
|
||||
if y.ref != nil {
|
||||
return y.ref.refreshSession()
|
||||
}
|
||||
var erron RespErr
|
||||
var userSessionResp UserSessionResp
|
||||
_, err = y.client.R().
|
||||
@ -618,7 +623,7 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
|
||||
}
|
||||
|
||||
// try to resume previous progress
|
||||
uploadProgress, ok := base.GetUploadProgress[*UploadProgress](y, y.tokenInfo.SessionKey, fileMd5Hex)
|
||||
uploadProgress, ok := base.GetUploadProgress[*UploadProgress](y, y.getTokenInfo().SessionKey, fileMd5Hex)
|
||||
if !ok {
|
||||
//step.2 预上传
|
||||
params := Params{
|
||||
@ -685,7 +690,7 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
|
||||
if err = threadG.Wait(); err != nil {
|
||||
if errors.Is(err, context.Canceled) {
|
||||
uploadProgress.UploadParts = utils.SliceFilter(uploadProgress.UploadParts, func(s string) bool { return s != "" })
|
||||
base.SaveUploadProgress(y, uploadProgress, y.tokenInfo.SessionKey, fileMd5Hex)
|
||||
base.SaveUploadProgress(y, uploadProgress, y.getTokenInfo().SessionKey, fileMd5Hex)
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
@ -1006,7 +1011,7 @@ func (y *Cloud189PC) getFamilyID() (string, error) {
|
||||
return "", fmt.Errorf("cannot get automatically,please input family_id")
|
||||
}
|
||||
for _, info := range infos {
|
||||
if strings.Contains(y.tokenInfo.LoginName, info.RemarkName) {
|
||||
if strings.Contains(y.getTokenInfo().LoginName, info.RemarkName) {
|
||||
return fmt.Sprint(info.FamilyID), nil
|
||||
}
|
||||
}
|
||||
@ -1140,3 +1145,17 @@ func (y *Cloud189PC) WaitBatchTask(aType string, taskID string, t time.Duration)
|
||||
time.Sleep(t)
|
||||
}
|
||||
}
|
||||
|
||||
func (y *Cloud189PC) getTokenInfo() *AppSessionResp {
|
||||
if y.ref != nil {
|
||||
return y.ref.getTokenInfo()
|
||||
}
|
||||
return y.tokenInfo
|
||||
}
|
||||
|
||||
func (y *Cloud189PC) getClient() *resty.Client {
|
||||
if y.ref != nil {
|
||||
return y.ref.getClient()
|
||||
}
|
||||
return y.client
|
||||
}
|
||||
|
@ -7,6 +7,7 @@ import (
|
||||
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/errs"
|
||||
"github.com/alist-org/alist/v3/internal/fs"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
)
|
||||
@ -45,6 +46,9 @@ func (d *Alias) Init(ctx context.Context) error {
|
||||
d.oneKey = k
|
||||
}
|
||||
d.autoFlatten = true
|
||||
} else {
|
||||
d.oneKey = ""
|
||||
d.autoFlatten = false
|
||||
}
|
||||
return nil
|
||||
}
|
||||
@ -87,8 +91,9 @@ func (d *Alias) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([
|
||||
return nil, errs.ObjectNotFound
|
||||
}
|
||||
var objs []model.Obj
|
||||
fsArgs := &fs.ListArgs{NoLog: true, Refresh: args.Refresh}
|
||||
for _, dst := range dsts {
|
||||
tmp, err := d.list(ctx, dst, sub)
|
||||
tmp, err := d.list(ctx, dst, sub, fsArgs)
|
||||
if err == nil {
|
||||
objs = append(objs, tmp...)
|
||||
}
|
||||
@ -105,10 +110,42 @@ func (d *Alias) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (
|
||||
for _, dst := range dsts {
|
||||
link, err := d.link(ctx, dst, sub, args)
|
||||
if err == nil {
|
||||
if !args.Redirect && len(link.URL) > 0 {
|
||||
// Normally, concurrent downloads are only supported by drivers that return a URL.
// Nesting an alias inside another alias lets drivers that do not return a URL (crypt, mega, ...) support concurrency as well.
|
||||
if d.DownloadConcurrency > 0 {
|
||||
link.Concurrency = d.DownloadConcurrency
|
||||
}
|
||||
if d.DownloadPartSize > 0 {
|
||||
link.PartSize = d.DownloadPartSize * utils.KB
|
||||
}
|
||||
}
|
||||
return link, nil
|
||||
}
|
||||
}
|
||||
return nil, errs.ObjectNotFound
|
||||
}
|
||||
|
||||
func (d *Alias) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||
reqPath, err := d.getReqPath(ctx, srcObj)
|
||||
if err == nil {
|
||||
return fs.Rename(ctx, *reqPath, newName)
|
||||
}
|
||||
if errs.IsNotImplement(err) {
|
||||
return errors.New("same-name files cannot be Rename")
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *Alias) Remove(ctx context.Context, obj model.Obj) error {
|
||||
reqPath, err := d.getReqPath(ctx, obj)
|
||||
if err == nil {
|
||||
return fs.Remove(ctx, *reqPath)
|
||||
}
|
||||
if errs.IsNotImplement(err) {
|
||||
return errors.New("same-name files cannot be Delete")
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
var _ driver.Driver = (*Alias)(nil)
|
||||
|
@ -9,19 +9,27 @@ type Addition struct {
|
||||
// Usually one of two
|
||||
// driver.RootPath
|
||||
// define other
|
||||
Paths string `json:"paths" required:"true" type:"text"`
|
||||
Paths string `json:"paths" required:"true" type:"text"`
|
||||
ProtectSameName bool `json:"protect_same_name" default:"true" required:"false" help:"Protects same-name files from Delete or Rename"`
|
||||
DownloadConcurrency int `json:"download_concurrency" default:"0" required:"false" type:"number" help:"Need to enable proxy"`
|
||||
DownloadPartSize int `json:"download_part_size" default:"0" type:"number" required:"false" help:"Need to enable proxy. Unit: KB"`
|
||||
}
|
||||
|
||||
var config = driver.Config{
|
||||
Name: "Alias",
|
||||
LocalSort: true,
|
||||
NoCache: true,
|
||||
NoUpload: true,
|
||||
DefaultRoot: "/",
|
||||
Name: "Alias",
|
||||
LocalSort: true,
|
||||
NoCache: true,
|
||||
NoUpload: true,
|
||||
DefaultRoot: "/",
|
||||
ProxyRangeOption: true,
|
||||
}
|
||||
|
||||
func init() {
|
||||
op.RegisterDriver(func() driver.Driver {
|
||||
return &Alias{}
|
||||
return &Alias{
|
||||
Addition: Addition{
|
||||
ProtectSameName: true,
|
||||
},
|
||||
}
|
||||
})
|
||||
}
|
||||
|
@ -6,8 +6,10 @@ import (
|
||||
stdpath "path"
|
||||
"strings"
|
||||
|
||||
"github.com/alist-org/alist/v3/internal/errs"
|
||||
"github.com/alist-org/alist/v3/internal/fs"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
"github.com/alist-org/alist/v3/internal/sign"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/alist-org/alist/v3/server/common"
|
||||
@ -15,7 +17,7 @@ import (
|
||||
|
||||
func (d *Alias) listRoot() []model.Obj {
|
||||
var objs []model.Obj
|
||||
for k, _ := range d.pathMap {
|
||||
for k := range d.pathMap {
|
||||
obj := model.Object{
|
||||
Name: k,
|
||||
IsFolder: true,
|
||||
@ -64,8 +66,8 @@ func (d *Alias) get(ctx context.Context, path string, dst, sub string) (model.Ob
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (d *Alias) list(ctx context.Context, dst, sub string) ([]model.Obj, error) {
|
||||
objs, err := fs.List(ctx, stdpath.Join(dst, sub), &fs.ListArgs{NoLog: true})
|
||||
func (d *Alias) list(ctx context.Context, dst, sub string, args *fs.ListArgs) ([]model.Obj, error) {
|
||||
objs, err := fs.List(ctx, stdpath.Join(dst, sub), args)
|
||||
// the obj must implement the model.SetPath interface
|
||||
// return objs, err
|
||||
if err != nil {
|
||||
@ -93,22 +95,63 @@ func (d *Alias) list(ctx context.Context, dst, sub string) ([]model.Obj, error)
|
||||
|
||||
func (d *Alias) link(ctx context.Context, dst, sub string, args model.LinkArgs) (*model.Link, error) {
|
||||
reqPath := stdpath.Join(dst, sub)
|
||||
storage, err := fs.GetStorage(reqPath, &fs.GetStoragesArgs{})
|
||||
// follows the approach of the crypt driver
|
||||
storage, reqActualPath, err := op.GetStorageAndActualPath(reqPath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if _, ok := storage.(*Alias); !ok && !args.Redirect {
|
||||
link, _, err := op.Link(ctx, storage, reqActualPath, args)
|
||||
return link, err
|
||||
}
|
||||
_, err = fs.Get(ctx, reqPath, &fs.GetArgs{NoLog: true})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if common.ShouldProxy(storage, stdpath.Base(sub)) {
|
||||
return &model.Link{
|
||||
link := &model.Link{
|
||||
URL: fmt.Sprintf("%s/p%s?sign=%s",
|
||||
common.GetApiUrl(args.HttpReq),
|
||||
utils.EncodePath(reqPath, true),
|
||||
sign.Sign(reqPath)),
|
||||
}, nil
|
||||
}
|
||||
if args.HttpReq != nil && d.ProxyRange {
|
||||
link.RangeReadCloser = common.NoProxyRange
|
||||
}
|
||||
return link, nil
|
||||
}
|
||||
link, _, err := fs.Link(ctx, reqPath, args)
|
||||
link, _, err := op.Link(ctx, storage, reqActualPath, args)
|
||||
return link, err
|
||||
}
|
||||
|
||||
func (d *Alias) getReqPath(ctx context.Context, obj model.Obj) (*string, error) {
|
||||
root, sub := d.getRootAndPath(obj.GetPath())
|
||||
if sub == "" {
|
||||
return nil, errs.NotSupport
|
||||
}
|
||||
dsts, ok := d.pathMap[root]
|
||||
if !ok {
|
||||
return nil, errs.ObjectNotFound
|
||||
}
|
||||
var reqPath *string
|
||||
for _, dst := range dsts {
|
||||
path := stdpath.Join(dst, sub)
|
||||
_, err := fs.Get(ctx, path, &fs.GetArgs{NoLog: true})
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
if !d.ProtectSameName {
|
||||
return &path, nil
|
||||
}
|
||||
if ok {
|
||||
ok = false
|
||||
} else {
|
||||
return nil, errs.NotImplement
|
||||
}
|
||||
reqPath = &path
|
||||
}
|
||||
if reqPath == nil {
|
||||
return nil, errs.ObjectNotFound
|
||||
}
|
||||
return reqPath, nil
|
||||
}
|
||||
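getReqPath above resolves an object against every destination of the alias and, when ProtectSameName is enabled, refuses to act if more than one destination contains it (Rename and Remove then surface the "same-name files cannot be ..." errors). A generic sketch of that guard, separate from the driver (requires the standard errors package; names are hypothetical):

// pickUnique returns the only existing candidate, or an error when the name is
// ambiguous across destinations or missing everywhere.
func pickUnique(existing []string) (string, error) {
	var match string
	for _, p := range existing {
		if match != "" {
			return "", errors.New("same-name object exists in multiple destinations")
		}
		match = p
	}
	if match == "" {
		return "", errors.New("object not found")
	}
	return match, nil
}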
|
@ -6,9 +6,7 @@ import (
|
||||
"io"
|
||||
"net/http"
|
||||
"path"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/conf"
|
||||
@ -17,6 +15,7 @@ import (
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/alist-org/alist/v3/server/common"
|
||||
"github.com/go-resty/resty/v2"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
type AListV3 struct {
|
||||
@ -42,7 +41,7 @@ func (d *AListV3) Init(ctx context.Context) error {
|
||||
return err
|
||||
}
|
||||
// if the username is not empty and the username is not the same as the current username, then login again
|
||||
if d.Username != "" && d.Username != resp.Data.Username {
|
||||
if d.Username != resp.Data.Username {
|
||||
err = d.login()
|
||||
if err != nil {
|
||||
return err
|
||||
@ -109,11 +108,19 @@ func (d *AListV3) List(ctx context.Context, dir model.Obj, args model.ListArgs)
|
||||
|
||||
func (d *AListV3) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
var resp common.Resp[FsGetResp]
|
||||
// if PassUAToUpsteam is true, then pass the user-agent to the upstream
|
||||
userAgent := base.UserAgent
|
||||
if d.PassUAToUpsteam {
|
||||
userAgent = args.Header.Get("user-agent")
|
||||
if userAgent == "" {
|
||||
userAgent = base.UserAgent
|
||||
}
|
||||
}
|
||||
_, err := d.request("/fs/get", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetResult(&resp).SetBody(FsGetReq{
|
||||
Path: file.GetPath(),
|
||||
Password: d.MetaPassword,
|
||||
})
|
||||
}).SetHeader("user-agent", userAgent)
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@ -175,14 +182,50 @@ func (d *AListV3) Remove(ctx context.Context, obj model.Obj) error {
|
||||
}
|
||||
|
||||
func (d *AListV3) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||
_, err := d.requestWithTimeout("/fs/put", http.MethodPut, func(req *resty.Request) {
|
||||
req.SetHeader("File-Path", path.Join(dstDir.GetPath(), stream.GetName())).
|
||||
SetHeader("Password", d.MetaPassword).
|
||||
SetHeader("Content-Length", strconv.FormatInt(stream.GetSize(), 10)).
|
||||
SetContentLength(true).
|
||||
SetBody(io.ReadCloser(stream))
|
||||
}, time.Hour*6)
|
||||
return err
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodPut, d.Address+"/api/fs/put", stream)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req.Header.Set("Authorization", d.Token)
|
||||
req.Header.Set("File-Path", path.Join(dstDir.GetPath(), stream.GetName()))
|
||||
req.Header.Set("Password", d.MetaPassword)
|
||||
if md5 := stream.GetHash().GetHash(utils.MD5); len(md5) > 0 {
|
||||
req.Header.Set("X-File-Md5", md5)
|
||||
}
|
||||
if sha1 := stream.GetHash().GetHash(utils.SHA1); len(sha1) > 0 {
|
||||
req.Header.Set("X-File-Sha1", sha1)
|
||||
}
|
||||
if sha256 := stream.GetHash().GetHash(utils.SHA256); len(sha256) > 0 {
|
||||
req.Header.Set("X-File-Sha256", sha256)
|
||||
}
|
||||
|
||||
req.ContentLength = stream.GetSize()
|
||||
// client := base.NewHttpClient()
|
||||
// client.Timeout = time.Hour * 6
|
||||
res, err := base.HttpClient.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
bytes, err := io.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
log.Debugf("[alist_v3] response body: %s", string(bytes))
|
||||
if res.StatusCode >= 400 {
|
||||
return fmt.Errorf("request failed, status: %s", res.Status)
|
||||
}
|
||||
code := utils.Json.Get(bytes, "code").ToInt()
|
||||
if code != 200 {
|
||||
if code == 401 || code == 403 {
|
||||
err = d.login()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return fmt.Errorf("request failed,code: %d, message: %s", code, utils.Json.Get(bytes, "message").ToString())
|
||||
}
|
||||
return nil
|
||||
}
|
||||
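The rewritten Put streams the request body straight from the FileStreamer and sets req.ContentLength explicitly, so the upload is sent with a Content-Length header rather than chunked transfer encoding. A minimal standalone sketch of that pattern (URL and headers are placeholders, not the AList API; requires context, net/http and os):

// putStream uploads a file as the raw PUT body with a known length.
func putStream(ctx context.Context, url string, f *os.File, size int64) error {
	req, err := http.NewRequestWithContext(ctx, http.MethodPut, url, f)
	if err != nil {
		return err
	}
	req.ContentLength = size // known size: the body is not chunk-encoded
	res, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	return res.Body.Close()
}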
|
||||
//func (d *AList) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
|
||||
|
@ -7,18 +7,20 @@ import (
|
||||
|
||||
type Addition struct {
|
||||
driver.RootPath
|
||||
Address string `json:"url" required:"true"`
|
||||
MetaPassword string `json:"meta_password"`
|
||||
Username string `json:"username"`
|
||||
Password string `json:"password"`
|
||||
Token string `json:"token"`
|
||||
Address string `json:"url" required:"true"`
|
||||
MetaPassword string `json:"meta_password"`
|
||||
Username string `json:"username"`
|
||||
Password string `json:"password"`
|
||||
Token string `json:"token"`
|
||||
PassUAToUpsteam bool `json:"pass_ua_to_upsteam" default:"true"`
|
||||
}
|
||||
|
||||
var config = driver.Config{
|
||||
Name: "AList V3",
|
||||
LocalSort: true,
|
||||
DefaultRoot: "/",
|
||||
CheckStatus: true,
|
||||
Name: "AList V3",
|
||||
LocalSort: true,
|
||||
DefaultRoot: "/",
|
||||
CheckStatus: true,
|
||||
ProxyRangeOption: true,
|
||||
}
|
||||
|
||||
func init() {
|
||||
|
@ -3,7 +3,6 @@ package alist_v3
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
@ -14,6 +13,9 @@ import (
|
||||
)
|
||||
|
||||
func (d *AListV3) login() error {
|
||||
if d.Username == "" {
|
||||
return nil
|
||||
}
|
||||
var resp common.Resp[LoginResp]
|
||||
_, err := d.request("/auth/login", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetResult(&resp).SetBody(base.Json{
|
||||
@ -57,33 +59,3 @@ func (d *AListV3) request(api, method string, callback base.ReqCallback, retry .
|
||||
}
|
||||
return res.Body(), nil
|
||||
}
|
||||
|
||||
func (d *AListV3) requestWithTimeout(api, method string, callback base.ReqCallback, timeout time.Duration, retry ...bool) ([]byte, error) {
|
||||
url := d.Address + "/api" + api
|
||||
client := base.NewRestyClient().SetTimeout(timeout)
|
||||
req := client.R()
|
||||
req.SetHeader("Authorization", d.Token)
|
||||
if callback != nil {
|
||||
callback(req)
|
||||
}
|
||||
res, err := req.Execute(method, url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
log.Debugf("[alist_v3] response body: %s", res.String())
|
||||
if res.StatusCode() >= 400 {
|
||||
return nil, fmt.Errorf("request failed, status: %s", res.Status())
|
||||
}
|
||||
code := utils.Json.Get(res.Body(), "code").ToInt()
|
||||
if code != 200 {
|
||||
if (code == 401 || code == 403) && !utils.IsBool(retry...) {
|
||||
err = d.login()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return d.requestWithTimeout(api, method, callback, timeout, true)
|
||||
}
|
||||
return nil, fmt.Errorf("request failed,code: %d, message: %s", code, utils.Json.Get(res.Body(), "message").ToString())
|
||||
}
|
||||
return res.Body(), nil
|
||||
}
|
||||
|
@ -19,12 +19,12 @@ import (
|
||||
type AliyundriveOpen struct {
|
||||
model.Storage
|
||||
Addition
|
||||
base string
|
||||
|
||||
DriveId string
|
||||
|
||||
limitList func(ctx context.Context, data base.Json) (*Files, error)
|
||||
limitLink func(ctx context.Context, file model.Obj) (*model.Link, error)
|
||||
ref *AliyundriveOpen
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) Config() driver.Config {
|
||||
@ -58,7 +58,17 @@ func (d *AliyundriveOpen) Init(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) InitReference(storage driver.Driver) error {
|
||||
refStorage, ok := storage.(*AliyundriveOpen)
|
||||
if ok {
|
||||
d.ref = refStorage
|
||||
return nil
|
||||
}
|
||||
return errs.NotSupport
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) Drop(ctx context.Context) error {
|
||||
d.ref = nil
|
||||
return nil
|
||||
}
|
||||
|
||||
|
@ -6,7 +6,7 @@ import (
|
||||
)
|
||||
|
||||
type Addition struct {
|
||||
DriveType string `json:"drive_type" type:"select" options:"default,resource,backup" default:"default"`
|
||||
DriveType string `json:"drive_type" type:"select" options:"default,resource,backup" default:"resource"`
|
||||
driver.RootID
|
||||
RefreshToken string `json:"refresh_token" required:"true"`
|
||||
OrderBy string `json:"order_by" type:"select" options:"name,size,updated_at,created_at"`
|
||||
@ -32,11 +32,10 @@ var config = driver.Config{
|
||||
DefaultRoot: "root",
|
||||
NoOverwriteUpload: true,
|
||||
}
|
||||
var API_URL = "https://openapi.alipan.com"
|
||||
|
||||
func init() {
|
||||
op.RegisterDriver(func() driver.Driver {
|
||||
return &AliyundriveOpen{
|
||||
base: "https://openapi.alipan.com",
|
||||
}
|
||||
return &AliyundriveOpen{}
|
||||
})
|
||||
}
|
||||
|
@ -126,7 +126,7 @@ func getProofRange(input string, size int64) (*ProofRange, error) {
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) calProofCode(stream model.FileStreamer) (string, error) {
|
||||
proofRange, err := getProofRange(d.AccessToken, stream.GetSize())
|
||||
proofRange, err := getProofRange(d.getAccessToken(), stream.GetSize())
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
@ -19,7 +19,7 @@ import (
|
||||
// do others that not defined in Driver interface
|
||||
|
||||
func (d *AliyundriveOpen) _refreshToken() (string, string, error) {
|
||||
url := d.base + "/oauth/access_token"
|
||||
url := API_URL + "/oauth/access_token"
|
||||
if d.OauthTokenURL != "" && d.ClientID == "" {
|
||||
url = d.OauthTokenURL
|
||||
}
|
||||
@ -74,6 +74,9 @@ func getSub(token string) (string, error) {
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) refreshToken() error {
|
||||
if d.ref != nil {
|
||||
return d.ref.refreshToken()
|
||||
}
|
||||
refresh, access, err := d._refreshToken()
|
||||
for i := 0; i < 3; i++ {
|
||||
if err == nil {
|
||||
@ -100,7 +103,7 @@ func (d *AliyundriveOpen) request(uri, method string, callback base.ReqCallback,
|
||||
func (d *AliyundriveOpen) requestReturnErrResp(uri, method string, callback base.ReqCallback, retry ...bool) ([]byte, error, *ErrResp) {
|
||||
req := base.RestyClient.R()
|
||||
// TODO check whether access_token is expired
|
||||
req.SetHeader("Authorization", "Bearer "+d.AccessToken)
|
||||
req.SetHeader("Authorization", "Bearer "+d.getAccessToken())
|
||||
if method == http.MethodPost {
|
||||
req.SetHeader("Content-Type", "application/json")
|
||||
}
|
||||
@ -109,7 +112,7 @@ func (d *AliyundriveOpen) requestReturnErrResp(uri, method string, callback base
|
||||
}
|
||||
var e ErrResp
|
||||
req.SetError(&e)
|
||||
res, err := req.Execute(method, d.base+uri)
|
||||
res, err := req.Execute(method, API_URL+uri)
|
||||
if err != nil {
|
||||
if res != nil {
|
||||
log.Errorf("[aliyundrive_open] request error: %s", res.String())
|
||||
@ -118,7 +121,7 @@ func (d *AliyundriveOpen) requestReturnErrResp(uri, method string, callback base
|
||||
}
|
||||
isRetry := len(retry) > 0 && retry[0]
|
||||
if e.Code != "" {
|
||||
if !isRetry && (utils.SliceContains([]string{"AccessTokenInvalid", "AccessTokenExpired", "I400JD"}, e.Code) || d.AccessToken == "") {
|
||||
if !isRetry && (utils.SliceContains([]string{"AccessTokenInvalid", "AccessTokenExpired", "I400JD"}, e.Code) || d.getAccessToken() == "") {
|
||||
err = d.refreshToken()
|
||||
if err != nil {
|
||||
return nil, err, nil
|
||||
@ -176,3 +179,10 @@ func getNowTime() (time.Time, string) {
|
||||
nowTimeStr := nowTime.Format("2006-01-02T15:04:05.000Z")
|
||||
return nowTime, nowTimeStr
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) getAccessToken() string {
|
||||
if d.ref != nil {
|
||||
return d.ref.getAccessToken()
|
||||
}
|
||||
return d.AccessToken
|
||||
}
|
||||
|
@ -22,21 +22,31 @@ import (
|
||||
_ "github.com/alist-org/alist/v3/drivers/cloudreve"
|
||||
_ "github.com/alist-org/alist/v3/drivers/crypt"
|
||||
_ "github.com/alist-org/alist/v3/drivers/dropbox"
|
||||
_ "github.com/alist-org/alist/v3/drivers/febbox"
|
||||
_ "github.com/alist-org/alist/v3/drivers/ftp"
|
||||
_ "github.com/alist-org/alist/v3/drivers/github"
|
||||
_ "github.com/alist-org/alist/v3/drivers/github_releases"
|
||||
_ "github.com/alist-org/alist/v3/drivers/google_drive"
|
||||
_ "github.com/alist-org/alist/v3/drivers/google_photo"
|
||||
_ "github.com/alist-org/alist/v3/drivers/halalcloud"
|
||||
_ "github.com/alist-org/alist/v3/drivers/ilanzou"
|
||||
_ "github.com/alist-org/alist/v3/drivers/ipfs_api"
|
||||
_ "github.com/alist-org/alist/v3/drivers/kodbox"
|
||||
_ "github.com/alist-org/alist/v3/drivers/lanzou"
|
||||
_ "github.com/alist-org/alist/v3/drivers/lenovonas_share"
|
||||
_ "github.com/alist-org/alist/v3/drivers/local"
|
||||
_ "github.com/alist-org/alist/v3/drivers/mediatrack"
|
||||
_ "github.com/alist-org/alist/v3/drivers/mega"
|
||||
_ "github.com/alist-org/alist/v3/drivers/misskey"
|
||||
_ "github.com/alist-org/alist/v3/drivers/mopan"
|
||||
_ "github.com/alist-org/alist/v3/drivers/netease_music"
|
||||
_ "github.com/alist-org/alist/v3/drivers/onedrive"
|
||||
_ "github.com/alist-org/alist/v3/drivers/onedrive_app"
|
||||
_ "github.com/alist-org/alist/v3/drivers/onedrive_sharelink"
|
||||
_ "github.com/alist-org/alist/v3/drivers/pikpak"
|
||||
_ "github.com/alist-org/alist/v3/drivers/pikpak_share"
|
||||
_ "github.com/alist-org/alist/v3/drivers/quark_uc"
|
||||
_ "github.com/alist-org/alist/v3/drivers/quark_uc_tv"
|
||||
_ "github.com/alist-org/alist/v3/drivers/quqi"
|
||||
_ "github.com/alist-org/alist/v3/drivers/s3"
|
||||
_ "github.com/alist-org/alist/v3/drivers/seafile"
|
||||
@ -45,6 +55,8 @@ import (
|
||||
_ "github.com/alist-org/alist/v3/drivers/teambition"
|
||||
_ "github.com/alist-org/alist/v3/drivers/terabox"
|
||||
_ "github.com/alist-org/alist/v3/drivers/thunder"
|
||||
_ "github.com/alist-org/alist/v3/drivers/thunder_browser"
|
||||
_ "github.com/alist-org/alist/v3/drivers/thunderx"
|
||||
_ "github.com/alist-org/alist/v3/drivers/trainbit"
|
||||
_ "github.com/alist-org/alist/v3/drivers/url_tree"
|
||||
_ "github.com/alist-org/alist/v3/drivers/uss"
|
||||
|
@ -17,7 +17,7 @@ type Addition struct {
|
||||
AccessToken string
|
||||
UploadThread string `json:"upload_thread" default:"3" help:"1<=thread<=32"`
|
||||
UploadAPI string `json:"upload_api" default:"https://d.pcs.baidu.com"`
|
||||
CustomUploadPartSize int64 `json:"custom_upload_part_size" default:"0" help:"0 for auto"`
|
||||
CustomUploadPartSize int64 `json:"custom_upload_part_size" type:"number" default:"0" help:"0 for auto"`
|
||||
}
|
||||
|
||||
var config = driver.Config{
|
||||
|
@ -6,6 +6,7 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
)
|
||||
|
||||
type TokenErrResp struct {
|
||||
@ -55,11 +56,11 @@ func fileToObj(f File) *model.ObjThumb {
|
||||
if f.ServerFilename == "" {
|
||||
f.ServerFilename = path.Base(f.Path)
|
||||
}
|
||||
if f.LocalCtime == 0 {
|
||||
f.LocalCtime = f.Ctime
|
||||
if f.ServerCtime == 0 {
|
||||
f.ServerCtime = f.Ctime
|
||||
}
|
||||
if f.LocalMtime == 0 {
|
||||
f.LocalMtime = f.Mtime
|
||||
if f.ServerMtime == 0 {
|
||||
f.ServerMtime = f.Mtime
|
||||
}
|
||||
return &model.ObjThumb{
|
||||
Object: model.Object{
|
||||
@ -67,12 +68,12 @@ func fileToObj(f File) *model.ObjThumb {
|
||||
Path: f.Path,
|
||||
Name: f.ServerFilename,
|
||||
Size: f.Size,
|
||||
Modified: time.Unix(f.LocalMtime, 0),
|
||||
Ctime: time.Unix(f.LocalCtime, 0),
|
||||
Modified: time.Unix(f.ServerMtime, 0),
|
||||
Ctime: time.Unix(f.ServerCtime, 0),
|
||||
IsFolder: f.Isdir == 1,
|
||||
|
||||
// the MD5 fetched directly is wrong
|
||||
// HashInfo: utils.NewHashInfo(utils.MD5, f.Md5),
|
||||
HashInfo: utils.NewHashInfo(utils.MD5, DecryptMd5(f.Md5)),
|
||||
},
|
||||
Thumbnail: model.Thumbnail{Thumbnail: f.Thumbs.Url3},
|
||||
}
|
||||
|
@ -1,11 +1,14 @@
|
||||
package baidu_netdisk
|
||||
|
||||
import (
|
||||
"encoding/hex"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
"unicode"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/errs"
|
||||
@ -153,8 +156,6 @@ func (d *BaiduNetdisk) linkOfficial(file model.Obj, args model.LinkArgs) (*model
|
||||
u = res.Header().Get("location")
|
||||
//}
|
||||
|
||||
updateObjMd5(file, "pan.baidu.com", u)
|
||||
|
||||
return &model.Link{
|
||||
URL: u,
|
||||
Header: http.Header{
|
||||
@ -178,8 +179,6 @@ func (d *BaiduNetdisk) linkCrack(file model.Obj, args model.LinkArgs) (*model.Li
|
||||
return nil, err
|
||||
}
|
||||
|
||||
updateObjMd5(file, d.CustomCrackUA, resp.Info[0].Dlink)
|
||||
|
||||
return &model.Link{
|
||||
URL: resp.Info[0].Dlink,
|
||||
Header: http.Header{
|
||||
@ -229,19 +228,6 @@ func joinTime(form map[string]string, ctime, mtime int64) {
|
||||
form["local_ctime"] = strconv.FormatInt(ctime, 10)
|
||||
}
|
||||
|
||||
func updateObjMd5(obj model.Obj, userAgent, u string) {
|
||||
object := model.GetRawObject(obj)
|
||||
if object != nil {
|
||||
req, _ := http.NewRequest(http.MethodHead, u, nil)
|
||||
req.Header.Add("User-Agent", userAgent)
|
||||
resp, _ := base.HttpClient.Do(req)
|
||||
if resp != nil {
|
||||
contentMd5 := resp.Header.Get("Content-Md5")
|
||||
object.HashInfo = utils.NewHashInfo(utils.MD5, contentMd5)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const (
|
||||
DefaultSliceSize int64 = 4 * utils.MB
|
||||
VipSliceSize = 16 * utils.MB
|
||||
@ -267,3 +253,40 @@ func (d *BaiduNetdisk) getSliceSize() int64 {
|
||||
// r = strings.ReplaceAll(r, "+", "%20")
|
||||
// return r
|
||||
// }
|
||||
|
||||
func DecryptMd5(encryptMd5 string) string {
|
||||
if _, err := hex.DecodeString(encryptMd5); err == nil {
|
||||
return encryptMd5
|
||||
}
|
||||
|
||||
var out strings.Builder
|
||||
out.Grow(len(encryptMd5))
|
||||
for i, n := 0, int64(0); i < len(encryptMd5); i++ {
|
||||
if i == 9 {
|
||||
n = int64(unicode.ToLower(rune(encryptMd5[i])) - 'g')
|
||||
} else {
|
||||
n, _ = strconv.ParseInt(encryptMd5[i:i+1], 16, 64)
|
||||
}
|
||||
out.WriteString(strconv.FormatInt(n^int64(15&i), 16))
|
||||
}
|
||||
|
||||
encryptMd5 = out.String()
|
||||
return encryptMd5[8:16] + encryptMd5[:8] + encryptMd5[24:32] + encryptMd5[16:24]
|
||||
}
|
||||
|
||||
func EncryptMd5(originalMd5 string) string {
|
||||
reversed := originalMd5[8:16] + originalMd5[:8] + originalMd5[24:32] + originalMd5[16:24]
|
||||
|
||||
var out strings.Builder
|
||||
out.Grow(len(reversed))
|
||||
for i, n := 0, int64(0); i < len(reversed); i++ {
|
||||
n, _ = strconv.ParseInt(reversed[i:i+1], 16, 64)
|
||||
n ^= int64(15 & i)
|
||||
if i == 9 {
|
||||
out.WriteRune(rune(n) + 'g')
|
||||
} else {
|
||||
out.WriteString(strconv.FormatInt(n, 16))
|
||||
}
|
||||
}
|
||||
return out.String()
|
||||
}
|
||||
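A quick sanity check for the two helpers above is a round trip; the MD5 below is only an example value:

// roundTrip shows that DecryptMd5 inverts EncryptMd5 for a 32-character hex MD5.
func roundTrip() bool {
	md5 := "5f4dcc3b5aa765d61d8327deb882cf99" // illustrative value
	return DecryptMd5(EncryptMd5(md5)) == md5 // true
}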
|
@ -27,9 +27,10 @@ type BaiduPhoto struct {
|
||||
model.Storage
|
||||
Addition
|
||||
|
||||
AccessToken string
|
||||
Uk int64
|
||||
root model.Obj
|
||||
// AccessToken string
|
||||
Uk int64
|
||||
bdstoken string
|
||||
root model.Obj
|
||||
|
||||
uploadThread int
|
||||
}
|
||||
@ -48,9 +49,9 @@ func (d *BaiduPhoto) Init(ctx context.Context) error {
|
||||
d.uploadThread, d.UploadThread = 3, "3"
|
||||
}
|
||||
|
||||
if err := d.refreshToken(); err != nil {
|
||||
return err
|
||||
}
|
||||
// if err := d.refreshToken(); err != nil {
|
||||
// return err
|
||||
// }
|
||||
|
||||
// root
|
||||
if d.AlbumID != "" {
|
||||
@ -73,6 +74,10 @@ func (d *BaiduPhoto) Init(ctx context.Context) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
d.bdstoken, err = d.getBDStoken()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
d.Uk, err = strconv.ParseInt(info.YouaID, 10, 64)
|
||||
return err
|
||||
}
|
||||
@ -82,7 +87,7 @@ func (d *BaiduPhoto) GetRoot(ctx context.Context) (model.Obj, error) {
|
||||
}
|
||||
|
||||
func (d *BaiduPhoto) Drop(ctx context.Context) error {
|
||||
d.AccessToken = ""
|
||||
// d.AccessToken = ""
|
||||
d.Uk = 0
|
||||
d.root = nil
|
||||
return nil
|
||||
@ -137,13 +142,18 @@ func (d *BaiduPhoto) Link(ctx context.Context, file model.Obj, args model.LinkAr
|
||||
case *File:
|
||||
return d.linkFile(ctx, file, args)
|
||||
case *AlbumFile:
|
||||
f, err := d.CopyAlbumFile(ctx, file)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
// 处理共享相册
|
||||
if d.Uk != file.Uk {
|
||||
// there is a chance the link cannot be obtained
|
||||
// return d.linkAlbum(ctx, file, args)
|
||||
|
||||
f, err := d.CopyAlbumFile(ctx, file)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return d.linkFile(ctx, f, args)
|
||||
}
|
||||
return d.linkFile(ctx, f, args)
|
||||
// there is a chance the link cannot be obtained
|
||||
//return d.linkAlbum(ctx, file, args)
|
||||
return d.linkFile(ctx, &file.File, args)
|
||||
}
|
||||
return nil, errs.NotFile
|
||||
}
|
||||
@ -286,11 +296,12 @@ func (d *BaiduPhoto) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
|
||||
}
|
||||
|
||||
// try to fetch previous progress
|
||||
precreateResp, ok := base.GetUploadProgress[*PrecreateResp](d, d.AccessToken, contentMd5)
|
||||
precreateResp, ok := base.GetUploadProgress[*PrecreateResp](d, strconv.FormatInt(d.Uk, 10), contentMd5)
|
||||
if !ok {
|
||||
_, err = d.Post(FILE_API_URL_V1+"/precreate", func(r *resty.Request) {
|
||||
r.SetContext(ctx)
|
||||
r.SetFormData(params)
|
||||
r.SetQueryParam("bdstoken", d.bdstoken)
|
||||
}, &precreateResp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@ -319,8 +330,8 @@ func (d *BaiduPhoto) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
|
||||
"path": params["path"],
|
||||
"partseq": fmt.Sprint(partseq),
|
||||
"uploadid": precreateResp.UploadID,
|
||||
"app_id": "16051585",
|
||||
}
|
||||
|
||||
_, err = d.Post("https://c3.pcs.baidu.com/rest/2.0/pcs/superfile2", func(r *resty.Request) {
|
||||
r.SetContext(ctx)
|
||||
r.SetQueryParams(uploadParams)
|
||||
@ -337,7 +348,7 @@ func (d *BaiduPhoto) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
|
||||
if err = threadG.Wait(); err != nil {
|
||||
if errors.Is(err, context.Canceled) {
|
||||
precreateResp.BlockList = utils.SliceFilter(precreateResp.BlockList, func(s int) bool { return s >= 0 })
|
||||
base.SaveUploadProgress(d, precreateResp, d.AccessToken, contentMd5)
|
||||
base.SaveUploadProgress(d, strconv.FormatInt(d.Uk, 10), contentMd5)
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
@ -347,6 +358,7 @@ func (d *BaiduPhoto) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
|
||||
_, err = d.Post(FILE_API_URL_V1+"/create", func(r *resty.Request) {
|
||||
r.SetContext(ctx)
|
||||
r.SetFormData(params)
|
||||
r.SetQueryParam("bdstoken", d.bdstoken)
|
||||
}, &precreateResp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
@ -6,13 +6,14 @@ import (
|
||||
)
|
||||
|
||||
type Addition struct {
|
||||
RefreshToken string `json:"refresh_token" required:"true"`
|
||||
ShowType string `json:"show_type" type:"select" options:"root,root_only_album,root_only_file" default:"root"`
|
||||
AlbumID string `json:"album_id"`
|
||||
// RefreshToken string `json:"refresh_token" required:"true"`
|
||||
Cookie string `json:"cookie" required:"true"`
|
||||
ShowType string `json:"show_type" type:"select" options:"root,root_only_album,root_only_file" default:"root"`
|
||||
AlbumID string `json:"album_id"`
|
||||
//AlbumPassword string `json:"album_password"`
|
||||
DeleteOrigin bool `json:"delete_origin"`
|
||||
ClientID string `json:"client_id" required:"true" default:"iYCeC9g08h5vuP9UqvPHKKSVrKFXGa1v"`
|
||||
ClientSecret string `json:"client_secret" required:"true" default:"jXiFMOPVPCWlO2M5CwWQzffpNPaGTRBG"`
|
||||
DeleteOrigin bool `json:"delete_origin"`
|
||||
// ClientID string `json:"client_id" required:"true" default:"iYCeC9g08h5vuP9UqvPHKKSVrKFXGa1v"`
|
||||
// ClientSecret string `json:"client_secret" required:"true" default:"jXiFMOPVPCWlO2M5CwWQzffpNPaGTRBG"`
|
||||
UploadThread string `json:"upload_thread" default:"3" help:"1<=thread<=32"`
|
||||
}
|
||||
|
||||
|
@ -72,7 +72,7 @@ func (c *File) Thumb() string {
|
||||
}
|
||||
|
||||
func (c *File) GetHash() utils.HashInfo {
|
||||
return utils.NewHashInfo(utils.MD5, c.Md5)
|
||||
return utils.NewHashInfo(utils.MD5, DecryptMd5(c.Md5))
|
||||
}
|
||||
|
||||
/*相册部分*/
|
||||
|
@ -2,13 +2,15 @@ package baiduphoto
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"strings"
|
||||
"unicode"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/errs"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/go-resty/resty/v2"
|
||||
)
|
||||
@ -21,9 +23,10 @@ const (
|
||||
FILE_API_URL_V2 = API_URL + "/file/v2"
|
||||
)
|
||||
|
||||
func (d *BaiduPhoto) Request(furl string, method string, callback base.ReqCallback, resp interface{}) (*resty.Response, error) {
|
||||
req := base.RestyClient.R().
|
||||
SetQueryParam("access_token", d.AccessToken)
|
||||
func (d *BaiduPhoto) Request(client *resty.Client, furl string, method string, callback base.ReqCallback, resp interface{}) (*resty.Response, error) {
|
||||
req := client.R().
|
||||
// SetQueryParam("access_token", d.AccessToken)
|
||||
SetHeader("Cookie", d.Cookie)
|
||||
if callback != nil {
|
||||
callback(req)
|
||||
}
|
||||
@ -45,10 +48,10 @@ func (d *BaiduPhoto) Request(furl string, method string, callback base.ReqCallba
|
||||
return nil, fmt.Errorf("no shared albums found")
|
||||
case 50100:
|
||||
return nil, fmt.Errorf("illegal title, only supports 50 characters")
|
||||
case -6:
|
||||
if err = d.refreshToken(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// case -6:
|
||||
// if err = d.refreshToken(); err != nil {
|
||||
// return nil, err
|
||||
// }
|
||||
default:
|
||||
return nil, fmt.Errorf("errno: %d, refer to https://photo.baidu.com/union/doc", erron)
|
||||
}
|
||||
@ -63,36 +66,36 @@ func (d *BaiduPhoto) Request(furl string, method string, callback base.ReqCallba
|
||||
// return res.Body(), nil
|
||||
//}
|
||||
|
||||
func (d *BaiduPhoto) refreshToken() error {
|
||||
u := "https://openapi.baidu.com/oauth/2.0/token"
|
||||
var resp base.TokenResp
|
||||
var e TokenErrResp
|
||||
_, err := base.RestyClient.R().SetResult(&resp).SetError(&e).SetQueryParams(map[string]string{
|
||||
"grant_type": "refresh_token",
|
||||
"refresh_token": d.RefreshToken,
|
||||
"client_id": d.ClientID,
|
||||
"client_secret": d.ClientSecret,
|
||||
}).Get(u)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if e.ErrorMsg != "" {
|
||||
return &e
|
||||
}
|
||||
if resp.RefreshToken == "" {
|
||||
return errs.EmptyToken
|
||||
}
|
||||
d.AccessToken, d.RefreshToken = resp.AccessToken, resp.RefreshToken
|
||||
op.MustSaveDriverStorage(d)
|
||||
return nil
|
||||
}
|
||||
// func (d *BaiduPhoto) refreshToken() error {
|
||||
// u := "https://openapi.baidu.com/oauth/2.0/token"
|
||||
// var resp base.TokenResp
|
||||
// var e TokenErrResp
|
||||
// _, err := base.RestyClient.R().SetResult(&resp).SetError(&e).SetQueryParams(map[string]string{
|
||||
// "grant_type": "refresh_token",
|
||||
// "refresh_token": d.RefreshToken,
|
||||
// "client_id": d.ClientID,
|
||||
// "client_secret": d.ClientSecret,
|
||||
// }).Get(u)
|
||||
// if err != nil {
|
||||
// return err
|
||||
// }
|
||||
// if e.ErrorMsg != "" {
|
||||
// return &e
|
||||
// }
|
||||
// if resp.RefreshToken == "" {
|
||||
// return errs.EmptyToken
|
||||
// }
|
||||
// d.AccessToken, d.RefreshToken = resp.AccessToken, resp.RefreshToken
|
||||
// op.MustSaveDriverStorage(d)
|
||||
// return nil
|
||||
// }
|
||||
|
||||
func (d *BaiduPhoto) Get(furl string, callback base.ReqCallback, resp interface{}) (*resty.Response, error) {
|
||||
return d.Request(furl, http.MethodGet, callback, resp)
|
||||
return d.Request(base.RestyClient, furl, http.MethodGet, callback, resp)
|
||||
}
|
||||
|
||||
func (d *BaiduPhoto) Post(furl string, callback base.ReqCallback, resp interface{}) (*resty.Response, error) {
|
||||
return d.Request(furl, http.MethodPost, callback, resp)
|
||||
return d.Request(base.RestyClient, furl, http.MethodPost, callback, resp)
|
||||
}
|
||||
|
||||
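With Request now taking the *resty.Client as its first argument, Get and Post above are thin wrappers that pick the client for the caller. An illustrative stand-in for that wrapper pattern, not the driver's own code: a callback shapes the request and an optional pointer receives the decoded JSON.

package main

import (
	"fmt"

	"github.com/go-resty/resty/v2"
)

// Stand-in for the Get/Post wrappers: the caller supplies a client, a callback
// to shape the request, and a pointer for the decoded JSON result.
func get(client *resty.Client, url string, callback func(*resty.Request), result interface{}) (*resty.Response, error) {
	req := client.R()
	if callback != nil {
		callback(req)
	}
	if result != nil {
		req.SetResult(result)
	}
	return req.Get(url)
}

func main() {
	var out map[string]interface{}
	res, err := get(resty.New(), "https://httpbin.org/get", func(r *resty.Request) {
		r.SetQueryParam("clienttype", "70") // example query parameter
	}, &out)
	if err != nil {
		fmt.Println("request error:", err)
		return
	}
	fmt.Println(res.StatusCode(), out["args"])
}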
// 获取所有文件
|
||||
@ -338,24 +341,29 @@ func (d *BaiduPhoto) linkAlbum(ctx context.Context, file *AlbumFile, args model.
|
||||
headers["X-Forwarded-For"] = args.IP
|
||||
}
|
||||
|
||||
res, err := base.NoRedirectClient.R().
|
||||
SetContext(ctx).
|
||||
SetHeaders(headers).
|
||||
SetQueryParams(map[string]string{
|
||||
"access_token": d.AccessToken,
|
||||
"fsid": fmt.Sprint(file.Fsid),
|
||||
"album_id": file.AlbumID,
|
||||
"tid": fmt.Sprint(file.Tid),
|
||||
"uk": fmt.Sprint(file.Uk),
|
||||
}).
|
||||
Head(ALBUM_API_URL + "/download")
|
||||
resp, err := d.Request(base.NoRedirectClient, ALBUM_API_URL+"/download", http.MethodHead, func(r *resty.Request) {
|
||||
r.SetContext(ctx)
|
||||
r.SetHeaders(headers)
|
||||
r.SetQueryParams(map[string]string{
|
||||
"fsid": fmt.Sprint(file.Fsid),
|
||||
"album_id": file.AlbumID,
|
||||
"tid": fmt.Sprint(file.Tid),
|
||||
"uk": fmt.Sprint(file.Uk),
|
||||
})
|
||||
}, nil)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if resp.StatusCode() != 302 {
|
||||
return nil, fmt.Errorf("not found 302 redirect")
|
||||
}
|
||||
|
||||
location := resp.Header().Get("Location")
|
||||
|
||||
link := &model.Link{
|
||||
URL: res.Header().Get("location"),
|
||||
URL: location,
|
||||
Header: http.Header{
|
||||
"User-Agent": []string{headers["User-Agent"]},
|
||||
"Referer": []string{"https://photo.baidu.com/"},
|
||||
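The rewritten linkAlbum issues the HEAD request through the shared Request helper with base.NoRedirectClient and then reads the Location header of the 302 itself. A standalone sketch of that redirect-capture pattern; the URL is a placeholder, and note that resty returns both the 302 response and an error when its no-redirect policy stops the redirect.

package main

import (
	"fmt"
	"net/http"

	"github.com/go-resty/resty/v2"
)

// Sketch: capture a download URL from a 302 without following it.
func main() {
	client := resty.New().SetRedirectPolicy(resty.NoRedirectPolicy())
	resp, err := client.R().Head("https://example.com/download") // placeholder URL
	if resp != nil && resp.StatusCode() == http.StatusFound {
		fmt.Println("Location:", resp.Header().Get("Location"))
	} else {
		fmt.Println("no redirect captured:", err)
	}
}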
@ -385,10 +393,24 @@ func (d *BaiduPhoto) linkFile(ctx context.Context, file *File, args model.LinkAr
|
||||
"fsid": fmt.Sprint(file.Fsid),
|
||||
})
|
||||
}, &downloadUrl)
|
||||
|
||||
// resp, err := d.Request(base.NoRedirectClient, FILE_API_URL_V1+"/download", http.MethodHead, func(r *resty.Request) {
|
||||
// r.SetContext(ctx)
|
||||
// r.SetHeaders(headers)
|
||||
// r.SetQueryParams(map[string]string{
|
||||
// "fsid": fmt.Sprint(file.Fsid),
|
||||
// })
|
||||
// }, nil)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// if resp.StatusCode() != 302 {
|
||||
// return nil, fmt.Errorf("not found 302 redirect")
|
||||
// }
|
||||
|
||||
// location := resp.Header().Get("Location")
|
||||
link := &model.Link{
|
||||
URL: downloadUrl.Dlink,
|
||||
Header: http.Header{
|
||||
@ -453,3 +475,55 @@ func (d *BaiduPhoto) uInfo() (*UInfo, error) {
|
||||
}
|
||||
return &info, nil
|
||||
}
|
||||
|
||||
func (d *BaiduPhoto) getBDStoken() (string, error) {
	var info struct {
		Result struct {
			Bdstoken string `json:"bdstoken"`
			Token    string `json:"token"`
			Uk       int64  `json:"uk"`
		} `json:"result"`
	}
	_, err := d.Get("https://pan.baidu.com/api/gettemplatevariable?fields=[%22bdstoken%22,%22token%22,%22uk%22]", nil, &info)
	if err != nil {
		return "", err
	}
	return info.Result.Bdstoken, nil
}

func DecryptMd5(encryptMd5 string) string {
	if _, err := hex.DecodeString(encryptMd5); err == nil {
		return encryptMd5
	}

	var out strings.Builder
	out.Grow(len(encryptMd5))
	for i, n := 0, int64(0); i < len(encryptMd5); i++ {
		if i == 9 {
			n = int64(unicode.ToLower(rune(encryptMd5[i])) - 'g')
		} else {
			n, _ = strconv.ParseInt(encryptMd5[i:i+1], 16, 64)
		}
		out.WriteString(strconv.FormatInt(n^int64(15&i), 16))
	}

	encryptMd5 = out.String()
	return encryptMd5[8:16] + encryptMd5[:8] + encryptMd5[24:32] + encryptMd5[16:24]
}

func EncryptMd5(originalMd5 string) string {
	reversed := originalMd5[8:16] + originalMd5[:8] + originalMd5[24:32] + originalMd5[16:24]

	var out strings.Builder
	out.Grow(len(reversed))
	for i, n := 0, int64(0); i < len(reversed); i++ {
		n, _ = strconv.ParseInt(reversed[i:i+1], 16, 64)
		n ^= int64(15 & i)
		if i == 9 {
			out.WriteRune(rune(n) + 'g')
		} else {
			out.WriteString(strconv.FormatInt(n, 16))
		}
	}
	return out.String()
}
|
||||
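DecryptMd5 undoes Baidu's digest obfuscation (per-nibble XOR with the index, one alphabetic character at position 9, and a block swap), and EncryptMd5 is its inverse, so decrypting an encrypted digest gives back the original hex string. A small round-trip check, assuming this snippet lives in a module that depends on the alist repo so the exported helpers can be imported.

package main

import (
	"fmt"

	"github.com/alist-org/alist/v3/drivers/baiduphoto"
)

// Round-trip check: DecryptMd5(EncryptMd5(x)) == x for a plain 32-char hex digest.
func main() {
	md5 := "0123456789abcdef0123456789abcdef" // example digest, not a real file hash
	enc := baiduphoto.EncryptMd5(md5)
	dec := baiduphoto.DecryptMd5(enc)
	fmt.Println(enc, dec, dec == md5) // expected: obfuscated string, original digest, true
}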
|
@ -67,7 +67,9 @@ func (d *ChaoXing) Init(ctx context.Context) error {
|
||||
}
|
||||
|
||||
func (d *ChaoXing) Drop(ctx context.Context) error {
|
||||
d.cron.Stop()
|
||||
if d.cron != nil {
|
||||
d.cron.Stop()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
|
@ -191,33 +191,33 @@ type UploadFileDataRsp struct {
|
||||
Resid int64 `json:"resid"`
|
||||
Puid int `json:"puid"`
|
||||
Data struct {
|
||||
DisableOpt bool `json:"disableOpt"`
|
||||
Resid int64 `json:"resid"`
|
||||
Crc string `json:"crc"`
|
||||
Puid int `json:"puid"`
|
||||
Isfile bool `json:"isfile"`
|
||||
Pantype string `json:"pantype"`
|
||||
Size int `json:"size"`
|
||||
Name string `json:"name"`
|
||||
ObjectID string `json:"objectId"`
|
||||
Restype string `json:"restype"`
|
||||
UploadDate time.Time `json:"uploadDate"`
|
||||
ModifyDate time.Time `json:"modifyDate"`
|
||||
UploadDateFormat string `json:"uploadDateFormat"`
|
||||
Residstr string `json:"residstr"`
|
||||
Suffix string `json:"suffix"`
|
||||
Preview string `json:"preview"`
|
||||
Thumbnail string `json:"thumbnail"`
|
||||
Creator int `json:"creator"`
|
||||
Duration int `json:"duration"`
|
||||
IsImg bool `json:"isImg"`
|
||||
PreviewURL string `json:"previewUrl"`
|
||||
Filetype string `json:"filetype"`
|
||||
Filepath string `json:"filepath"`
|
||||
Sort int `json:"sort"`
|
||||
Topsort int `json:"topsort"`
|
||||
ResTypeValue int `json:"resTypeValue"`
|
||||
Extinfo string `json:"extinfo"`
|
||||
DisableOpt bool `json:"disableOpt"`
|
||||
Resid int64 `json:"resid"`
|
||||
Crc string `json:"crc"`
|
||||
Puid int `json:"puid"`
|
||||
Isfile bool `json:"isfile"`
|
||||
Pantype string `json:"pantype"`
|
||||
Size int `json:"size"`
|
||||
Name string `json:"name"`
|
||||
ObjectID string `json:"objectId"`
|
||||
Restype string `json:"restype"`
|
||||
UploadDate int64 `json:"uploadDate"`
|
||||
ModifyDate int64 `json:"modifyDate"`
|
||||
UploadDateFormat string `json:"uploadDateFormat"`
|
||||
Residstr string `json:"residstr"`
|
||||
Suffix string `json:"suffix"`
|
||||
Preview string `json:"preview"`
|
||||
Thumbnail string `json:"thumbnail"`
|
||||
Creator int `json:"creator"`
|
||||
Duration int `json:"duration"`
|
||||
IsImg bool `json:"isImg"`
|
||||
PreviewURL string `json:"previewUrl"`
|
||||
Filetype string `json:"filetype"`
|
||||
Filepath string `json:"filepath"`
|
||||
Sort int `json:"sort"`
|
||||
Topsort int `json:"topsort"`
|
||||
ResTypeValue int `json:"resTypeValue"`
|
||||
Extinfo string `json:"extinfo"`
|
||||
} `json:"data"`
|
||||
}
|
||||
|
||||
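UploadDate and ModifyDate switch from time.Time to int64 because the ChaoXing API returns numeric timestamps that do not unmarshal into time.Time. A conversion sketch, assuming the values are Unix timestamps in milliseconds; the diff itself does not state the unit, so use time.Unix(v, 0) instead if the API actually returns seconds.

package main

import (
	"fmt"
	"time"
)

// Sketch: turning the raw int64 upload timestamp back into a time.Time.
func main() {
	var uploadDate int64 = 1700000000000 // example value, not taken from a real response
	t := time.UnixMilli(uploadDate)      // assumption: millisecond precision
	fmt.Println(t.UTC().Format(time.RFC3339))
}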
@ -225,33 +225,33 @@ type UploadDoneParam struct {
|
||||
Cataid string `json:"cataid"`
|
||||
Key string `json:"key"`
|
||||
Param struct {
|
||||
DisableOpt bool `json:"disableOpt"`
|
||||
Resid int64 `json:"resid"`
|
||||
Crc string `json:"crc"`
|
||||
Puid int `json:"puid"`
|
||||
Isfile bool `json:"isfile"`
|
||||
Pantype string `json:"pantype"`
|
||||
Size int `json:"size"`
|
||||
Name string `json:"name"`
|
||||
ObjectID string `json:"objectId"`
|
||||
Restype string `json:"restype"`
|
||||
UploadDate time.Time `json:"uploadDate"`
|
||||
ModifyDate time.Time `json:"modifyDate"`
|
||||
UploadDateFormat string `json:"uploadDateFormat"`
|
||||
Residstr string `json:"residstr"`
|
||||
Suffix string `json:"suffix"`
|
||||
Preview string `json:"preview"`
|
||||
Thumbnail string `json:"thumbnail"`
|
||||
Creator int `json:"creator"`
|
||||
Duration int `json:"duration"`
|
||||
IsImg bool `json:"isImg"`
|
||||
PreviewURL string `json:"previewUrl"`
|
||||
Filetype string `json:"filetype"`
|
||||
Filepath string `json:"filepath"`
|
||||
Sort int `json:"sort"`
|
||||
Topsort int `json:"topsort"`
|
||||
ResTypeValue int `json:"resTypeValue"`
|
||||
Extinfo string `json:"extinfo"`
|
||||
DisableOpt bool `json:"disableOpt"`
|
||||
Resid int64 `json:"resid"`
|
||||
Crc string `json:"crc"`
|
||||
Puid int `json:"puid"`
|
||||
Isfile bool `json:"isfile"`
|
||||
Pantype string `json:"pantype"`
|
||||
Size int `json:"size"`
|
||||
Name string `json:"name"`
|
||||
ObjectID string `json:"objectId"`
|
||||
Restype string `json:"restype"`
|
||||
UploadDate int64 `json:"uploadDate"`
|
||||
ModifyDate int64 `json:"modifyDate"`
|
||||
UploadDateFormat string `json:"uploadDateFormat"`
|
||||
Residstr string `json:"residstr"`
|
||||
Suffix string `json:"suffix"`
|
||||
Preview string `json:"preview"`
|
||||
Thumbnail string `json:"thumbnail"`
|
||||
Creator int `json:"creator"`
|
||||
Duration int `json:"duration"`
|
||||
IsImg bool `json:"isImg"`
|
||||
PreviewURL string `json:"previewUrl"`
|
||||
Filetype string `json:"filetype"`
|
||||
Filepath string `json:"filepath"`
|
||||
Sort int `json:"sort"`
|
||||
Topsort int `json:"topsort"`
|
||||
ResTypeValue int `json:"resTypeValue"`
|
||||
Extinfo string `json:"extinfo"`
|
||||
} `json:"param"`
|
||||
}
|
||||
|
||||
|
@ -4,11 +4,13 @@ import (
|
||||
"context"
|
||||
"io"
|
||||
"net/http"
|
||||
"path"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/errs"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/go-resty/resty/v2"
|
||||
@ -90,7 +92,7 @@ func (d *Cloudreve) MakeDir(ctx context.Context, parentDir model.Obj, dirName st
|
||||
func (d *Cloudreve) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
body := base.Json{
|
||||
"action": "move",
|
||||
"src_dir": srcObj.GetPath(),
|
||||
"src_dir": path.Dir(srcObj.GetPath()),
|
||||
"dst": dstDir.GetPath(),
|
||||
"src": convertSrc(srcObj),
|
||||
}
|
||||
@ -112,7 +114,7 @@ func (d *Cloudreve) Rename(ctx context.Context, srcObj model.Obj, newName string
|
||||
|
||||
func (d *Cloudreve) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
body := base.Json{
|
||||
"src_dir": srcObj.GetPath(),
|
||||
"src_dir": path.Dir(srcObj.GetPath()),
|
||||
"dst": dstDir.GetPath(),
|
||||
"src": convertSrc(srcObj),
|
||||
}
|
||||
@ -133,6 +135,8 @@ func (d *Cloudreve) Put(ctx context.Context, dstDir model.Obj, stream model.File
|
||||
if io.ReadCloser(stream) == http.NoBody {
|
||||
return d.create(ctx, dstDir, stream)
|
||||
}
|
||||
|
||||
// 获取存储策略
|
||||
var r DirectoryResp
|
||||
err := d.request(http.MethodGet, "/directory"+dstDir.GetPath(), nil, &r)
|
||||
if err != nil {
|
||||
@ -145,6 +149,8 @@ func (d *Cloudreve) Put(ctx context.Context, dstDir model.Obj, stream model.File
|
||||
"policy_id": r.Policy.Id,
|
||||
"last_modified": stream.ModTime().Unix(),
|
||||
}
|
||||
|
||||
// 获取上传会话信息
|
||||
var u UploadInfo
|
||||
err = d.request(http.MethodPut, "/file/upload", func(req *resty.Request) {
|
||||
req.SetBody(uploadBody)
|
||||
@ -152,36 +158,50 @@ func (d *Cloudreve) Put(ctx context.Context, dstDir model.Obj, stream model.File
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
var chunkSize = u.ChunkSize
|
||||
var buf []byte
|
||||
var chunk int
|
||||
for {
|
||||
var n int
|
||||
buf = make([]byte, chunkSize)
|
||||
n, err = io.ReadAtLeast(stream, buf, chunkSize)
|
||||
if err != nil && err != io.ErrUnexpectedEOF {
|
||||
if err == io.EOF {
|
||||
return nil
|
||||
|
||||
// 根据存储方式选择分片上传的方法
|
||||
switch r.Policy.Type {
|
||||
case "onedrive":
|
||||
err = d.upOneDrive(ctx, stream, u, up)
|
||||
case "remote": // 从机存储
|
||||
err = d.upRemote(ctx, stream, u, up)
|
||||
case "local": // 本机存储
|
||||
var chunkSize = u.ChunkSize
|
||||
var buf []byte
|
||||
var chunk int
|
||||
for {
|
||||
var n int
|
||||
buf = make([]byte, chunkSize)
|
||||
n, err = io.ReadAtLeast(stream, buf, chunkSize)
|
||||
if err != nil && err != io.ErrUnexpectedEOF {
|
||||
if err == io.EOF {
|
||||
return nil
|
||||
}
|
||||
return err
|
||||
}
|
||||
return err
|
||||
if n == 0 {
|
||||
break
|
||||
}
|
||||
buf = buf[:n]
|
||||
err = d.request(http.MethodPost, "/file/upload/"+u.SessionID+"/"+strconv.Itoa(chunk), func(req *resty.Request) {
|
||||
req.SetHeader("Content-Type", "application/octet-stream")
|
||||
req.SetHeader("Content-Length", strconv.Itoa(n))
|
||||
req.SetBody(buf)
|
||||
}, nil)
|
||||
if err != nil {
|
||||
break
|
||||
}
|
||||
chunk++
|
||||
}
|
||||
|
||||
if n == 0 {
|
||||
break
|
||||
}
|
||||
buf = buf[:n]
|
||||
err = d.request(http.MethodPost, "/file/upload/"+u.SessionID+"/"+strconv.Itoa(chunk), func(req *resty.Request) {
|
||||
req.SetHeader("Content-Type", "application/octet-stream")
|
||||
req.SetHeader("Content-Length", strconv.Itoa(n))
|
||||
req.SetBody(buf)
|
||||
}, nil)
|
||||
if err != nil {
|
||||
break
|
||||
}
|
||||
chunk++
|
||||
|
||||
default:
|
||||
err = errs.NotImplement
|
||||
}
|
||||
return err
|
||||
if err != nil {
|
||||
// 删除失败的会话
|
||||
err = d.request(http.MethodDelete, "/file/upload/"+u.SessionID, nil, nil)
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Cloudreve) create(ctx context.Context, dir model.Obj, file model.Obj) error {
|
||||
|
@ -21,9 +21,11 @@ type Policy struct {
|
||||
}
|
||||
|
||||
type UploadInfo struct {
|
||||
SessionID string `json:"sessionID"`
|
||||
ChunkSize int `json:"chunkSize"`
|
||||
Expires int `json:"expires"`
|
||||
SessionID string `json:"sessionID"`
|
||||
ChunkSize int `json:"chunkSize"`
|
||||
Expires int `json:"expires"`
|
||||
UploadURLs []string `json:"uploadURLs"`
|
||||
Credential string `json:"credential,omitempty"`
|
||||
}
|
||||
|
||||
type DirectoryResp struct {
|
||||
|
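UploadInfo now also carries UploadURLs and Credential so chunks can be sent straight to the storage backend; every upload path below advances through the stream in ChunkSize steps. A small sketch of the chunk arithmetic those loops rely on.

package main

import "fmt"

// Sketch: splitting a file of `size` bytes into chunks of `chunkSize`,
// mirroring how the upload loops advance `finish` by at most ChunkSize.
func chunkRanges(size, chunkSize int64) [][2]int64 {
	var ranges [][2]int64
	for off := int64(0); off < size; off += chunkSize {
		end := off + chunkSize
		if end > size {
			end = size
		}
		ranges = append(ranges, [2]int64{off, end - 1}) // inclusive byte range
	}
	return ranges
}

func main() {
	fmt.Println(chunkRanges(10<<20, 4<<20)) // e.g. a 10 MiB file in 4 MiB chunks
}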
@ -1,16 +1,23 @@
|
||||
package cloudreve
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/base64"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/conf"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/internal/setting"
|
||||
"github.com/alist-org/alist/v3/pkg/cookie"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/go-resty/resty/v2"
|
||||
json "github.com/json-iterator/go"
|
||||
jsoniter "github.com/json-iterator/go"
|
||||
@ -172,3 +179,95 @@ func (d *Cloudreve) GetThumb(file Object) (model.Thumbnail, error) {
|
||||
Thumbnail: resp.Header().Get("Location"),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (d *Cloudreve) upRemote(ctx context.Context, stream model.FileStreamer, u UploadInfo, up driver.UpdateProgress) error {
|
||||
uploadUrl := u.UploadURLs[0]
|
||||
credential := u.Credential
|
||||
var finish int64 = 0
|
||||
var chunk int = 0
|
||||
DEFAULT := int64(u.ChunkSize)
|
||||
for finish < stream.GetSize() {
|
||||
if utils.IsCanceled(ctx) {
|
||||
return ctx.Err()
|
||||
}
|
||||
utils.Log.Debugf("[Cloudreve-Remote] upload: %d", finish)
|
||||
var byteSize = DEFAULT
|
||||
left := stream.GetSize() - finish
|
||||
if left < DEFAULT {
|
||||
byteSize = left
|
||||
}
|
||||
byteData := make([]byte, byteSize)
|
||||
n, err := io.ReadFull(stream, byteData)
|
||||
utils.Log.Debug(err, n)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req, err := http.NewRequest("POST", uploadUrl+"?chunk="+strconv.Itoa(chunk), bytes.NewBuffer(byteData))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req = req.WithContext(ctx)
|
||||
req.Header.Set("Content-Length", strconv.Itoa(int(byteSize)))
|
||||
req.Header.Set("Authorization", fmt.Sprint(credential))
|
||||
finish += byteSize
|
||||
res, err := base.HttpClient.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
res.Body.Close()
|
||||
up(float64(finish) * 100 / float64(stream.GetSize()))
|
||||
chunk++
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Cloudreve) upOneDrive(ctx context.Context, stream model.FileStreamer, u UploadInfo, up driver.UpdateProgress) error {
|
||||
uploadUrl := u.UploadURLs[0]
|
||||
var finish int64 = 0
|
||||
DEFAULT := int64(u.ChunkSize)
|
||||
for finish < stream.GetSize() {
|
||||
if utils.IsCanceled(ctx) {
|
||||
return ctx.Err()
|
||||
}
|
||||
utils.Log.Debugf("[Cloudreve-OneDrive] upload: %d", finish)
|
||||
var byteSize = DEFAULT
|
||||
left := stream.GetSize() - finish
|
||||
if left < DEFAULT {
|
||||
byteSize = left
|
||||
}
|
||||
byteData := make([]byte, byteSize)
|
||||
n, err := io.ReadFull(stream, byteData)
|
||||
utils.Log.Debug(err, n)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req, err := http.NewRequest("PUT", uploadUrl, bytes.NewBuffer(byteData))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req = req.WithContext(ctx)
|
||||
req.Header.Set("Content-Length", strconv.Itoa(int(byteSize)))
|
||||
req.Header.Set("Content-Range", fmt.Sprintf("bytes %d-%d/%d", finish, finish+byteSize-1, stream.GetSize()))
|
||||
finish += byteSize
|
||||
res, err := base.HttpClient.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// https://learn.microsoft.com/zh-cn/onedrive/developer/rest-api/api/driveitem_createuploadsession
|
||||
if res.StatusCode != 201 && res.StatusCode != 202 && res.StatusCode != 200 {
|
||||
data, _ := io.ReadAll(res.Body)
|
||||
res.Body.Close()
|
||||
return errors.New(string(data))
|
||||
}
|
||||
res.Body.Close()
|
||||
up(float64(finish) * 100 / float64(stream.GetSize()))
|
||||
}
|
||||
// 上传成功发送回调请求
|
||||
err := d.request(http.MethodPost, "/callback/onedrive/finish/"+u.SessionID, func(req *resty.Request) {
|
||||
req.SetBody("{}")
|
||||
}, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
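upOneDrive must describe each chunk with a Content-Range header, as the OneDrive upload-session API expects. A tiny sketch of that header, using the same bookkeeping as the loop above (finish = bytes already sent).

package main

import "fmt"

// Sketch: the Content-Range header for one chunk of an OneDrive upload session.
func contentRange(finish, byteSize, total int64) string {
	return fmt.Sprintf("bytes %d-%d/%d", finish, finish+byteSize-1, total)
}

func main() {
	fmt.Println(contentRange(0, 4<<20, 10<<20)) // "bytes 0-4194303/10485760"
	fmt.Println(contentRange(8<<20, 2<<20, 10<<20)) // last, shorter chunk ends at total-1
}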
@ -13,6 +13,7 @@ import (
|
||||
"github.com/alist-org/alist/v3/internal/fs"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
"github.com/alist-org/alist/v3/internal/sign"
|
||||
"github.com/alist-org/alist/v3/internal/stream"
|
||||
"github.com/alist-org/alist/v3/pkg/http_range"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
@ -160,7 +161,11 @@ func (d *Crypt) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([
|
||||
// discarding hash as it's encrypted
|
||||
}
|
||||
if d.Thumbnail && thumb == "" {
|
||||
thumb = utils.EncodePath(common.GetApiUrl(nil)+stdpath.Join("/d", args.ReqPath, ".thumbnails", name+".webp"), true)
|
||||
thumbPath := stdpath.Join(args.ReqPath, ".thumbnails", name+".webp")
|
||||
thumb = fmt.Sprintf("%s/d%s?sign=%s",
|
||||
common.GetApiUrl(common.GetHttpReq(ctx)),
|
||||
utils.EncodePath(thumbPath, true),
|
||||
sign.Sign(thumbPath))
|
||||
}
|
||||
if !ok && !d.Thumbnail {
|
||||
result = append(result, &objRes)
|
||||
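The thumbnail URL built in Crypt.List is now signed so it keeps working when alist enforces signed links. A self-contained sketch of the same URL shape; the HMAC signer below is only a stand-in for alist's sign.Sign, and apiURL/reqPath stand in for common.GetApiUrl and args.ReqPath.

package main

import (
	"crypto/hmac"
	"crypto/sha256"
	"encoding/base64"
	"fmt"
	"net/url"
	stdpath "path"
)

// Stand-in signer; alist's sign.Sign uses its own token-based scheme.
func signPath(secret, p string) string {
	h := hmac.New(sha256.New, []byte(secret))
	h.Write([]byte(p))
	return base64.URLEncoding.EncodeToString(h.Sum(nil))
}

func thumbURL(apiURL, reqPath, name string) string {
	thumbPath := stdpath.Join(reqPath, ".thumbnails", name+".webp")
	escaped := (&url.URL{Path: thumbPath}).EscapedPath() // stand-in for utils.EncodePath(thumbPath, true)
	return fmt.Sprintf("%s/d%s?sign=%s", apiURL, escaped, signPath("example-secret", thumbPath))
}

func main() {
	fmt.Println(thumbURL("https://alist.example.com", "/crypt/photos", "cat.jpg"))
}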
@ -270,7 +275,6 @@ func (d *Crypt) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (
|
||||
rrc = converted
|
||||
}
|
||||
if rrc != nil {
|
||||
//remoteRangeReader, err :=
|
||||
remoteReader, err := rrc.RangeRead(ctx, http_range.Range{Start: underlyingOffset, Length: length})
|
||||
remoteClosers.AddClosers(rrc.GetClosers())
|
||||
if err != nil {
|
||||
@ -283,10 +287,8 @@ func (d *Crypt) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
//remoteClosers.Add(remoteLink.MFile)
|
||||
//keep reuse same MFile and close at last.
|
||||
remoteClosers.Add(remoteLink.MFile)
|
||||
return io.NopCloser(remoteLink.MFile), nil
|
||||
// 可以直接返回,读取完也不会调用Close,直到连接断开Close
|
||||
return remoteLink.MFile, nil
|
||||
}
|
||||
|
||||
return nil, errs.NotSupport
|
||||
|
drivers/febbox/driver.go (new file, 132 lines)
@ -0,0 +1,132 @@
|
||||
package febbox
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"golang.org/x/oauth2"
|
||||
"golang.org/x/oauth2/clientcredentials"
|
||||
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/errs"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
)
|
||||
|
||||
type FebBox struct {
|
||||
model.Storage
|
||||
Addition
|
||||
accessToken string
|
||||
oauth2Token oauth2.TokenSource
|
||||
}
|
||||
|
||||
func (d *FebBox) Config() driver.Config {
|
||||
return config
|
||||
}
|
||||
|
||||
func (d *FebBox) GetAddition() driver.Additional {
|
||||
return &d.Addition
|
||||
}
|
||||
|
||||
func (d *FebBox) Init(ctx context.Context) error {
|
||||
// 初始化 oauth2Config
|
||||
oauth2Config := &clientcredentials.Config{
|
||||
ClientID: d.ClientID,
|
||||
ClientSecret: d.ClientSecret,
|
||||
AuthStyle: oauth2.AuthStyleInParams,
|
||||
TokenURL: "https://api.febbox.com/oauth/token",
|
||||
}
|
||||
|
||||
d.initializeOAuth2Token(ctx, oauth2Config, d.Addition.RefreshToken)
|
||||
|
||||
token, err := d.oauth2Token.Token()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
d.accessToken = token.AccessToken
|
||||
d.Addition.RefreshToken = token.RefreshToken
|
||||
op.MustSaveDriverStorage(d)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *FebBox) Drop(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *FebBox) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||
files, err := d.getFilesList(dir.GetID())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
|
||||
return fileToObj(src), nil
|
||||
})
|
||||
}
|
||||
|
||||
func (d *FebBox) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
var ip string
|
||||
if d.Addition.UserIP != "" {
|
||||
ip = d.Addition.UserIP
|
||||
} else {
|
||||
ip = args.IP
|
||||
}
|
||||
|
||||
url, err := d.getDownloadLink(file.GetID(), ip)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &model.Link{
|
||||
URL: url,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (d *FebBox) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
|
||||
err := d.makeDir(parentDir.GetID(), dirName)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (d *FebBox) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
|
||||
err := d.move(srcObj.GetID(), dstDir.GetID())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (d *FebBox) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
|
||||
err := d.rename(srcObj.GetID(), newName)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (d *FebBox) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
|
||||
err := d.copy(srcObj.GetID(), dstDir.GetID())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (d *FebBox) Remove(ctx context.Context, obj model.Obj) error {
|
||||
err := d.remove(obj.GetID())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *FebBox) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
|
||||
return nil, errs.NotImplement
|
||||
}
|
||||
|
||||
var _ driver.Driver = (*FebBox)(nil)
|
drivers/febbox/meta.go (new file, 36 lines)
@ -0,0 +1,36 @@
|
||||
package febbox
|
||||
|
||||
import (
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
)
|
||||
|
||||
type Addition struct {
|
||||
driver.RootID
|
||||
ClientID string `json:"client_id" required:"true" default:""`
|
||||
ClientSecret string `json:"client_secret" required:"true" default:""`
|
||||
RefreshToken string
|
||||
SortRule string `json:"sort_rule" required:"true" type:"select" options:"size_asc,size_desc,name_asc,name_desc,update_asc,update_desc,ext_asc,ext_desc" default:"name_asc"`
|
||||
PageSize int64 `json:"page_size" required:"true" type:"number" default:"100" help:"list api per page size of FebBox driver"`
|
||||
UserIP string `json:"user_ip" default:"" help:"user ip address for download link which can speed up the download"`
|
||||
}
|
||||
|
||||
var config = driver.Config{
|
||||
Name: "FebBox",
|
||||
LocalSort: false,
|
||||
OnlyLocal: false,
|
||||
OnlyProxy: false,
|
||||
NoCache: false,
|
||||
NoUpload: true,
|
||||
NeedMs: false,
|
||||
DefaultRoot: "0",
|
||||
CheckStatus: false,
|
||||
Alert: "",
|
||||
NoOverwriteUpload: false,
|
||||
}
|
||||
|
||||
func init() {
|
||||
op.RegisterDriver(func() driver.Driver {
|
||||
return &FebBox{}
|
||||
})
|
||||
}
|
drivers/febbox/oauth2.go (new file, 88 lines)
@ -0,0 +1,88 @@
|
||||
package febbox
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"golang.org/x/oauth2"
|
||||
"golang.org/x/oauth2/clientcredentials"
|
||||
)
|
||||
|
||||
type customTokenSource struct {
|
||||
config *clientcredentials.Config
|
||||
ctx context.Context
|
||||
refreshToken string
|
||||
}
|
||||
|
||||
func (c *customTokenSource) Token() (*oauth2.Token, error) {
|
||||
v := url.Values{}
|
||||
if c.refreshToken != "" {
|
||||
v.Set("grant_type", "refresh_token")
|
||||
v.Set("refresh_token", c.refreshToken)
|
||||
} else {
|
||||
v.Set("grant_type", "client_credentials")
|
||||
}
|
||||
|
||||
v.Set("client_id", c.config.ClientID)
|
||||
v.Set("client_secret", c.config.ClientSecret)
|
||||
|
||||
req, err := http.NewRequest("POST", c.config.TokenURL, strings.NewReader(v.Encode()))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
|
||||
|
||||
resp, err := http.DefaultClient.Do(req.WithContext(c.ctx))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return nil, errors.New("oauth2: cannot fetch token")
|
||||
}
|
||||
|
||||
var tokenResp struct {
|
||||
Code int `json:"code"`
|
||||
Msg string `json:"msg"`
|
||||
Data struct {
|
||||
AccessToken string `json:"access_token"`
|
||||
ExpiresIn int64 `json:"expires_in"`
|
||||
TokenType string `json:"token_type"`
|
||||
Scope string `json:"scope"`
|
||||
RefreshToken string `json:"refresh_token"`
|
||||
} `json:"data"`
|
||||
}
|
||||
|
||||
if err := json.NewDecoder(resp.Body).Decode(&tokenResp); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if tokenResp.Code != 1 {
|
||||
return nil, errors.New("oauth2: server response error")
|
||||
}
|
||||
|
||||
c.refreshToken = tokenResp.Data.RefreshToken
|
||||
|
||||
token := &oauth2.Token{
|
||||
AccessToken: tokenResp.Data.AccessToken,
|
||||
TokenType: tokenResp.Data.TokenType,
|
||||
RefreshToken: tokenResp.Data.RefreshToken,
|
||||
Expiry: time.Now().Add(time.Duration(tokenResp.Data.ExpiresIn) * time.Second),
|
||||
}
|
||||
|
||||
return token, nil
|
||||
}
|
||||
|
||||
func (d *FebBox) initializeOAuth2Token(ctx context.Context, oauth2Config *clientcredentials.Config, refreshToken string) {
|
||||
d.oauth2Token = oauth2.ReuseTokenSource(nil, &customTokenSource{
|
||||
config: oauth2Config,
|
||||
ctx: ctx,
|
||||
refreshToken: refreshToken,
|
||||
})
|
||||
}
|
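initializeOAuth2Token wraps customTokenSource in oauth2.ReuseTokenSource so a cached token is reused until it expires and the rotated refresh token is kept. For contrast, this is what the stock client-credentials flow looks like; it would not understand FebBox's wrapped {code,msg,data} response, which is exactly why the custom TokenSource above exists. Endpoint and credentials are placeholders.

package main

import (
	"context"
	"fmt"

	"golang.org/x/oauth2"
	"golang.org/x/oauth2/clientcredentials"
)

// Stock client-credentials flow for comparison; the febbox driver swaps in
// customTokenSource but keeps the same oauth2.TokenSource interface.
func main() {
	conf := &clientcredentials.Config{
		ClientID:     "example-id",     // placeholder
		ClientSecret: "example-secret", // placeholder
		TokenURL:     "https://example.com/oauth/token",
		AuthStyle:    oauth2.AuthStyleInParams,
	}
	tok, err := conf.Token(context.Background())
	if err != nil {
		fmt.Println("token error:", err)
		return
	}
	fmt.Println(tok.TokenType, tok.Expiry)
}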
drivers/febbox/types.go (new file, 123 lines)
@ -0,0 +1,123 @@
|
||||
package febbox
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
hash_extend "github.com/alist-org/alist/v3/pkg/utils/hash"
|
||||
"strconv"
|
||||
"time"
|
||||
)
|
||||
|
||||
type ErrResp struct {
|
||||
ErrorCode int64 `json:"code"`
|
||||
ErrorMsg string `json:"msg"`
|
||||
ServerRunTime float64 `json:"server_runtime"`
|
||||
ServerName string `json:"server_name"`
|
||||
}
|
||||
|
||||
func (e *ErrResp) IsError() bool {
|
||||
return e.ErrorCode != 0 || e.ErrorMsg != "" || e.ServerRunTime != 0 || e.ServerName != ""
|
||||
}
|
||||
|
||||
func (e *ErrResp) Error() string {
|
||||
return fmt.Sprintf("ErrorCode: %d ,Error: %s ,ServerRunTime: %f ,ServerName: %s", e.ErrorCode, e.ErrorMsg, e.ServerRunTime, e.ServerName)
|
||||
}
|
||||
|
||||
type FileListResp struct {
|
||||
Code int `json:"code"`
|
||||
Msg string `json:"msg"`
|
||||
Data struct {
|
||||
FileList []File `json:"file_list"`
|
||||
ShowType string `json:"show_type"`
|
||||
} `json:"data"`
|
||||
}
|
||||
|
||||
type Rules struct {
|
||||
AllowCopy int64 `json:"allow_copy"`
|
||||
AllowDelete int64 `json:"allow_delete"`
|
||||
AllowDownload int64 `json:"allow_download"`
|
||||
AllowComment int64 `json:"allow_comment"`
|
||||
HideLocation int64 `json:"hide_location"`
|
||||
}
|
||||
|
||||
type File struct {
|
||||
Fid int64 `json:"fid"`
|
||||
UID int64 `json:"uid"`
|
||||
FileSize int64 `json:"file_size"`
|
||||
Path string `json:"path"`
|
||||
FileName string `json:"file_name"`
|
||||
Ext string `json:"ext"`
|
||||
AddTime int64 `json:"add_time"`
|
||||
FileCreateTime int64 `json:"file_create_time"`
|
||||
FileUpdateTime int64 `json:"file_update_time"`
|
||||
ParentID int64 `json:"parent_id"`
|
||||
UpdateTime int64 `json:"update_time"`
|
||||
LastOpenTime int64 `json:"last_open_time"`
|
||||
IsDir int64 `json:"is_dir"`
|
||||
Epub int64 `json:"epub"`
|
||||
IsMusicList int64 `json:"is_music_list"`
|
||||
OssFid int64 `json:"oss_fid"`
|
||||
Faststart int64 `json:"faststart"`
|
||||
HasVideoQuality int64 `json:"has_video_quality"`
|
||||
TotalDownload int64 `json:"total_download"`
|
||||
Status int64 `json:"status"`
|
||||
Remark string `json:"remark"`
|
||||
OldHash string `json:"old_hash"`
|
||||
Hash string `json:"hash"`
|
||||
HashType string `json:"hash_type"`
|
||||
FromUID int64 `json:"from_uid"`
|
||||
FidOrg int64 `json:"fid_org"`
|
||||
ShareID int64 `json:"share_id"`
|
||||
InvitePermission int64 `json:"invite_permission"`
|
||||
ThumbSmall string `json:"thumb_small"`
|
||||
ThumbSmallWidth int64 `json:"thumb_small_width"`
|
||||
ThumbSmallHeight int64 `json:"thumb_small_height"`
|
||||
Thumb string `json:"thumb"`
|
||||
ThumbWidth int64 `json:"thumb_width"`
|
||||
ThumbHeight int64 `json:"thumb_height"`
|
||||
ThumbBig string `json:"thumb_big"`
|
||||
ThumbBigWidth int64 `json:"thumb_big_width"`
|
||||
ThumbBigHeight int64 `json:"thumb_big_height"`
|
||||
IsCustomThumb int64 `json:"is_custom_thumb"`
|
||||
Photos int64 `json:"photos"`
|
||||
IsAlbum int64 `json:"is_album"`
|
||||
ReadOnly int64 `json:"read_only"`
|
||||
Rules Rules `json:"rules"`
|
||||
IsShared int64 `json:"is_shared"`
|
||||
}
|
||||
|
||||
func fileToObj(f File) *model.ObjThumb {
|
||||
return &model.ObjThumb{
|
||||
Object: model.Object{
|
||||
ID: strconv.FormatInt(f.Fid, 10),
|
||||
Name: f.FileName,
|
||||
Size: f.FileSize,
|
||||
Ctime: time.Unix(f.FileCreateTime, 0),
|
||||
Modified: time.Unix(f.FileUpdateTime, 0),
|
||||
IsFolder: f.IsDir == 1,
|
||||
HashInfo: utils.NewHashInfo(hash_extend.GCID, f.Hash),
|
||||
},
|
||||
Thumbnail: model.Thumbnail{
|
||||
Thumbnail: f.Thumb,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
type FileDownloadResp struct {
|
||||
Code int `json:"code"`
|
||||
Msg string `json:"msg"`
|
||||
Data []struct {
|
||||
Error int `json:"error"`
|
||||
DownloadURL string `json:"download_url"`
|
||||
Hash string `json:"hash"`
|
||||
HashType string `json:"hash_type"`
|
||||
Fid int `json:"fid"`
|
||||
FileName string `json:"file_name"`
|
||||
ParentID int `json:"parent_id"`
|
||||
FileSize int `json:"file_size"`
|
||||
Ext string `json:"ext"`
|
||||
Thumb string `json:"thumb"`
|
||||
VipLink int `json:"vip_link"`
|
||||
} `json:"data"`
|
||||
}
|
drivers/febbox/util.go (new file, 224 lines)
@ -0,0 +1,224 @@
|
||||
package febbox
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
"github.com/go-resty/resty/v2"
|
||||
"net/http"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
func (d *FebBox) refreshTokenByOAuth2() error {
|
||||
token, err := d.oauth2Token.Token()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
d.Status = "work"
|
||||
d.accessToken = token.AccessToken
|
||||
d.Addition.RefreshToken = token.RefreshToken
|
||||
op.MustSaveDriverStorage(d)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *FebBox) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
||||
req := base.RestyClient.R()
|
||||
// 使用oauth2 获取 access_token
|
||||
token, err := d.oauth2Token.Token()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
req.SetAuthScheme(token.TokenType).SetAuthToken(token.AccessToken)
|
||||
|
||||
if callback != nil {
|
||||
callback(req)
|
||||
}
|
||||
if resp != nil {
|
||||
req.SetResult(resp)
|
||||
}
|
||||
var e ErrResp
|
||||
req.SetError(&e)
|
||||
res, err := req.Execute(method, url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
switch e.ErrorCode {
|
||||
case 0:
|
||||
return res.Body(), nil
|
||||
case 1:
|
||||
return res.Body(), nil
|
||||
case -10001:
|
||||
if e.ServerName != "" {
|
||||
// access_token 过期
|
||||
if err = d.refreshTokenByOAuth2(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return d.request(url, method, callback, resp)
|
||||
} else {
|
||||
return nil, errors.New(e.Error())
|
||||
}
|
||||
default:
|
||||
return nil, errors.New(e.Error())
|
||||
}
|
||||
}
|
||||
|
||||
func (d *FebBox) getFilesList(id string) ([]File, error) {
|
||||
if d.PageSize <= 0 {
|
||||
d.PageSize = 100
|
||||
}
|
||||
res, err := d.listWithLimit(id, d.PageSize)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return *res, nil
|
||||
}
|
||||
|
||||
func (d *FebBox) listWithLimit(dirID string, pageLimit int64) (*[]File, error) {
|
||||
var files []File
|
||||
page := int64(1)
|
||||
for {
|
||||
result, err := d.getFiles(dirID, page, pageLimit)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
files = append(files, *result...)
|
||||
if int64(len(*result)) < pageLimit {
|
||||
break
|
||||
} else {
|
||||
page++
|
||||
}
|
||||
}
|
||||
return &files, nil
|
||||
}
|
||||
|
||||
func (d *FebBox) getFiles(dirID string, page, pageLimit int64) (*[]File, error) {
|
||||
var fileList FileListResp
|
||||
queryParams := map[string]string{
|
||||
"module": "file_list",
|
||||
"parent_id": dirID,
|
||||
"page": strconv.FormatInt(page, 10),
|
||||
"pagelimit": strconv.FormatInt(pageLimit, 10),
|
||||
"order": d.Addition.SortRule,
|
||||
}
|
||||
|
||||
res, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetMultipartFormData(queryParams)
|
||||
}, &fileList)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err = json.Unmarshal(res, &fileList); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &fileList.Data.FileList, nil
|
||||
}
|
||||
|
||||
func (d *FebBox) getDownloadLink(id string, ip string) (string, error) {
|
||||
var fileDownloadResp FileDownloadResp
|
||||
queryParams := map[string]string{
|
||||
"module": "file_get_download_url",
|
||||
"fids[]": id,
|
||||
"ip": ip,
|
||||
}
|
||||
|
||||
res, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetMultipartFormData(queryParams)
|
||||
}, &fileDownloadResp)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
if err = json.Unmarshal(res, &fileDownloadResp); err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return fileDownloadResp.Data[0].DownloadURL, nil
|
||||
}
|
||||
|
||||
func (d *FebBox) makeDir(id string, name string) error {
|
||||
queryParams := map[string]string{
|
||||
"module": "create_dir",
|
||||
"parent_id": id,
|
||||
"name": name,
|
||||
}
|
||||
|
||||
_, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetMultipartFormData(queryParams)
|
||||
}, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *FebBox) move(id string, id2 string) error {
|
||||
queryParams := map[string]string{
|
||||
"module": "file_move",
|
||||
"fids[]": id,
|
||||
"to": id2,
|
||||
}
|
||||
|
||||
_, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetMultipartFormData(queryParams)
|
||||
}, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *FebBox) rename(id string, name string) error {
|
||||
queryParams := map[string]string{
|
||||
"module": "file_rename",
|
||||
"fid": id,
|
||||
"name": name,
|
||||
}
|
||||
|
||||
_, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetMultipartFormData(queryParams)
|
||||
}, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *FebBox) copy(id string, id2 string) error {
|
||||
queryParams := map[string]string{
|
||||
"module": "file_copy",
|
||||
"fids[]": id,
|
||||
"to": id2,
|
||||
}
|
||||
|
||||
_, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetMultipartFormData(queryParams)
|
||||
}, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *FebBox) remove(id string) error {
|
||||
queryParams := map[string]string{
|
||||
"module": "file_delete",
|
||||
"fids[]": id,
|
||||
}
|
||||
|
||||
_, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetMultipartFormData(queryParams)
|
||||
}, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
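Every FebBox operation above posts multipart form data with a "module" field to the single https://api.febbox.com/oauth endpoint; d.request additionally injects the OAuth bearer token and retries once when it sees code -10001. A bare-resty sketch of that calling convention with an illustrative token; the module and fields mirror getFiles above.

package main

import (
	"fmt"
	"net/http"

	"github.com/go-resty/resty/v2"
)

// Sketch of the FebBox "module" calling convention; the token is a placeholder.
func main() {
	res, err := resty.New().R().
		SetAuthScheme("Bearer").
		SetAuthToken("example-access-token").
		SetMultipartFormData(map[string]string{
			"module":    "file_list", // one of the modules used by the driver
			"parent_id": "0",
			"page":      "1",
			"pagelimit": "100",
			"order":     "name_asc",
		}).
		Execute(http.MethodPost, "https://api.febbox.com/oauth")
	if err != nil {
		fmt.Println("request error:", err)
		return
	}
	fmt.Println(res.StatusCode())
}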
@ -39,7 +39,7 @@ func (d *FTP) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]m
|
||||
if err := d.login(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
entries, err := d.conn.List(dir.GetPath())
|
||||
entries, err := d.conn.List(encode(dir.GetPath(), d.Encoding))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -49,7 +49,7 @@ func (d *FTP) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]m
|
||||
continue
|
||||
}
|
||||
f := model.Object{
|
||||
Name: entry.Name,
|
||||
Name: decode(entry.Name, d.Encoding),
|
||||
Size: int64(entry.Size),
|
||||
Modified: entry.Time,
|
||||
IsFolder: entry.Type == ftp.EntryTypeFolder,
|
||||
@ -64,7 +64,7 @@ func (d *FTP) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*m
|
||||
return nil, err
|
||||
}
|
||||
|
||||
r := NewFileReader(d.conn, file.GetPath(), file.GetSize())
|
||||
r := NewFileReader(d.conn, encode(file.GetPath(), d.Encoding), file.GetSize())
|
||||
link := &model.Link{
|
||||
MFile: r,
|
||||
}
|
||||
@ -75,21 +75,27 @@ func (d *FTP) MakeDir(ctx context.Context, parentDir model.Obj, dirName string)
|
||||
if err := d.login(); err != nil {
|
||||
return err
|
||||
}
|
||||
return d.conn.MakeDir(stdpath.Join(parentDir.GetPath(), dirName))
|
||||
return d.conn.MakeDir(encode(stdpath.Join(parentDir.GetPath(), dirName), d.Encoding))
|
||||
}
|
||||
|
||||
func (d *FTP) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
if err := d.login(); err != nil {
|
||||
return err
|
||||
}
|
||||
return d.conn.Rename(srcObj.GetPath(), stdpath.Join(dstDir.GetPath(), srcObj.GetName()))
|
||||
return d.conn.Rename(
|
||||
encode(srcObj.GetPath(), d.Encoding),
|
||||
encode(stdpath.Join(dstDir.GetPath(), srcObj.GetName()), d.Encoding),
|
||||
)
|
||||
}
|
||||
|
||||
func (d *FTP) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||
if err := d.login(); err != nil {
|
||||
return err
|
||||
}
|
||||
return d.conn.Rename(srcObj.GetPath(), stdpath.Join(stdpath.Dir(srcObj.GetPath()), newName))
|
||||
return d.conn.Rename(
|
||||
encode(srcObj.GetPath(), d.Encoding),
|
||||
encode(stdpath.Join(stdpath.Dir(srcObj.GetPath()), newName), d.Encoding),
|
||||
)
|
||||
}
|
||||
|
||||
func (d *FTP) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
@ -100,10 +106,11 @@ func (d *FTP) Remove(ctx context.Context, obj model.Obj) error {
|
||||
if err := d.login(); err != nil {
|
||||
return err
|
||||
}
|
||||
path := encode(obj.GetPath(), d.Encoding)
|
||||
if obj.IsDir() {
|
||||
return d.conn.RemoveDirRecur(obj.GetPath())
|
||||
return d.conn.RemoveDirRecur(path)
|
||||
} else {
|
||||
return d.conn.Delete(obj.GetPath())
|
||||
return d.conn.Delete(path)
|
||||
}
|
||||
}
|
||||
|
||||
@ -112,7 +119,8 @@ func (d *FTP) Put(ctx context.Context, dstDir model.Obj, stream model.FileStream
|
||||
return err
|
||||
}
|
||||
// TODO: support cancel
|
||||
return d.conn.Stor(stdpath.Join(dstDir.GetPath(), stream.GetName()), stream)
|
||||
path := stdpath.Join(dstDir.GetPath(), stream.GetName())
|
||||
return d.conn.Stor(encode(path, d.Encoding), stream)
|
||||
}
|
||||
|
||||
var _ driver.Driver = (*FTP)(nil)
|
||||
|
@ -3,10 +3,28 @@ package ftp
|
||||
import (
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
"github.com/axgle/mahonia"
|
||||
)
|
||||
|
||||
func encode(str string, encoding string) string {
	if encoding == "" {
		return str
	}
	encoder := mahonia.NewEncoder(encoding)
	return encoder.ConvertString(str)
}

func decode(str string, encoding string) string {
	if encoding == "" {
		return str
	}
	decoder := mahonia.NewDecoder(encoding)
	return decoder.ConvertString(str)
}

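The new Encoding option lets path names be transcoded for FTP servers that store names in a legacy code page. A round-trip sketch for the helpers above, assuming the option is set to "gbk".

package main

import (
	"fmt"

	"github.com/axgle/mahonia"
)

// Round-trip for the encode/decode helpers, assuming Encoding = "gbk".
func main() {
	name := "照片.jpg" // a file name containing non-ASCII characters
	onWire := mahonia.NewEncoder("gbk").ConvertString(name) // what gets sent to the server
	back := mahonia.NewDecoder("gbk").ConvertString(onWire) // what List shows the user
	fmt.Println(len(onWire), back == name)
}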
type Addition struct {
|
||||
Address string `json:"address" required:"true"`
|
||||
Encoding string `json:"encoding" required:"true"`
|
||||
Username string `json:"username" required:"true"`
|
||||
Password string `json:"password" required:"true"`
|
||||
driver.RootPath
|
||||
|
drivers/github/driver.go (new file, 929 lines)
@ -0,0 +1,929 @@
|
||||
package github
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/base64"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
stdpath "path"
|
||||
"strings"
|
||||
"sync"
|
||||
"text/template"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/errs"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/go-resty/resty/v2"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
type Github struct {
|
||||
model.Storage
|
||||
Addition
|
||||
client *resty.Client
|
||||
mkdirMsgTmpl *template.Template
|
||||
deleteMsgTmpl *template.Template
|
||||
putMsgTmpl *template.Template
|
||||
renameMsgTmpl *template.Template
|
||||
copyMsgTmpl *template.Template
|
||||
moveMsgTmpl *template.Template
|
||||
isOnBranch bool
|
||||
commitMutex sync.Mutex
|
||||
}
|
||||
|
||||
func (d *Github) Config() driver.Config {
|
||||
return config
|
||||
}
|
||||
|
||||
func (d *Github) GetAddition() driver.Additional {
|
||||
return &d.Addition
|
||||
}
|
||||
|
||||
func (d *Github) Init(ctx context.Context) error {
|
||||
d.RootFolderPath = utils.FixAndCleanPath(d.RootFolderPath)
|
||||
if d.CommitterName != "" && d.CommitterEmail == "" {
|
||||
return errors.New("committer email is required")
|
||||
}
|
||||
if d.CommitterName == "" && d.CommitterEmail != "" {
|
||||
return errors.New("committer name is required")
|
||||
}
|
||||
if d.AuthorName != "" && d.AuthorEmail == "" {
|
||||
return errors.New("author email is required")
|
||||
}
|
||||
if d.AuthorName == "" && d.AuthorEmail != "" {
|
||||
return errors.New("author name is required")
|
||||
}
|
||||
var err error
|
||||
d.mkdirMsgTmpl, err = template.New("mkdirCommitMsgTemplate").Parse(d.MkdirCommitMsg)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
d.deleteMsgTmpl, err = template.New("deleteCommitMsgTemplate").Parse(d.DeleteCommitMsg)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
d.putMsgTmpl, err = template.New("putCommitMsgTemplate").Parse(d.PutCommitMsg)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
d.renameMsgTmpl, err = template.New("renameCommitMsgTemplate").Parse(d.RenameCommitMsg)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
d.copyMsgTmpl, err = template.New("copyCommitMsgTemplate").Parse(d.CopyCommitMsg)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
d.moveMsgTmpl, err = template.New("moveCommitMsgTemplate").Parse(d.MoveCommitMsg)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
d.client = base.NewRestyClient().
|
||||
SetHeader("Accept", "application/vnd.github.object+json").
|
||||
SetHeader("Authorization", "Bearer "+d.Token).
|
||||
SetHeader("X-GitHub-Api-Version", "2022-11-28").
|
||||
SetLogger(log.StandardLogger()).
|
||||
SetDebug(false)
|
||||
if d.Ref == "" {
|
||||
repo, err := d.getRepo()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
d.Ref = repo.DefaultBranch
|
||||
d.isOnBranch = true
|
||||
} else {
|
||||
_, err = d.getBranchHead()
|
||||
d.isOnBranch = err == nil
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
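Init parses the six commit-message options with text/template, and the commit paths below fill them with MessageTemplateVars. A standalone sketch of that flow; the struct here is an illustrative stand-in whose field names are taken from the calls later in this file, and the template string is an example value for MkdirCommitMsg, not a default shipped by the driver.

package main

import (
	"fmt"
	"os"
	"text/template"
)

// Illustrative stand-in for the driver's MessageTemplateVars.
type MessageTemplateVars struct {
	UserName   string
	ObjName    string
	ObjPath    string
	ParentName string
	ParentPath string
	TargetName string
	TargetPath string
}

func main() {
	// Example MkdirCommitMsg template; the real value comes from the driver's Addition.
	tmpl := template.Must(template.New("mkdirCommitMsgTemplate").
		Parse("mkdir {{.ObjPath}} (requested by {{.UserName}})"))
	_ = tmpl.Execute(os.Stdout, &MessageTemplateVars{
		UserName: "alice",
		ObjPath:  "/docs/new-folder",
	})
	fmt.Println()
}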
func (d *Github) Drop(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Github) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||
obj, err := d.get(dir.GetPath())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if obj.Entries == nil {
|
||||
return nil, errs.NotFolder
|
||||
}
|
||||
if len(obj.Entries) >= 1000 {
|
||||
tree, err := d.getTree(obj.Sha)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if tree.Truncated {
|
||||
return nil, fmt.Errorf("tree %s is truncated", dir.GetPath())
|
||||
}
|
||||
ret := make([]model.Obj, 0, len(tree.Trees))
|
||||
for _, t := range tree.Trees {
|
||||
if t.Path != ".gitkeep" {
|
||||
ret = append(ret, t.toModelObj())
|
||||
}
|
||||
}
|
||||
return ret, nil
|
||||
} else {
|
||||
ret := make([]model.Obj, 0, len(obj.Entries))
|
||||
for _, entry := range obj.Entries {
|
||||
if entry.Name != ".gitkeep" {
|
||||
ret = append(ret, entry.toModelObj())
|
||||
}
|
||||
}
|
||||
return ret, nil
|
||||
}
|
||||
}
|
||||
|
||||
func (d *Github) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
obj, err := d.get(file.GetPath())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if obj.Type == "submodule" {
|
||||
return nil, errors.New("cannot download a submodule")
|
||||
}
|
||||
return &model.Link{
|
||||
URL: obj.DownloadURL,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (d *Github) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||
if !d.isOnBranch {
|
||||
return errors.New("cannot write to non-branch reference")
|
||||
}
|
||||
d.commitMutex.Lock()
|
||||
defer d.commitMutex.Unlock()
|
||||
parent, err := d.get(parentDir.GetPath())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if parent.Entries == nil {
|
||||
return errs.NotFolder
|
||||
}
|
||||
// if parent folder contains .gitkeep only, mark it and delete .gitkeep later
|
||||
gitKeepSha := ""
|
||||
if len(parent.Entries) == 1 && parent.Entries[0].Name == ".gitkeep" {
|
||||
gitKeepSha = parent.Entries[0].Sha
|
||||
}
|
||||
|
||||
commitMessage, err := getMessage(d.mkdirMsgTmpl, &MessageTemplateVars{
|
||||
UserName: getUsername(ctx),
|
||||
ObjName: dirName,
|
||||
ObjPath: stdpath.Join(parentDir.GetPath(), dirName),
|
||||
ParentName: parentDir.GetName(),
|
||||
ParentPath: parentDir.GetPath(),
|
||||
}, "mkdir")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err = d.createGitKeep(stdpath.Join(parentDir.GetPath(), dirName), commitMessage); err != nil {
|
||||
return err
|
||||
}
|
||||
if gitKeepSha != "" {
|
||||
err = d.delete(stdpath.Join(parentDir.GetPath(), ".gitkeep"), gitKeepSha, commitMessage)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *Github) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
if !d.isOnBranch {
|
||||
return errors.New("cannot write to non-branch reference")
|
||||
}
|
||||
if strings.HasPrefix(dstDir.GetPath(), srcObj.GetPath()) {
|
||||
return errors.New("cannot move parent dir to child")
|
||||
}
|
||||
d.commitMutex.Lock()
|
||||
defer d.commitMutex.Unlock()
|
||||
|
||||
var rootSha string
|
||||
if strings.HasPrefix(dstDir.GetPath(), stdpath.Dir(srcObj.GetPath())) { // /aa/1 -> /aa/bb/
|
||||
dstOldSha, dstNewSha, ancestorOldSha, srcParentTree, err := d.copyWithoutRenewTree(srcObj, dstDir)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
srcParentPath := stdpath.Dir(srcObj.GetPath())
|
||||
dstRest := dstDir.GetPath()[len(srcParentPath):]
|
||||
if dstRest[0] == '/' {
|
||||
dstRest = dstRest[1:]
|
||||
}
|
||||
dstNextName, _, _ := strings.Cut(dstRest, "/")
|
||||
dstNextPath := stdpath.Join(srcParentPath, dstNextName)
|
||||
dstNextTreeSha, err := d.renewParentTrees(dstDir.GetPath(), dstOldSha, dstNewSha, dstNextPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
var delSrc, dstNextTree *TreeObjReq = nil, nil
|
||||
for _, t := range srcParentTree.Trees {
|
||||
if t.Path == dstNextName {
|
||||
dstNextTree = &t.TreeObjReq
|
||||
dstNextTree.Sha = dstNextTreeSha
|
||||
}
|
||||
if t.Path == srcObj.GetName() {
|
||||
delSrc = &t.TreeObjReq
|
||||
delSrc.Sha = nil
|
||||
}
|
||||
if delSrc != nil && dstNextTree != nil {
|
||||
break
|
||||
}
|
||||
}
|
||||
if delSrc == nil || dstNextTree == nil {
|
||||
return errs.ObjectNotFound
|
||||
}
|
||||
ancestorNewSha, err := d.newTree(ancestorOldSha, []interface{}{*delSrc, *dstNextTree})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rootSha, err = d.renewParentTrees(srcParentPath, ancestorOldSha, ancestorNewSha, "/")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
} else if strings.HasPrefix(srcObj.GetPath(), dstDir.GetPath()) { // /aa/bb/1 -> /aa/
|
||||
srcParentPath := stdpath.Dir(srcObj.GetPath())
|
||||
srcParentTree, srcParentOldSha, err := d.getTreeDirectly(srcParentPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
var src *TreeObjReq = nil
|
||||
for _, t := range srcParentTree.Trees {
|
||||
if t.Path == srcObj.GetName() {
|
||||
if t.Type == "commit" {
|
||||
return errors.New("cannot move a submodule")
|
||||
}
|
||||
src = &t.TreeObjReq
|
||||
break
|
||||
}
|
||||
}
|
||||
if src == nil {
|
||||
return errs.ObjectNotFound
|
||||
}
|
||||
|
||||
delSrc := *src
|
||||
delSrc.Sha = nil
|
||||
delSrcTree := make([]interface{}, 0, 2)
|
||||
delSrcTree = append(delSrcTree, delSrc)
|
||||
if len(srcParentTree.Trees) == 1 {
|
||||
delSrcTree = append(delSrcTree, map[string]string{
|
||||
"path": ".gitkeep",
|
||||
"mode": "100644",
|
||||
"type": "blob",
|
||||
"content": "",
|
||||
})
|
||||
}
|
||||
srcParentNewSha, err := d.newTree(srcParentOldSha, delSrcTree)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
srcRest := srcObj.GetPath()[len(dstDir.GetPath()):]
|
||||
if srcRest[0] == '/' {
|
||||
srcRest = srcRest[1:]
|
||||
}
|
||||
srcNextName, _, ok := strings.Cut(srcRest, "/")
|
||||
if !ok { // /aa/1 -> /aa/
|
||||
return errors.New("cannot move in place")
|
||||
}
|
||||
srcNextPath := stdpath.Join(dstDir.GetPath(), srcNextName)
|
||||
srcNextTreeSha, err := d.renewParentTrees(srcParentPath, srcParentOldSha, srcParentNewSha, srcNextPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ancestorTree, ancestorOldSha, err := d.getTreeDirectly(dstDir.GetPath())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
var srcNextTree *TreeObjReq = nil
|
||||
for _, t := range ancestorTree.Trees {
|
||||
if t.Path == srcNextName {
|
||||
srcNextTree = &t.TreeObjReq
|
||||
srcNextTree.Sha = srcNextTreeSha
|
||||
break
|
||||
}
|
||||
}
|
||||
if srcNextTree == nil {
|
||||
return errs.ObjectNotFound
|
||||
}
|
||||
ancestorNewSha, err := d.newTree(ancestorOldSha, []interface{}{*srcNextTree, *src})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rootSha, err = d.renewParentTrees(dstDir.GetPath(), ancestorOldSha, ancestorNewSha, "/")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
} else { // /aa/1 -> /bb/
|
||||
// do copy
|
||||
dstOldSha, dstNewSha, srcParentOldSha, srcParentTree, err := d.copyWithoutRenewTree(srcObj, dstDir)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// delete src object and create new tree
|
||||
var srcNewTree *TreeObjReq = nil
|
||||
for _, t := range srcParentTree.Trees {
|
||||
if t.Path == srcObj.GetName() {
|
||||
srcNewTree = &t.TreeObjReq
|
||||
srcNewTree.Sha = nil
|
||||
break
|
||||
}
|
||||
}
|
||||
if srcNewTree == nil {
|
||||
return errs.ObjectNotFound
|
||||
}
|
||||
delSrcTree := make([]interface{}, 0, 2)
|
||||
delSrcTree = append(delSrcTree, *srcNewTree)
|
||||
if len(srcParentTree.Trees) == 1 {
|
||||
delSrcTree = append(delSrcTree, map[string]string{
|
||||
"path": ".gitkeep",
|
||||
"mode": "100644",
|
||||
"type": "blob",
|
||||
"content": "",
|
||||
})
|
||||
}
|
||||
srcParentNewSha, err := d.newTree(srcParentOldSha, delSrcTree)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// renew but the common ancestor of srcPath and dstPath
|
||||
ancestor, srcChildName, dstChildName, _, _ := getPathCommonAncestor(srcObj.GetPath(), dstDir.GetPath())
|
||||
dstNextTreeSha, err := d.renewParentTrees(dstDir.GetPath(), dstOldSha, dstNewSha, stdpath.Join(ancestor, dstChildName))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
srcNextTreeSha, err := d.renewParentTrees(stdpath.Dir(srcObj.GetPath()), srcParentOldSha, srcParentNewSha, stdpath.Join(ancestor, srcChildName))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// renew the tree of the last common ancestor
|
||||
ancestorTree, ancestorOldSha, err := d.getTreeDirectly(ancestor)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
newTree := make([]interface{}, 2)
|
||||
srcBind := false
|
||||
dstBind := false
|
||||
for _, t := range ancestorTree.Trees {
|
||||
if t.Path == srcChildName {
|
||||
t.Sha = srcNextTreeSha
|
||||
newTree[0] = t.TreeObjReq
|
||||
srcBind = true
|
||||
}
|
||||
if t.Path == dstChildName {
|
||||
t.Sha = dstNextTreeSha
|
||||
newTree[1] = t.TreeObjReq
|
||||
dstBind = true
|
||||
}
|
||||
if srcBind && dstBind {
|
||||
break
|
||||
}
|
||||
}
|
||||
if !srcBind || !dstBind {
|
||||
return errs.ObjectNotFound
|
||||
}
|
||||
ancestorNewSha, err := d.newTree(ancestorOldSha, newTree)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// renew until root
|
||||
rootSha, err = d.renewParentTrees(ancestor, ancestorOldSha, ancestorNewSha, "/")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// commit
|
||||
message, err := getMessage(d.moveMsgTmpl, &MessageTemplateVars{
|
||||
UserName: getUsername(ctx),
|
||||
ObjName: srcObj.GetName(),
|
||||
ObjPath: srcObj.GetPath(),
|
||||
ParentName: stdpath.Base(stdpath.Dir(srcObj.GetPath())),
|
||||
ParentPath: stdpath.Dir(srcObj.GetPath()),
|
||||
TargetName: stdpath.Base(dstDir.GetPath()),
|
||||
TargetPath: dstDir.GetPath(),
|
||||
}, "move")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return d.commit(message, rootSha)
|
||||
}
|
||||
|
||||
func (d *Github) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||
if !d.isOnBranch {
|
||||
return errors.New("cannot write to non-branch reference")
|
||||
}
|
||||
d.commitMutex.Lock()
|
||||
defer d.commitMutex.Unlock()
|
||||
parentDir := stdpath.Dir(srcObj.GetPath())
|
||||
tree, _, err := d.getTreeDirectly(parentDir)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
newTree := make([]interface{}, 2)
|
||||
operated := false
|
||||
for _, t := range tree.Trees {
|
||||
if t.Path == srcObj.GetName() {
|
||||
if t.Type == "commit" {
|
||||
return errors.New("cannot rename a submodule")
|
||||
}
|
||||
delCopy := t.TreeObjReq
|
||||
delCopy.Sha = nil
|
||||
newTree[0] = delCopy
|
||||
t.Path = newName
|
||||
newTree[1] = t.TreeObjReq
|
||||
operated = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !operated {
|
||||
return errs.ObjectNotFound
|
||||
}
|
||||
newSha, err := d.newTree(tree.Sha, newTree)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rootSha, err := d.renewParentTrees(parentDir, tree.Sha, newSha, "/")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
message, err := getMessage(d.renameMsgTmpl, &MessageTemplateVars{
|
||||
UserName: getUsername(ctx),
|
||||
ObjName: srcObj.GetName(),
|
||||
ObjPath: srcObj.GetPath(),
|
||||
ParentName: stdpath.Base(parentDir),
|
||||
ParentPath: parentDir,
|
||||
TargetName: newName,
|
||||
TargetPath: stdpath.Join(parentDir, newName),
|
||||
}, "rename")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return d.commit(message, rootSha)
|
||||
}
|
||||
|
||||
func (d *Github) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
if !d.isOnBranch {
|
||||
return errors.New("cannot write to non-branch reference")
|
||||
}
|
||||
if strings.HasPrefix(dstDir.GetPath(), srcObj.GetPath()) {
|
||||
return errors.New("cannot copy parent dir to child")
|
||||
}
|
||||
d.commitMutex.Lock()
|
||||
defer d.commitMutex.Unlock()
|
||||
|
||||
dstSha, newSha, _, _, err := d.copyWithoutRenewTree(srcObj, dstDir)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rootSha, err := d.renewParentTrees(dstDir.GetPath(), dstSha, newSha, "/")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
message, err := getMessage(d.copyMsgTmpl, &MessageTemplateVars{
|
||||
UserName: getUsername(ctx),
|
||||
ObjName: srcObj.GetName(),
|
||||
ObjPath: srcObj.GetPath(),
|
||||
ParentName: stdpath.Base(stdpath.Dir(srcObj.GetPath())),
|
||||
ParentPath: stdpath.Dir(srcObj.GetPath()),
|
||||
TargetName: stdpath.Base(dstDir.GetPath()),
|
||||
TargetPath: dstDir.GetPath(),
|
||||
}, "copy")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return d.commit(message, rootSha)
|
||||
}
|
||||
|
||||
func (d *Github) Remove(ctx context.Context, obj model.Obj) error {
|
||||
if !d.isOnBranch {
|
||||
return errors.New("cannot write to non-branch reference")
|
||||
}
|
||||
d.commitMutex.Lock()
|
||||
defer d.commitMutex.Unlock()
|
||||
parentDir := stdpath.Dir(obj.GetPath())
|
||||
tree, treeSha, err := d.getTreeDirectly(parentDir)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
var del *TreeObjReq = nil
|
||||
for _, t := range tree.Trees {
|
||||
if t.Path == obj.GetName() {
|
||||
if t.Type == "commit" {
|
||||
return errors.New("cannot remove a submodule")
|
||||
}
|
||||
del = &t.TreeObjReq
|
||||
del.Sha = nil
|
||||
break
|
||||
}
|
||||
}
|
||||
if del == nil {
|
||||
return errs.ObjectNotFound
|
||||
}
|
||||
newTree := make([]interface{}, 0, 2)
|
||||
newTree = append(newTree, *del)
|
||||
if len(tree.Trees) == 1 { // completely emptying the repository will get a 404
|
||||
newTree = append(newTree, map[string]string{
|
||||
"path": ".gitkeep",
|
||||
"mode": "100644",
|
||||
"type": "blob",
|
||||
"content": "",
|
||||
})
|
||||
}
|
||||
newSha, err := d.newTree(treeSha, newTree)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rootSha, err := d.renewParentTrees(parentDir, treeSha, newSha, "/")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
commitMessage, err := getMessage(d.deleteMsgTmpl, &MessageTemplateVars{
|
||||
UserName: getUsername(ctx),
|
||||
ObjName: obj.GetName(),
|
||||
ObjPath: obj.GetPath(),
|
||||
ParentName: stdpath.Base(parentDir),
|
||||
ParentPath: parentDir,
|
||||
}, "remove")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return d.commit(commitMessage, rootSha)
|
||||
}
|
||||
|
||||
func (d *Github) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||
if !d.isOnBranch {
|
||||
return errors.New("cannot write to non-branch reference")
|
||||
}
|
||||
blob, err := d.putBlob(ctx, stream, up)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
d.commitMutex.Lock()
|
||||
defer d.commitMutex.Unlock()
|
||||
parent, err := d.get(dstDir.GetPath())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if parent.Entries == nil {
|
||||
return errs.NotFolder
|
||||
}
|
||||
newTree := make([]interface{}, 0, 2)
|
||||
newTree = append(newTree, TreeObjReq{
|
||||
Path: stream.GetName(),
|
||||
Mode: "100644",
|
||||
Type: "blob",
|
||||
Sha: blob,
|
||||
})
|
||||
if len(parent.Entries) == 1 && parent.Entries[0].Name == ".gitkeep" {
|
||||
newTree = append(newTree, TreeObjReq{
|
||||
Path: ".gitkeep",
|
||||
Mode: "100644",
|
||||
Type: "blob",
|
||||
Sha: nil,
|
||||
})
|
||||
}
|
||||
newSha, err := d.newTree(parent.Sha, newTree)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rootSha, err := d.renewParentTrees(dstDir.GetPath(), parent.Sha, newSha, "/")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
commitMessage, err := getMessage(d.putMsgTmpl, &MessageTemplateVars{
|
||||
UserName: getUsername(ctx),
|
||||
ObjName: stream.GetName(),
|
||||
ObjPath: stdpath.Join(dstDir.GetPath(), stream.GetName()),
|
||||
ParentName: dstDir.GetName(),
|
||||
ParentPath: dstDir.GetPath(),
|
||||
}, "upload")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return d.commit(commitMessage, rootSha)
|
||||
}
|
||||
|
||||
var _ driver.Driver = (*Github)(nil)
|
||||
|
||||
func (d *Github) getContentApiUrl(path string) string {
|
||||
path = utils.FixAndCleanPath(path)
|
||||
return fmt.Sprintf("https://api.github.com/repos/%s/%s/contents%s", d.Owner, d.Repo, path)
|
||||
}
|
||||
|
||||
func (d *Github) get(path string) (*Object, error) {
|
||||
res, err := d.client.R().SetQueryParam("ref", d.Ref).Get(d.getContentApiUrl(path))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if res.StatusCode() != 200 {
|
||||
return nil, toErr(res)
|
||||
}
|
||||
var resp Object
|
||||
err = utils.Json.Unmarshal(res.Body(), &resp)
|
||||
return &resp, err
|
||||
}
|
||||
|
||||
func (d *Github) createGitKeep(path, message string) error {
|
||||
body := map[string]interface{}{
|
||||
"message": message,
|
||||
"content": "",
|
||||
"branch": d.Ref,
|
||||
}
|
||||
d.addCommitterAndAuthor(&body)
|
||||
|
||||
res, err := d.client.R().SetBody(body).Put(d.getContentApiUrl(stdpath.Join(path, ".gitkeep")))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if res.StatusCode() != 200 && res.StatusCode() != 201 {
|
||||
return toErr(res)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
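// putBlob streams the file to the Git blobs API as base64. The JSON request body is
// assembled from three readers (a fixed prefix, the base64-encoded content fed through
// a pipe, and a fixed suffix), so the payload is never buffered in memory as a whole.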
func (d *Github) putBlob(ctx context.Context, stream model.FileStreamer, up driver.UpdateProgress) (string, error) {
|
||||
beforeContent := "{\"encoding\":\"base64\",\"content\":\""
|
||||
afterContent := "\"}"
|
||||
length := int64(len(beforeContent)) + calculateBase64Length(stream.GetSize()) + int64(len(afterContent))
|
||||
beforeContentReader := strings.NewReader(beforeContent)
|
||||
contentReader, contentWriter := io.Pipe()
|
||||
go func() {
|
||||
encoder := base64.NewEncoder(base64.StdEncoding, contentWriter)
|
||||
if _, err := utils.CopyWithBuffer(encoder, stream); err != nil {
|
||||
_ = contentWriter.CloseWithError(err)
|
||||
return
|
||||
}
|
||||
_ = encoder.Close()
|
||||
_ = contentWriter.Close()
|
||||
}()
|
||||
afterContentReader := strings.NewReader(afterContent)
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodPost,
|
||||
fmt.Sprintf("https://api.github.com/repos/%s/%s/git/blobs", d.Owner, d.Repo),
|
||||
&ReaderWithProgress{
|
||||
Reader: io.MultiReader(beforeContentReader, contentReader, afterContentReader),
|
||||
Length: length,
|
||||
Progress: up,
|
||||
})
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
req.Header.Set("Accept", "application/vnd.github+json")
|
||||
req.Header.Set("Authorization", "Bearer "+d.Token)
|
||||
req.Header.Set("X-GitHub-Api-Version", "2022-11-28")
|
||||
req.ContentLength = length
|
||||
|
||||
res, err := base.HttpClient.Do(req)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
resBody, err := io.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if res.StatusCode != 201 {
|
||||
var errMsg ErrResp
|
||||
if err = utils.Json.Unmarshal(resBody, &errMsg); err != nil {
|
||||
return "", errors.New(res.Status)
|
||||
} else {
|
||||
return "", fmt.Errorf("%s: %s", res.Status, errMsg.Message)
|
||||
}
|
||||
}
|
||||
var resp PutBlobResp
|
||||
if err = utils.Json.Unmarshal(resBody, &resp); err != nil {
|
||||
return "", err
|
||||
}
|
||||
return resp.Sha, nil
|
||||
}
|
||||
|
||||
func (d *Github) delete(path, sha, message string) error {
|
||||
body := map[string]interface{}{
|
||||
"message": message,
|
||||
"sha": sha,
|
||||
"branch": d.Ref,
|
||||
}
|
||||
d.addCommitterAndAuthor(&body)
|
||||
res, err := d.client.R().SetBody(body).Delete(d.getContentApiUrl(path))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if res.StatusCode() != 200 {
|
||||
return toErr(res)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
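// renewParentTrees walks up from path to until (usually "/"). At each level it creates
// a new tree in which the child entry whose sha was prevSha now points to curSha, and
// it returns the sha of the topmost tree it created.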
func (d *Github) renewParentTrees(path, prevSha, curSha, until string) (string, error) {
|
||||
for path != until {
|
||||
path = stdpath.Dir(path)
|
||||
tree, sha, err := d.getTreeDirectly(path)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
var newTree *TreeObjReq = nil
|
||||
for _, t := range tree.Trees {
|
||||
if t.Sha == prevSha {
|
||||
newTree = &t.TreeObjReq
|
||||
newTree.Sha = curSha
|
||||
break
|
||||
}
|
||||
}
|
||||
if newTree == nil {
|
||||
return "", errs.ObjectNotFound
|
||||
}
|
||||
curSha, err = d.newTree(sha, []interface{}{*newTree})
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
prevSha = sha
|
||||
}
|
||||
return curSha, nil
|
||||
}
|
||||
|
||||
func (d *Github) getTree(sha string) (*TreeResp, error) {
|
||||
res, err := d.client.R().Get(fmt.Sprintf("https://api.github.com/repos/%s/%s/git/trees/%s", d.Owner, d.Repo, sha))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if res.StatusCode() != 200 {
|
||||
return nil, toErr(res)
|
||||
}
|
||||
var resp TreeResp
|
||||
if err = utils.Json.Unmarshal(res.Body(), &resp); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &resp, nil
|
||||
}
|
||||
|
||||
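// getTreeDirectly resolves path through the contents API and returns its git tree
// together with that tree's sha, rejecting non-folder paths and truncated trees.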
func (d *Github) getTreeDirectly(path string) (*TreeResp, string, error) {
|
||||
p, err := d.get(path)
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
}
|
||||
if p.Entries == nil {
|
||||
return nil, "", fmt.Errorf("%s is not a folder", path)
|
||||
}
|
||||
tree, err := d.getTree(p.Sha)
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
}
|
||||
if tree.Truncated {
|
||||
return nil, "", fmt.Errorf("tree %s is truncated", path)
|
||||
}
|
||||
return tree, p.Sha, nil
|
||||
}
|
||||
|
||||
func (d *Github) newTree(baseSha string, tree []interface{}) (string, error) {
|
||||
res, err := d.client.R().
|
||||
SetBody(&TreeReq{
|
||||
BaseTree: baseSha,
|
||||
Trees: tree,
|
||||
}).
|
||||
Post(fmt.Sprintf("https://api.github.com/repos/%s/%s/git/trees", d.Owner, d.Repo))
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if res.StatusCode() != 201 {
|
||||
return "", toErr(res)
|
||||
}
|
||||
var resp TreeResp
|
||||
if err = utils.Json.Unmarshal(res.Body(), &resp); err != nil {
|
||||
return "", err
|
||||
}
|
||||
return resp.Sha, nil
|
||||
}
|
||||
|
||||
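// commit creates a commit object pointing at treeSha, with the current branch head as
// its parent, then advances the branch ref to the new commit (non-force update).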
func (d *Github) commit(message, treeSha string) error {
|
||||
oldCommit, err := d.getBranchHead()
if err != nil {
return err
}
body := map[string]interface{}{
|
||||
"message": message,
|
||||
"tree": treeSha,
|
||||
"parents": []string{oldCommit},
|
||||
}
|
||||
d.addCommitterAndAuthor(&body)
|
||||
res, err := d.client.R().SetBody(body).Post(fmt.Sprintf("https://api.github.com/repos/%s/%s/git/commits", d.Owner, d.Repo))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if res.StatusCode() != 201 {
|
||||
return toErr(res)
|
||||
}
|
||||
var resp CommitResp
|
||||
if err = utils.Json.Unmarshal(res.Body(), &resp); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// update branch head
|
||||
res, err = d.client.R().
|
||||
SetBody(&UpdateRefReq{
|
||||
Sha: resp.Sha,
|
||||
Force: false,
|
||||
}).
|
||||
Patch(fmt.Sprintf("https://api.github.com/repos/%s/%s/git/refs/heads/%s", d.Owner, d.Repo, d.Ref))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if res.StatusCode() != 200 {
|
||||
return toErr(res)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Github) getBranchHead() (string, error) {
|
||||
res, err := d.client.R().Get(fmt.Sprintf("https://api.github.com/repos/%s/%s/branches/%s", d.Owner, d.Repo, d.Ref))
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if res.StatusCode() != 200 {
|
||||
return "", toErr(res)
|
||||
}
|
||||
var resp BranchResp
|
||||
if err = utils.Json.Unmarshal(res.Body(), &resp); err != nil {
|
||||
return "", err
|
||||
}
|
||||
return resp.Commit.Sha, nil
|
||||
}
|
||||
|
||||
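// copyWithoutRenewTree appends srcObj's tree entry to dstDir's tree (removing a lone
// .gitkeep placeholder) without renewing any parent chain. It returns the old and new
// shas of dstDir's tree plus the source parent's tree and sha for the caller to reuse.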
func (d *Github) copyWithoutRenewTree(srcObj, dstDir model.Obj) (dstSha, newSha, srcParentSha string, srcParentTree *TreeResp, err error) {
|
||||
dst, err := d.get(dstDir.GetPath())
|
||||
if err != nil {
|
||||
return "", "", "", nil, err
|
||||
}
|
||||
if dst.Entries == nil {
|
||||
return "", "", "", nil, errs.NotFolder
|
||||
}
|
||||
dstSha = dst.Sha
|
||||
srcParentPath := stdpath.Dir(srcObj.GetPath())
|
||||
srcParentTree, srcParentSha, err = d.getTreeDirectly(srcParentPath)
|
||||
if err != nil {
|
||||
return "", "", "", nil, err
|
||||
}
|
||||
var src *TreeObjReq = nil
|
||||
for _, t := range srcParentTree.Trees {
|
||||
if t.Path == srcObj.GetName() {
|
||||
if t.Type == "commit" {
|
||||
return "", "", "", nil, errors.New("cannot copy a submodule")
|
||||
}
|
||||
src = &t.TreeObjReq
|
||||
break
|
||||
}
|
||||
}
|
||||
if src == nil {
|
||||
return "", "", "", nil, errs.ObjectNotFound
|
||||
}
|
||||
|
||||
newTree := make([]interface{}, 0, 2)
|
||||
newTree = append(newTree, *src)
|
||||
if len(dst.Entries) == 1 && dst.Entries[0].Name == ".gitkeep" {
|
||||
newTree = append(newTree, TreeObjReq{
|
||||
Path: ".gitkeep",
|
||||
Mode: "100644",
|
||||
Type: "blob",
|
||||
Sha: nil,
|
||||
})
|
||||
}
|
||||
newSha, err = d.newTree(dstSha, newTree)
|
||||
if err != nil {
|
||||
return "", "", "", nil, err
|
||||
}
|
||||
return dstSha, newSha, srcParentSha, srcParentTree, nil
|
||||
}
|
||||
|
||||
func (d *Github) getRepo() (*RepoResp, error) {
|
||||
res, err := d.client.R().Get(fmt.Sprintf("https://api.github.com/repos/%s/%s", d.Owner, d.Repo))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if res.StatusCode() != 200 {
|
||||
return nil, toErr(res)
|
||||
}
|
||||
var resp RepoResp
|
||||
if err = utils.Json.Unmarshal(res.Body(), &resp); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &resp, nil
|
||||
}
|
||||
|
||||
func (d *Github) addCommitterAndAuthor(m *map[string]interface{}) {
|
||||
if d.CommitterName != "" {
|
||||
committer := map[string]string{
|
||||
"name": d.CommitterName,
|
||||
"email": d.CommitterEmail,
|
||||
}
|
||||
(*m)["committer"] = committer
|
||||
}
|
||||
if d.AuthorName != "" {
|
||||
author := map[string]string{
|
||||
"name": d.AuthorName,
|
||||
"email": d.AuthorEmail,
|
||||
}
|
||||
(*m)["author"] = author
|
||||
}
|
||||
}
|
36 drivers/github/meta.go Normal file
@@ -0,0 +1,36 @@
|
||||
package github
|
||||
|
||||
import (
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
)
|
||||
|
||||
type Addition struct {
|
||||
driver.RootPath
|
||||
Token string `json:"token" type:"string" required:"true"`
|
||||
Owner string `json:"owner" type:"string" required:"true"`
|
||||
Repo string `json:"repo" type:"string" required:"true"`
|
||||
Ref string `json:"ref" type:"string" help:"A branch, a tag or a commit SHA, main branch by default."`
|
||||
CommitterName string `json:"committer_name" type:"string"`
|
||||
CommitterEmail string `json:"committer_email" type:"string"`
|
||||
AuthorName string `json:"author_name" type:"string"`
|
||||
AuthorEmail string `json:"author_email" type:"string"`
|
||||
MkdirCommitMsg string `json:"mkdir_commit_message" type:"text" default:"{{.UserName}} mkdir {{.ObjPath}}"`
|
||||
DeleteCommitMsg string `json:"delete_commit_message" type:"text" default:"{{.UserName}} remove {{.ObjPath}}"`
|
||||
PutCommitMsg string `json:"put_commit_message" type:"text" default:"{{.UserName}} upload {{.ObjPath}}"`
|
||||
RenameCommitMsg string `json:"rename_commit_message" type:"text" default:"{{.UserName}} rename {{.ObjPath}} to {{.TargetName}}"`
|
||||
CopyCommitMsg string `json:"copy_commit_message" type:"text" default:"{{.UserName}} copy {{.ObjPath}} to {{.TargetPath}}"`
|
||||
MoveCommitMsg string `json:"move_commit_message" type:"text" default:"{{.UserName}} move {{.ObjPath}} to {{.TargetPath}}"`
|
||||
}
|
||||
|
||||
var config = driver.Config{
|
||||
Name: "GitHub API",
|
||||
LocalSort: true,
|
||||
DefaultRoot: "/",
|
||||
}
|
||||
|
||||
func init() {
|
||||
op.RegisterDriver(func() driver.Driver {
|
||||
return &Github{}
|
||||
})
|
||||
}
|
102 drivers/github/types.go Normal file
@@ -0,0 +1,102 @@
|
||||
package github
|
||||
|
||||
import (
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"time"
|
||||
)
|
||||
|
||||
type Links struct {
|
||||
Git string `json:"git"`
|
||||
Html string `json:"html"`
|
||||
Self string `json:"self"`
|
||||
}
|
||||
|
||||
type Object struct {
|
||||
Type string `json:"type"`
|
||||
Encoding string `json:"encoding" required:"false"`
|
||||
Size int64 `json:"size"`
|
||||
Name string `json:"name"`
|
||||
Path string `json:"path"`
|
||||
Content string `json:"Content" required:"false"`
|
||||
Sha string `json:"sha"`
|
||||
URL string `json:"url"`
|
||||
GitURL string `json:"git_url"`
|
||||
HtmlURL string `json:"html_url"`
|
||||
DownloadURL string `json:"download_url"`
|
||||
Entries []Object `json:"entries" required:"false"`
|
||||
Links Links `json:"_links"`
|
||||
SubmoduleGitURL string `json:"submodule_git_url" required:"false"`
|
||||
Target string `json:"target" required:"false"`
|
||||
}
|
||||
|
||||
func (o *Object) toModelObj() *model.Object {
|
||||
return &model.Object{
|
||||
Name: o.Name,
|
||||
Size: o.Size,
|
||||
Modified: time.Unix(0, 0),
|
||||
IsFolder: o.Type == "dir",
|
||||
}
|
||||
}
|
||||
|
||||
type PutBlobResp struct {
|
||||
URL string `json:"url"`
|
||||
Sha string `json:"sha"`
|
||||
}
|
||||
|
||||
type ErrResp struct {
|
||||
Message string `json:"message"`
|
||||
DocumentationURL string `json:"documentation_url"`
|
||||
Status string `json:"status"`
|
||||
}
|
||||
|
||||
type TreeObjReq struct {
|
||||
Path string `json:"path"`
|
||||
Mode string `json:"mode"`
|
||||
Type string `json:"type"`
|
||||
Sha interface{} `json:"sha"`
|
||||
}
|
||||
|
||||
type TreeObjResp struct {
|
||||
TreeObjReq
|
||||
Size int64 `json:"size" required:"false"`
|
||||
URL string `json:"url"`
|
||||
}
|
||||
|
||||
func (o *TreeObjResp) toModelObj() *model.Object {
|
||||
return &model.Object{
|
||||
Name: o.Path,
|
||||
Size: o.Size,
|
||||
Modified: time.Unix(0, 0),
|
||||
IsFolder: o.Type == "tree",
|
||||
}
|
||||
}
|
||||
|
||||
type TreeResp struct {
|
||||
Sha string `json:"sha"`
|
||||
URL string `json:"url"`
|
||||
Trees []TreeObjResp `json:"tree"`
|
||||
Truncated bool `json:"truncated"`
|
||||
}
|
||||
|
||||
type TreeReq struct {
|
||||
BaseTree string `json:"base_tree"`
|
||||
Trees []interface{} `json:"tree"`
|
||||
}
|
||||
|
||||
type CommitResp struct {
|
||||
Sha string `json:"sha"`
|
||||
}
|
||||
|
||||
type BranchResp struct {
|
||||
Name string `json:"name"`
|
||||
Commit CommitResp `json:"commit"`
|
||||
}
|
||||
|
||||
type UpdateRefReq struct {
|
||||
Sha string `json:"sha"`
|
||||
Force bool `json:"force"`
|
||||
}
|
||||
|
||||
type RepoResp struct {
|
||||
DefaultBranch string `json:"default_branch"`
|
||||
}
|
115 drivers/github/util.go Normal file
@@ -0,0 +1,115 @@
|
||||
package github
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/go-resty/resty/v2"
|
||||
"io"
|
||||
"math"
|
||||
"strings"
|
||||
"text/template"
|
||||
)
|
||||
|
||||
type ReaderWithProgress struct {
|
||||
Reader io.Reader
|
||||
Length int64
|
||||
Progress func(percentage float64)
|
||||
offset int64
|
||||
}
|
||||
|
||||
func (r *ReaderWithProgress) Read(p []byte) (int, error) {
|
||||
n, err := r.Reader.Read(p)
|
||||
r.offset += int64(n)
|
||||
r.Progress(math.Min(100.0, float64(r.offset)/float64(r.Length)*100.0))
|
||||
return n, err
|
||||
}
|
||||
|
||||
type MessageTemplateVars struct {
|
||||
UserName string
|
||||
ObjName string
|
||||
ObjPath string
|
||||
ParentName string
|
||||
ParentPath string
|
||||
TargetName string
|
||||
TargetPath string
|
||||
}
|
||||
|
||||
func getMessage(tmpl *template.Template, vars *MessageTemplateVars, defaultOpStr string) (string, error) {
|
||||
sb := strings.Builder{}
|
||||
if err := tmpl.Execute(&sb, vars); err != nil {
|
||||
return fmt.Sprintf("%s %s %s", vars.UserName, defaultOpStr, vars.ObjPath), err
|
||||
}
|
||||
return sb.String(), nil
|
||||
}
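As a quick illustration of how the commit-message templates are rendered (a sketch only; the template text below is the driver's default put_commit_message, while the variable values are made up):
package github

import (
	"fmt"
	"text/template"
)

func exampleGetMessage() {
	// parse the default upload template and render it for a hypothetical upload
	tmpl := template.Must(template.New("put").Parse("{{.UserName}} upload {{.ObjPath}}"))
	msg, err := getMessage(tmpl, &MessageTemplateVars{
		UserName: "admin",
		ObjName:  "a.txt",
		ObjPath:  "/docs/a.txt",
	}, "upload")
	fmt.Println(msg, err) // admin upload /docs/a.txt <nil>
}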
|
||||
|
||||
func calculateBase64Length(inputLength int64) int64 {
|
||||
return 4 * ((inputLength + 2) / 3)
|
||||
}
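A quick sanity check of the padded-length formula: 10 input bytes encode to 4*((10+2)/3) = 16 base64 characters, which matches the standard library. A minimal test sketch, assuming it sits beside this file in package github:
package github

import (
	"encoding/base64"
	"testing"
)

func TestCalculateBase64Length(t *testing.T) {
	// compare the formula against the stdlib's padded EncodedLen for a few sizes
	for _, n := range []int64{0, 1, 2, 3, 10, 57, 1 << 20} {
		if got, want := calculateBase64Length(n), int64(base64.StdEncoding.EncodedLen(int(n))); got != want {
			t.Fatalf("n=%d: got %d, want %d", n, got, want)
		}
	}
}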
|
||||
|
||||
func toErr(res *resty.Response) error {
|
||||
var errMsg ErrResp
|
||||
if err := utils.Json.Unmarshal(res.Body(), &errMsg); err != nil {
|
||||
return errors.New(res.Status())
|
||||
} else {
|
||||
return fmt.Errorf("%s: %s", res.Status(), errMsg.Message)
|
||||
}
|
||||
}
|
||||
|
||||
// Example input:
|
||||
// a = /aaa/bbb/ccc
|
||||
// b = /aaa/b11/ddd/ccc
|
||||
//
|
||||
// Output:
|
||||
// ancestor = /aaa
|
||||
// aChildName = bbb
|
||||
// bChildName = b11
|
||||
// aRest = bbb/ccc
|
||||
// bRest = b11/ddd/ccc
|
||||
func getPathCommonAncestor(a, b string) (ancestor, aChildName, bChildName, aRest, bRest string) {
|
||||
a = utils.FixAndCleanPath(a)
|
||||
b = utils.FixAndCleanPath(b)
|
||||
idx := 1
|
||||
for idx < len(a) && idx < len(b) {
|
||||
if a[idx] != b[idx] {
|
||||
break
|
||||
}
|
||||
idx++
|
||||
}
|
||||
aNextIdx := idx
|
||||
for aNextIdx < len(a) {
|
||||
if a[aNextIdx] == '/' {
|
||||
break
|
||||
}
|
||||
aNextIdx++
|
||||
}
|
||||
bNextIdx := idx
|
||||
for bNextIdx < len(b) {
|
||||
if b[bNextIdx] == '/' {
|
||||
break
|
||||
}
|
||||
bNextIdx++
|
||||
}
|
||||
for idx > 0 {
|
||||
if a[idx] == '/' {
|
||||
break
|
||||
}
|
||||
idx--
|
||||
}
|
||||
ancestor = utils.FixAndCleanPath(a[:idx])
|
||||
aChildName = a[idx+1 : aNextIdx]
|
||||
bChildName = b[idx+1 : bNextIdx]
|
||||
aRest = a[idx+1:]
|
||||
bRest = b[idx+1:]
|
||||
return ancestor, aChildName, bChildName, aRest, bRest
|
||||
}
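The example in the comment above can be checked directly; a minimal test sketch, assuming it lives beside this file in package github:
package github

import "testing"

func TestGetPathCommonAncestor(t *testing.T) {
	// inputs and expected outputs come from the function's doc comment
	ancestor, aChild, bChild, aRest, bRest := getPathCommonAncestor("/aaa/bbb/ccc", "/aaa/b11/ddd/ccc")
	if ancestor != "/aaa" || aChild != "bbb" || bChild != "b11" || aRest != "bbb/ccc" || bRest != "b11/ddd/ccc" {
		t.Fatalf("unexpected result: %s %s %s %s %s", ancestor, aChild, bChild, aRest, bRest)
	}
}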
|
||||
|
||||
func getUsername(ctx context.Context) string {
|
||||
user, ok := ctx.Value("user").(*model.User)
|
||||
if !ok {
|
||||
return "<system>"
|
||||
}
|
||||
return user.Username
|
||||
}
|
153 drivers/github_releases/driver.go Normal file
@@ -0,0 +1,153 @@
|
||||
package github_releases
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"strings"
|
||||
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/errs"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
)
|
||||
|
||||
type GithubReleases struct {
|
||||
model.Storage
|
||||
Addition
|
||||
|
||||
releases []Release
|
||||
}
|
||||
|
||||
func (d *GithubReleases) Config() driver.Config {
|
||||
return config
|
||||
}
|
||||
|
||||
func (d *GithubReleases) GetAddition() driver.Additional {
|
||||
return &d.Addition
|
||||
}
|
||||
|
||||
func (d *GithubReleases) Init(ctx context.Context) error {
|
||||
SetHeader(d.Addition.Token)
|
||||
repos, err := ParseRepos(d.Addition.RepoStructure, d.Addition.ShowAllVersion)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
d.releases = repos
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *GithubReleases) Drop(ctx context.Context) error {
|
||||
ClearCache()
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *GithubReleases) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||
files := make([]File, 0)
|
||||
path := fmt.Sprintf("/%s", strings.Trim(dir.GetPath(), "/"))
|
||||
|
||||
for _, repo := range d.releases {
|
||||
if repo.Path == path { // the listed directory is exactly a mounted repo path
|
||||
resp, err := GetRepoReleaseInfo(repo.RepoName, repo.ID, path, d.Storage.CacheExpiration)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
files = append(files, resp.Files...)
|
||||
|
||||
if d.Addition.ShowReadme {
|
||||
resp, err := GetGithubOtherFile(repo.RepoName, path, d.Storage.CacheExpiration)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
files = append(files, *resp...)
|
||||
}
|
||||
|
||||
} else if strings.HasPrefix(repo.Path, path) { // the repo is mounted under a subdirectory of the listed directory
|
||||
nextDir := GetNextDir(repo.Path, path)
|
||||
if nextDir == "" {
|
||||
continue
|
||||
}
|
||||
if d.Addition.ShowAllVersion {
|
||||
files = append(files, File{
|
||||
FileName: nextDir,
|
||||
Size: 0,
|
||||
CreateAt: time.Time{},
|
||||
UpdateAt: time.Time{},
|
||||
Url: "",
|
||||
Type: "dir",
|
||||
Path: fmt.Sprintf("%s/%s", path, nextDir),
|
||||
})
|
||||
continue
|
||||
}
|
||||
|
||||
repo, _ := GetRepoReleaseInfo(repo.RepoName, repo.Version, path, d.Storage.CacheExpiration)
|
||||
|
||||
hasSameDir := false
|
||||
for index, file := range files {
|
||||
if file.FileName == nextDir {
|
||||
hasSameDir = true
|
||||
files[index].Size += repo.Size
|
||||
files[index].UpdateAt = func(a time.Time, b time.Time) time.Time {
|
||||
if a.After(b) {
|
||||
return a
|
||||
}
|
||||
return b
|
||||
}(files[index].UpdateAt, repo.UpdateAt)
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if !hasSameDir {
|
||||
files = append(files, File{
|
||||
FileName: nextDir,
|
||||
Size: repo.Size,
|
||||
CreateAt: repo.CreateAt,
|
||||
UpdateAt: repo.UpdateAt,
|
||||
Url: repo.Url,
|
||||
Type: "dir",
|
||||
Path: fmt.Sprintf("%s/%s", path, nextDir),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
|
||||
return src, nil
|
||||
})
|
||||
}
|
||||
|
||||
func (d *GithubReleases) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
link := model.Link{
|
||||
URL: file.GetID(),
|
||||
Header: http.Header{},
|
||||
}
|
||||
return &link, nil
|
||||
}
|
||||
|
||||
func (d *GithubReleases) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
|
||||
return nil, errs.NotImplement
|
||||
}
|
||||
|
||||
func (d *GithubReleases) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
|
||||
return nil, errs.NotImplement
|
||||
}
|
||||
|
||||
func (d *GithubReleases) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
|
||||
return nil, errs.NotImplement
|
||||
}
|
||||
|
||||
func (d *GithubReleases) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
|
||||
return nil, errs.NotImplement
|
||||
}
|
||||
|
||||
func (d *GithubReleases) Remove(ctx context.Context, obj model.Obj) error {
|
||||
return errs.NotImplement
|
||||
}
|
||||
|
||||
func (d *GithubReleases) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
|
||||
return nil, errs.NotImplement
|
||||
}
|
||||
|
||||
var _ driver.Driver = (*GithubReleases)(nil)
|
34 drivers/github_releases/meta.go Normal file
@@ -0,0 +1,34 @@
|
||||
package github_releases
|
||||
|
||||
import (
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
)
|
||||
|
||||
type Addition struct {
|
||||
driver.RootID
|
||||
RepoStructure string `json:"repo_structure" type:"text" required:"true" default:"/path/to/alist-gh:alistGo/alist\n/path/to2/alist-web-gh:AlistGo/alist-web" help:"structure:[path:]org/repo"`
|
||||
ShowReadme bool `json:"show_readme" type:"bool" default:"true" help:"show README, LICENSE files"`
|
||||
Token string `json:"token" type:"string" required:"false" help:"GitHub token, if you want to access private repositories or increase the rate limit"`
|
||||
ShowAllVersion bool `json:"show_all_version" type:"bool" default:"false" help:"show all versions"`
|
||||
}
|
||||
|
||||
var config = driver.Config{
|
||||
Name: "GitHub Releases",
|
||||
LocalSort: false,
|
||||
OnlyLocal: false,
|
||||
OnlyProxy: false,
|
||||
NoCache: false,
|
||||
NoUpload: false,
|
||||
NeedMs: false,
|
||||
DefaultRoot: "",
|
||||
CheckStatus: false,
|
||||
Alert: "",
|
||||
NoOverwriteUpload: false,
|
||||
}
|
||||
|
||||
func init() {
|
||||
op.RegisterDriver(func() driver.Driver {
|
||||
return &GithubReleases{}
|
||||
})
|
||||
}
|
68 drivers/github_releases/types.go Normal file
@@ -0,0 +1,68 @@
|
||||
package github_releases
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
)
|
||||
|
||||
type File struct {
|
||||
FileName string `json:"name"`
|
||||
Size int64 `json:"size"`
|
||||
CreateAt time.Time `json:"time"`
|
||||
UpdateAt time.Time `json:"chtime"`
|
||||
Url string `json:"url"`
|
||||
Type string `json:"type"`
|
||||
Path string `json:"path"`
|
||||
}
|
||||
|
||||
func (f File) GetHash() utils.HashInfo {
|
||||
return utils.HashInfo{}
|
||||
}
|
||||
|
||||
func (f File) GetPath() string {
|
||||
return f.Path
|
||||
}
|
||||
|
||||
func (f File) GetSize() int64 {
|
||||
return f.Size
|
||||
}
|
||||
|
||||
func (f File) GetName() string {
|
||||
return f.FileName
|
||||
}
|
||||
|
||||
func (f File) ModTime() time.Time {
|
||||
return f.UpdateAt
|
||||
}
|
||||
|
||||
func (f File) CreateTime() time.Time {
|
||||
return f.CreateAt
|
||||
}
|
||||
|
||||
func (f File) IsDir() bool {
|
||||
return f.Type == "dir"
|
||||
}
|
||||
|
||||
func (f File) GetID() string {
|
||||
return f.Url
|
||||
}
|
||||
|
||||
func (f File) Thumb() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
type ReleasesData struct {
|
||||
Files []File `json:"files"`
|
||||
Size int64 `json:"size"`
|
||||
UpdateAt time.Time `json:"chtime"`
|
||||
CreateAt time.Time `json:"time"`
|
||||
Url string `json:"url"`
|
||||
}
|
||||
|
||||
type Release struct {
|
||||
Path     string // mount path
RepoName string // repository name
Version  string // version (tag name)
ID       string // release ID
|
||||
}
|
217 drivers/github_releases/util.go Normal file
@@ -0,0 +1,217 @@
|
||||
package github_releases
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/go-resty/resty/v2"
|
||||
jsoniter "github.com/json-iterator/go"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
var (
|
||||
cache = make(map[string]*resty.Response)
|
||||
created = make(map[string]time.Time)
|
||||
mu sync.Mutex
|
||||
req *resty.Request
|
||||
)
|
||||
|
||||
// ParseRepos parses the repository list from the repo_structure setting
|
||||
func ParseRepos(text string, allVersion bool) ([]Release, error) {
|
||||
lines := strings.Split(text, "\n")
|
||||
var repos []Release
|
||||
for _, line := range lines {
|
||||
line = strings.TrimSpace(line)
|
||||
if line == "" {
|
||||
continue
|
||||
}
|
||||
parts := strings.Split(line, ":")
|
||||
path, repo := "", ""
|
||||
if len(parts) == 1 {
|
||||
path = "/"
|
||||
repo = parts[0]
|
||||
} else if len(parts) == 2 {
|
||||
path = fmt.Sprintf("/%s", strings.Trim(parts[0], "/"))
|
||||
repo = parts[1]
|
||||
} else {
|
||||
return nil, fmt.Errorf("invalid format: %s", line)
|
||||
}
|
||||
|
||||
if allVersion {
|
||||
releases, _ := GetAllVersion(repo, path)
|
||||
repos = append(repos, *releases...)
|
||||
} else {
|
||||
repos = append(repos, Release{
|
||||
Path: path,
|
||||
RepoName: repo,
|
||||
Version: "latest",
|
||||
ID: "latest",
|
||||
})
|
||||
}
|
||||
|
||||
}
|
||||
return repos, nil
|
||||
}
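For instance (a sketch; the input string reuses the default repo_structure from meta.go, and show_all_version is off so no network call is made):
package github_releases

import "fmt"

func exampleParseRepos() {
	text := "/path/to/alist-gh:alistGo/alist\n/path/to2/alist-web-gh:AlistGo/alist-web"
	repos, err := ParseRepos(text, false)
	if err != nil {
		panic(err)
	}
	for _, r := range repos {
		// prints e.g. "/path/to/alist-gh alistGo/alist latest"
		fmt.Println(r.Path, r.RepoName, r.Version)
	}
}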
|
||||
|
||||
// GetNextDir returns the next path segment of wholePath directly below basePath
|
||||
func GetNextDir(wholePath string, basePath string) string {
|
||||
if !strings.HasSuffix(basePath, "/") {
|
||||
basePath += "/"
|
||||
}
|
||||
if !strings.HasPrefix(wholePath, basePath) {
|
||||
return ""
|
||||
}
|
||||
remainingPath := strings.TrimLeft(strings.TrimPrefix(wholePath, basePath), "/")
|
||||
if remainingPath != "" {
|
||||
parts := strings.Split(remainingPath, "/")
|
||||
return parts[0]
|
||||
}
|
||||
return ""
|
||||
}
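A few illustrative calls (a sketch; the paths are made up) showing how GetNextDir yields the virtual folder to display while walking towards a mounted repo:
package github_releases

import "fmt"

func exampleGetNextDir() {
	fmt.Println(GetNextDir("/path/to/alist-gh", "/path"))    // "to"
	fmt.Println(GetNextDir("/path/to/alist-gh", "/path/to")) // "alist-gh"
	fmt.Println(GetNextDir("/other/repo", "/path"))          // "" (not under basePath)
}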
|
||||
|
||||
// GetRequest sends a GET request, reusing a cached response until it expires
|
||||
func GetRequest(url string, cacheExpiration int) (*resty.Response, error) {
|
||||
mu.Lock()
|
||||
if res, ok := cache[url]; ok && time.Now().Before(created[url].Add(time.Duration(cacheExpiration)*time.Minute)) {
|
||||
mu.Unlock()
|
||||
return res, nil
|
||||
}
|
||||
mu.Unlock()
|
||||
|
||||
res, err := req.Get(url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if res.StatusCode() != 200 {
|
||||
log.Warn("failed to get request: ", res.StatusCode(), res.String())
|
||||
}
|
||||
|
||||
mu.Lock()
|
||||
cache[url] = res
|
||||
created[url] = time.Now()
|
||||
mu.Unlock()
|
||||
|
||||
return res, nil
|
||||
}
|
||||
|
||||
// GetGithubOtherFile fetches repository files such as README and LICENSE
|
||||
func GetGithubOtherFile(repo string, basePath string, cacheExpiration int) (*[]File, error) {
|
||||
url := fmt.Sprintf("https://api.github.com/repos/%s/contents/", strings.Trim(repo, "/"))
|
||||
res, _ := GetRequest(url, cacheExpiration)
|
||||
body := jsoniter.Get(res.Body())
|
||||
var files []File
|
||||
for i := 0; i < body.Size(); i++ {
|
||||
filename := body.Get(i, "name").ToString()
|
||||
|
||||
re := regexp.MustCompile(`(?i)^(.*\.md|LICENSE)$`)
|
||||
|
||||
if !re.MatchString(filename) {
|
||||
continue
|
||||
}
|
||||
|
||||
files = append(files, File{
|
||||
FileName: filename,
|
||||
Size: body.Get(i, "size").ToInt64(),
|
||||
CreateAt: time.Time{},
|
||||
UpdateAt: time.Now(),
|
||||
Url: body.Get(i, "download_url").ToString(),
|
||||
Type: body.Get(i, "type").ToString(),
|
||||
Path: fmt.Sprintf("%s/%s", basePath, filename),
|
||||
})
|
||||
}
|
||||
return &files, nil
|
||||
}
|
||||
|
||||
// GetRepoReleaseInfo fetches the details of a GitHub release
|
||||
func GetRepoReleaseInfo(repo string, version string, basePath string, cacheExpiration int) (*ReleasesData, error) {
|
||||
url := fmt.Sprintf("https://api.github.com/repos/%s/releases/%s", strings.Trim(repo, "/"), version)
|
||||
res, _ := GetRequest(url, cacheExpiration)
|
||||
body := res.Body()
|
||||
|
||||
if jsoniter.Get(res.Body(), "status").ToInt64() != 0 {
|
||||
return &ReleasesData{}, fmt.Errorf("%s", res.String())
|
||||
}
|
||||
|
||||
assets := jsoniter.Get(res.Body(), "assets")
|
||||
var files []File
|
||||
|
||||
for i := 0; i < assets.Size(); i++ {
|
||||
filename := assets.Get(i, "name").ToString()
|
||||
|
||||
files = append(files, File{
|
||||
FileName: filename,
|
||||
Size: assets.Get(i, "size").ToInt64(),
|
||||
Url: assets.Get(i, "browser_download_url").ToString(),
|
||||
Type: assets.Get(i, "content_type").ToString(),
|
||||
Path: fmt.Sprintf("%s/%s", basePath, filename),
|
||||
|
||||
CreateAt: func() time.Time {
|
||||
t, _ := time.Parse(time.RFC3339, assets.Get(i, "created_at").ToString())
|
||||
return t
|
||||
}(),
|
||||
UpdateAt: func() time.Time {
|
||||
t, _ := time.Parse(time.RFC3339, assets.Get(i, "updated_at").ToString())
|
||||
return t
|
||||
}(),
|
||||
})
|
||||
}
|
||||
|
||||
return &ReleasesData{
|
||||
Files: files,
|
||||
Url: jsoniter.Get(body, "html_url").ToString(),
|
||||
|
||||
Size: func() int64 {
|
||||
size := int64(0)
|
||||
for _, file := range files {
|
||||
size += file.Size
|
||||
}
|
||||
return size
|
||||
}(),
|
||||
UpdateAt: func() time.Time {
|
||||
t, _ := time.Parse(time.RFC3339, jsoniter.Get(body, "published_at").ToString())
|
||||
return t
|
||||
}(),
|
||||
CreateAt: func() time.Time {
|
||||
t, _ := time.Parse(time.RFC3339, jsoniter.Get(body, "created_at").ToString())
|
||||
return t
|
||||
}(),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// GetAllVersion fetches all release versions of a repository
|
||||
func GetAllVersion(repo string, path string) (*[]Release, error) {
|
||||
url := fmt.Sprintf("https://api.github.com/repos/%s/releases", strings.Trim(repo, "/"))
|
||||
res, _ := GetRequest(url, 0)
|
||||
body := jsoniter.Get(res.Body())
|
||||
releases := make([]Release, 0)
|
||||
for i := 0; i < body.Size(); i++ {
|
||||
version := body.Get(i, "tag_name").ToString()
|
||||
releases = append(releases, Release{
|
||||
Path: fmt.Sprintf("%s/%s", path, version),
|
||||
Version: version,
|
||||
RepoName: repo,
|
||||
ID: body.Get(i, "id").ToString(),
|
||||
})
|
||||
}
|
||||
return &releases, nil
|
||||
}
|
||||
|
||||
func ClearCache() {
|
||||
mu.Lock()
|
||||
cache = make(map[string]*resty.Response)
|
||||
created = make(map[string]time.Time)
|
||||
mu.Unlock()
|
||||
}
|
||||
|
||||
func SetHeader(token string) {
|
||||
req = base.RestyClient.R()
|
||||
if token != "" {
|
||||
req.SetHeader("Authorization", fmt.Sprintf("Bearer %s", token))
|
||||
}
|
||||
req.SetHeader("Accept", "application/vnd.github+json")
|
||||
req.SetHeader("X-GitHub-Api-Version", "2022-11-28")
|
||||
}
|
@@ -58,33 +58,9 @@ func (d *GooglePhoto) Link(ctx context.Context, file model.Obj, args model.LinkA
|
||||
URL: f.BaseURL + "=d",
|
||||
}, nil
|
||||
} else if strings.Contains(f.MimeType, "video/") {
|
||||
var width, height int
|
||||
|
||||
fmt.Sscanf(f.MediaMetadata.Width, "%d", &width)
|
||||
fmt.Sscanf(f.MediaMetadata.Height, "%d", &height)
|
||||
|
||||
switch {
|
||||
// 1080P
|
||||
case width == 1920 && height == 1080:
|
||||
return &model.Link{
|
||||
URL: f.BaseURL + "=m37",
|
||||
}, nil
|
||||
// 720P
|
||||
case width == 1280 && height == 720:
|
||||
return &model.Link{
|
||||
URL: f.BaseURL + "=m22",
|
||||
}, nil
|
||||
// 360P
|
||||
case width == 640 && height == 360:
|
||||
return &model.Link{
|
||||
URL: f.BaseURL + "=m18",
|
||||
}, nil
|
||||
default:
|
||||
return &model.Link{
|
||||
URL: f.BaseURL + "=dv",
|
||||
}, nil
|
||||
}
|
||||
|
||||
return &model.Link{
|
||||
URL: f.BaseURL + "=dv",
|
||||
}, nil
|
||||
}
|
||||
return &model.Link{}, nil
|
||||
}
|
||||
|
404 drivers/halalcloud/driver.go Normal file
@@ -0,0 +1,404 @@
|
||||
package halalcloud
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/sha1"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/url"
|
||||
"path"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
"github.com/alist-org/alist/v3/pkg/http_range"
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/aws/credentials"
|
||||
"github.com/aws/aws-sdk-go/aws/session"
|
||||
"github.com/aws/aws-sdk-go/service/s3/s3manager"
|
||||
"github.com/city404/v6-public-rpc-proto/go/v6/common"
|
||||
pbPublicUser "github.com/city404/v6-public-rpc-proto/go/v6/user"
|
||||
pubUserFile "github.com/city404/v6-public-rpc-proto/go/v6/userfile"
|
||||
"github.com/rclone/rclone/lib/readers"
|
||||
"github.com/zzzhr1990/go-common-entity/userfile"
|
||||
)
|
||||
|
||||
type HalalCloud struct {
|
||||
*HalalCommon
|
||||
model.Storage
|
||||
Addition
|
||||
|
||||
uploadThread int
|
||||
}
|
||||
|
||||
func (d *HalalCloud) Config() driver.Config {
|
||||
return config
|
||||
}
|
||||
|
||||
func (d *HalalCloud) GetAddition() driver.Additional {
|
||||
return &d.Addition
|
||||
}
|
||||
|
||||
func (d *HalalCloud) Init(ctx context.Context) error {
|
||||
d.uploadThread, _ = strconv.Atoi(d.UploadThread)
|
||||
if d.uploadThread < 1 || d.uploadThread > 32 {
|
||||
d.uploadThread, d.UploadThread = 3, "3"
|
||||
}
|
||||
|
||||
if d.HalalCommon == nil {
|
||||
d.HalalCommon = &HalalCommon{
|
||||
Common: &Common{},
|
||||
AuthService: &AuthService{
|
||||
appID: func() string {
|
||||
if d.Addition.AppID != "" {
|
||||
return d.Addition.AppID
|
||||
}
|
||||
return AppID
|
||||
}(),
|
||||
appVersion: func() string {
|
||||
if d.Addition.AppVersion != "" {
|
||||
return d.Addition.AppVersion
|
||||
}
|
||||
return AppVersion
|
||||
}(),
|
||||
appSecret: func() string {
|
||||
if d.Addition.AppSecret != "" {
|
||||
return d.Addition.AppSecret
|
||||
}
|
||||
return AppSecret
|
||||
}(),
|
||||
tr: &TokenResp{
|
||||
RefreshToken: d.Addition.RefreshToken,
|
||||
},
|
||||
},
|
||||
UserInfo: &UserInfo{},
|
||||
refreshTokenFunc: func(token string) error {
|
||||
d.Addition.RefreshToken = token
|
||||
op.MustSaveDriverStorage(d)
|
||||
return nil
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// avoid a redundant login when the saved refresh token is still valid
|
||||
if d.Addition.RefreshToken == "" || !d.IsLogin() {
|
||||
as, err := d.NewAuthServiceWithOauth()
|
||||
if err != nil {
|
||||
d.GetStorage().SetStatus(fmt.Sprintf("%+v", err.Error()))
|
||||
return err
|
||||
}
|
||||
d.HalalCommon.AuthService = as
|
||||
d.SetTokenResp(as.tr)
|
||||
op.MustSaveDriverStorage(d)
|
||||
}
|
||||
var err error
|
||||
d.HalalCommon.serv, err = d.NewAuthService(d.Addition.RefreshToken)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *HalalCloud) Drop(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *HalalCloud) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||
return d.getFiles(ctx, dir)
|
||||
}
|
||||
|
||||
func (d *HalalCloud) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
return d.getLink(ctx, file, args)
|
||||
}
|
||||
|
||||
func (d *HalalCloud) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
|
||||
return d.makeDir(ctx, parentDir, dirName)
|
||||
}
|
||||
|
||||
func (d *HalalCloud) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
|
||||
return d.move(ctx, srcObj, dstDir)
|
||||
}
|
||||
|
||||
func (d *HalalCloud) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
|
||||
return d.rename(ctx, srcObj, newName)
|
||||
}
|
||||
|
||||
func (d *HalalCloud) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
|
||||
return d.copy(ctx, srcObj, dstDir)
|
||||
}
|
||||
|
||||
func (d *HalalCloud) Remove(ctx context.Context, obj model.Obj) error {
|
||||
return d.remove(ctx, obj)
|
||||
}
|
||||
|
||||
func (d *HalalCloud) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
|
||||
return d.put(ctx, dstDir, stream, up)
|
||||
}
|
||||
|
||||
func (d *HalalCloud) IsLogin() bool {
|
||||
if d.AuthService.tr == nil {
|
||||
return false
|
||||
}
|
||||
serv, err := d.NewAuthService(d.Addition.RefreshToken)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
|
||||
defer cancel()
|
||||
result, err := pbPublicUser.NewPubUserClient(serv.GetGrpcConnection()).Get(ctx, &pbPublicUser.User{
|
||||
Identity: "",
|
||||
})
|
||||
if result == nil || err != nil {
|
||||
return false
|
||||
}
|
||||
d.UserInfo.Identity = result.Identity
|
||||
d.UserInfo.CreateTs = result.CreateTs
|
||||
d.UserInfo.Name = result.Name
|
||||
d.UserInfo.UpdateTs = result.UpdateTs
|
||||
return true
|
||||
}
|
||||
|
||||
type HalalCommon struct {
|
||||
*Common
|
||||
*AuthService // login info
*UserInfo    // user info
|
||||
refreshTokenFunc func(token string) error
|
||||
serv *AuthService
|
||||
}
|
||||
|
||||
func (d *HalalCloud) SetTokenResp(tr *TokenResp) {
|
||||
d.Addition.RefreshToken = tr.RefreshToken
|
||||
}
|
||||
|
||||
func (d *HalalCloud) getFiles(ctx context.Context, dir model.Obj) ([]model.Obj, error) {
|
||||
|
||||
files := make([]model.Obj, 0)
|
||||
limit := int64(100)
|
||||
token := ""
|
||||
client := pubUserFile.NewPubUserFileClient(d.HalalCommon.serv.GetGrpcConnection())
|
||||
|
||||
opDir := d.GetCurrentDir(dir)
|
||||
|
||||
for {
|
||||
result, err := client.List(ctx, &pubUserFile.FileListRequest{
|
||||
Parent: &pubUserFile.File{Path: opDir},
|
||||
ListInfo: &common.ScanListRequest{
|
||||
Limit: limit,
|
||||
Token: token,
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for i := 0; len(result.Files) > i; i++ {
|
||||
files = append(files, (*Files)(result.Files[i]))
|
||||
}
|
||||
|
||||
if result.ListInfo == nil || result.ListInfo.Token == "" {
|
||||
break
|
||||
}
|
||||
token = result.ListInfo.Token
|
||||
|
||||
}
|
||||
return files, nil
|
||||
}
|
||||
|
||||
func (d *HalalCloud) getLink(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
|
||||
client := pubUserFile.NewPubUserFileClient(d.HalalCommon.serv.GetGrpcConnection())
|
||||
ctx1, cancelFunc := context.WithCancel(context.Background())
|
||||
defer cancelFunc()
|
||||
|
||||
result, err := client.ParseFileSlice(ctx1, (*pubUserFile.File)(file.(*Files)))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
fileAddrs := []*pubUserFile.SliceDownloadInfo{}
|
||||
var addressDuration int64
|
||||
|
||||
nodesNumber := len(result.RawNodes)
|
||||
nodesIndex := nodesNumber - 1
|
||||
startIndex, endIndex := 0, nodesIndex
|
||||
for nodesIndex >= 0 {
|
||||
if nodesIndex >= 200 {
|
||||
endIndex = 200
|
||||
} else {
|
||||
endIndex = nodesNumber
|
||||
}
|
||||
for ; endIndex <= nodesNumber; endIndex += 200 {
|
||||
if endIndex == 0 {
|
||||
endIndex = 1
|
||||
}
|
||||
sliceAddress, err := client.GetSliceDownloadAddress(ctx, &pubUserFile.SliceDownloadAddressRequest{
|
||||
Identity: result.RawNodes[startIndex:endIndex],
|
||||
Version: 1,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
addressDuration = sliceAddress.ExpireAt
|
||||
fileAddrs = append(fileAddrs, sliceAddress.Addresses...)
|
||||
startIndex = endIndex
|
||||
nodesIndex -= 200
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
size := result.FileSize
|
||||
chunks := getChunkSizes(result.Sizes)
|
||||
resultRangeReader := func(ctx context.Context, httpRange http_range.Range) (io.ReadCloser, error) {
|
||||
length := httpRange.Length
|
||||
if httpRange.Length >= 0 && httpRange.Start+httpRange.Length >= size {
|
||||
length = -1
|
||||
}
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("open download file failed: %w", err)
|
||||
}
|
||||
oo := &openObject{
|
||||
ctx: ctx,
|
||||
d: fileAddrs,
|
||||
chunk: &[]byte{},
|
||||
chunks: &chunks,
|
||||
skip: httpRange.Start,
|
||||
sha: result.Sha1,
|
||||
shaTemp: sha1.New(),
|
||||
}
|
||||
|
||||
return readers.NewLimitedReadCloser(oo, length), nil
|
||||
}
|
||||
|
||||
var duration time.Duration
|
||||
if addressDuration != 0 {
|
||||
duration = time.Until(time.UnixMilli(addressDuration))
|
||||
} else {
|
||||
duration = time.Until(time.Now().Add(time.Hour))
|
||||
}
|
||||
|
||||
resultRangeReadCloser := &model.RangeReadCloser{RangeReader: resultRangeReader}
|
||||
return &model.Link{
|
||||
RangeReadCloser: resultRangeReadCloser,
|
||||
Expiration: &duration,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (d *HalalCloud) makeDir(ctx context.Context, dir model.Obj, name string) (model.Obj, error) {
|
||||
newDir := userfile.NewFormattedPath(d.GetCurrentOpDir(dir, []string{name}, 0)).GetPath()
|
||||
_, err := pubUserFile.NewPubUserFileClient(d.HalalCommon.serv.GetGrpcConnection()).Create(ctx, &pubUserFile.File{
|
||||
Path: newDir,
|
||||
})
|
||||
return nil, err
|
||||
}
|
||||
|
||||
func (d *HalalCloud) move(ctx context.Context, obj model.Obj, dir model.Obj) (model.Obj, error) {
|
||||
oldDir := userfile.NewFormattedPath(d.GetCurrentDir(obj)).GetPath()
|
||||
newDir := userfile.NewFormattedPath(d.GetCurrentDir(dir)).GetPath()
|
||||
_, err := pubUserFile.NewPubUserFileClient(d.HalalCommon.serv.GetGrpcConnection()).Move(ctx, &pubUserFile.BatchOperationRequest{
|
||||
Source: []*pubUserFile.File{
|
||||
{
|
||||
Identity: obj.GetID(),
|
||||
Path: oldDir,
|
||||
},
|
||||
},
|
||||
Dest: &pubUserFile.File{
|
||||
Identity: dir.GetID(),
|
||||
Path: newDir,
|
||||
},
|
||||
})
|
||||
return nil, err
|
||||
}
|
||||
|
||||
func (d *HalalCloud) rename(ctx context.Context, obj model.Obj, name string) (model.Obj, error) {
|
||||
id := obj.GetID()
|
||||
newPath := userfile.NewFormattedPath(d.GetCurrentOpDir(obj, []string{name}, 0)).GetPath()
|
||||
|
||||
_, err := pubUserFile.NewPubUserFileClient(d.HalalCommon.serv.GetGrpcConnection()).Rename(ctx, &pubUserFile.File{
|
||||
Path: newPath,
|
||||
Identity: id,
|
||||
Name: name,
|
||||
})
|
||||
return nil, err
|
||||
}
|
||||
|
||||
func (d *HalalCloud) copy(ctx context.Context, obj model.Obj, dir model.Obj) (model.Obj, error) {
|
||||
id := obj.GetID()
|
||||
sourcePath := userfile.NewFormattedPath(d.GetCurrentDir(obj)).GetPath()
|
||||
if len(id) > 0 {
|
||||
sourcePath = ""
|
||||
}
|
||||
dest := &pubUserFile.File{
|
||||
Identity: dir.GetID(),
|
||||
Path: userfile.NewFormattedPath(d.GetCurrentDir(dir)).GetPath(),
|
||||
}
|
||||
_, err := pubUserFile.NewPubUserFileClient(d.HalalCommon.serv.GetGrpcConnection()).Copy(ctx, &pubUserFile.BatchOperationRequest{
|
||||
Source: []*pubUserFile.File{
|
||||
{
|
||||
Path: sourcePath,
|
||||
Identity: id,
|
||||
},
|
||||
},
|
||||
Dest: dest,
|
||||
})
|
||||
return nil, err
|
||||
}
|
||||
|
||||
func (d *HalalCloud) remove(ctx context.Context, obj model.Obj) error {
|
||||
id := obj.GetID()
|
||||
newPath := userfile.NewFormattedPath(d.GetCurrentDir(obj)).GetPath()
|
||||
//if len(id) > 0 {
|
||||
// newPath = ""
|
||||
//}
|
||||
_, err := pubUserFile.NewPubUserFileClient(d.HalalCommon.serv.GetGrpcConnection()).Delete(ctx, &pubUserFile.BatchOperationRequest{
|
||||
Source: []*pubUserFile.File{
|
||||
{
|
||||
Path: newPath,
|
||||
Identity: id,
|
||||
},
|
||||
},
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *HalalCloud) put(ctx context.Context, dstDir model.Obj, fileStream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
|
||||
|
||||
newDir := path.Join(dstDir.GetPath(), fileStream.GetName())
|
||||
|
||||
result, err := pubUserFile.NewPubUserFileClient(d.HalalCommon.serv.GetGrpcConnection()).CreateUploadToken(ctx, &pubUserFile.File{
|
||||
Path: newDir,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
u, _ := url.Parse(result.Endpoint)
|
||||
u.Host = "s3." + u.Host
|
||||
result.Endpoint = u.String()
|
||||
s, err := session.NewSession(&aws.Config{
|
||||
HTTPClient: base.HttpClient,
|
||||
Credentials: credentials.NewStaticCredentials(result.AccessKey, result.SecretKey, result.Token),
|
||||
Region: aws.String(result.Region),
|
||||
Endpoint: aws.String(result.Endpoint),
|
||||
S3ForcePathStyle: aws.Bool(true),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
uploader := s3manager.NewUploader(s, func(u *s3manager.Uploader) {
|
||||
u.Concurrency = d.uploadThread
|
||||
})
|
||||
if fileStream.GetSize() > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
|
||||
uploader.PartSize = fileStream.GetSize() / (s3manager.MaxUploadParts - 1)
|
||||
}
|
||||
_, err = uploader.UploadWithContext(ctx, &s3manager.UploadInput{
|
||||
Bucket: aws.String(result.Bucket),
|
||||
Key: aws.String(result.Key),
|
||||
Body: io.TeeReader(fileStream, driver.NewProgress(fileStream.GetSize(), up)),
|
||||
})
|
||||
return nil, err
|
||||
|
||||
}
|
||||
|
||||
var _ driver.Driver = (*HalalCloud)(nil)
|
38 drivers/halalcloud/meta.go Normal file
@@ -0,0 +1,38 @@
|
||||
package halalcloud
|
||||
|
||||
import (
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
)
|
||||
|
||||
type Addition struct {
|
||||
// Usually one of two
|
||||
driver.RootPath
|
||||
// define other
|
||||
RefreshToken string `json:"refresh_token" required:"true" help:"login type is refresh_token,this is required"`
|
||||
UploadThread string `json:"upload_thread" default:"3" help:"1 <= thread <= 32"`
|
||||
|
||||
AppID string `json:"app_id" required:"true" default:"alist/10001"`
|
||||
AppVersion string `json:"app_version" required:"true" default:"1.0.0"`
|
||||
AppSecret string `json:"app_secret" required:"true" default:"bR4SJwOkvnG5WvVJ"`
|
||||
}
|
||||
|
||||
var config = driver.Config{
|
||||
Name: "HalalCloud",
|
||||
LocalSort: false,
|
||||
OnlyLocal: true,
|
||||
OnlyProxy: true,
|
||||
NoCache: false,
|
||||
NoUpload: false,
|
||||
NeedMs: false,
|
||||
DefaultRoot: "/",
|
||||
CheckStatus: false,
|
||||
Alert: "",
|
||||
NoOverwriteUpload: false,
|
||||
}
|
||||
|
||||
func init() {
|
||||
op.RegisterDriver(func() driver.Driver {
|
||||
return &HalalCloud{}
|
||||
})
|
||||
}
|
52 drivers/halalcloud/options.go Normal file
@@ -0,0 +1,52 @@
package halalcloud

import "google.golang.org/grpc"

func defaultOptions() halalOptions {
    return halalOptions{
        // onRefreshTokenRefreshed: func(string) {},
        grpcOptions: []grpc.DialOption{
            grpc.WithDefaultCallOptions(grpc.MaxCallRecvMsgSize(1024 * 1024 * 32)),
            // grpc.WithMaxMsgSize(1024 * 1024 * 1024),
        },
    }
}

type HalalOption interface {
    apply(*halalOptions)
}

// halalOptions configure a RPC call. halalOptions are set by the HalalOption
// values passed to Dial.
type halalOptions struct {
    onTokenRefreshed func(accessToken string, accessTokenExpiredAt int64, refreshToken string, refreshTokenExpiredAt int64)
    grpcOptions      []grpc.DialOption
}

// funcDialOption wraps a function that modifies halalOptions into an
// implementation of the DialOption interface.
type funcDialOption struct {
    f func(*halalOptions)
}

func (fdo *funcDialOption) apply(do *halalOptions) {
    fdo.f(do)
}

func newFuncDialOption(f func(*halalOptions)) *funcDialOption {
    return &funcDialOption{
        f: f,
    }
}

func WithRefreshTokenRefreshedCallback(s func(accessToken string, accessTokenExpiredAt int64, refreshToken string, refreshTokenExpiredAt int64)) HalalOption {
    return newFuncDialOption(func(o *halalOptions) {
        o.onTokenRefreshed = s
    })
}

func WithGrpcDialOptions(opts ...grpc.DialOption) HalalOption {
    return newFuncDialOption(func(o *halalOptions) {
        o.grpcOptions = opts
    })
}
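These are standard functional options: each With... constructor wraps a mutation of halalOptions, and a consumer applies them on top of defaultOptions(). A minimal sketch of that wiring, not part of the diff; newAuthServiceSketch is an illustrative name, and the real driver applies options inside NewAuthService/NewAuthServiceWithOauth in util.go:

package halalcloud

import "google.golang.org/grpc"

// newAuthServiceSketch shows how the options above compose.
func newAuthServiceSketch(opts ...HalalOption) halalOptions {
    dopts := defaultOptions() // start from the defaults defined above
    for _, o := range opts {
        o.apply(&dopts) // each option mutates the settings struct
    }
    return dopts
}

var _ = newAuthServiceSketch(
    WithGrpcDialOptions(grpc.WithDefaultCallOptions(grpc.MaxCallRecvMsgSize(16*1024*1024))),
    WithRefreshTokenRefreshedCallback(func(at string, atExp int64, rt string, rtExp int64) {
        // persist refreshed tokens here
    }),
)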
101  drivers/halalcloud/types.go  Normal file
@@ -0,0 +1,101 @@
package halalcloud

import (
    "github.com/alist-org/alist/v3/internal/model"
    "github.com/alist-org/alist/v3/pkg/utils"
    "github.com/city404/v6-public-rpc-proto/go/v6/common"
    pubUserFile "github.com/city404/v6-public-rpc-proto/go/v6/userfile"
    "google.golang.org/grpc"
    "time"
)

type AuthService struct {
    appID          string
    appVersion     string
    appSecret      string
    grpcConnection *grpc.ClientConn
    dopts          halalOptions
    tr             *TokenResp
}

type TokenResp struct {
    AccessToken           string `json:"accessToken,omitempty"`
    AccessTokenExpiredAt  int64  `json:"accessTokenExpiredAt,omitempty"`
    RefreshToken          string `json:"refreshToken,omitempty"`
    RefreshTokenExpiredAt int64  `json:"refreshTokenExpiredAt,omitempty"`
}

type UserInfo struct {
    Identity string `json:"identity,omitempty"`
    UpdateTs int64  `json:"updateTs,omitempty"`
    Name     string `json:"name,omitempty"`
    CreateTs int64  `json:"createTs,omitempty"`
}

type OrderByInfo struct {
    Field string `json:"field,omitempty"`
    Asc   bool   `json:"asc,omitempty"`
}

type ListInfo struct {
    Token   string         `json:"token,omitempty"`
    Limit   int64          `json:"limit,omitempty"`
    OrderBy []*OrderByInfo `json:"order_by,omitempty"`
    Version int32          `json:"version,omitempty"`
}

type FilesList struct {
    Files    []*Files                `json:"files,omitempty"`
    ListInfo *common.ScanListRequest `json:"list_info,omitempty"`
}

var _ model.Obj = (*Files)(nil)

type Files pubUserFile.File

func (f *Files) GetSize() int64 {
    return f.Size
}

func (f *Files) GetName() string {
    return f.Name
}

func (f *Files) ModTime() time.Time {
    return time.UnixMilli(f.UpdateTs)
}

func (f *Files) CreateTime() time.Time {
    return time.UnixMilli(f.UpdateTs)
}

func (f *Files) IsDir() bool {
    return f.Dir
}

func (f *Files) GetHash() utils.HashInfo {
    return utils.HashInfo{}
}

func (f *Files) GetID() string {
    if len(f.Identity) == 0 {
        f.Identity = "/"
    }
    return f.Identity
}

func (f *Files) GetPath() string {
    return f.Path
}

type SteamFile struct {
    file model.File
}

func (s *SteamFile) Read(p []byte) (n int, err error) {
    return s.file.Read(p)
}

func (s *SteamFile) Close() error {
    return s.file.Close()
}
385  drivers/halalcloud/util.go  Normal file
@@ -0,0 +1,385 @@
package halalcloud

import (
    "bytes"
    "context"
    "crypto/md5"
    "crypto/tls"
    "encoding/hex"
    "errors"
    "fmt"
    "github.com/alist-org/alist/v3/internal/model"
    "github.com/alist-org/alist/v3/pkg/utils"
    pbPublicUser "github.com/city404/v6-public-rpc-proto/go/v6/user"
    pubUserFile "github.com/city404/v6-public-rpc-proto/go/v6/userfile"
    "github.com/google/uuid"
    "github.com/ipfs/go-cid"
    "google.golang.org/grpc"
    "google.golang.org/grpc/codes"
    "google.golang.org/grpc/credentials"
    "google.golang.org/grpc/metadata"
    "google.golang.org/grpc/status"
    "hash"
    "io"
    "net/http"
    "strconv"
    "strings"
    "sync"
    "time"
)

const (
    AppID      = "alist/10001"
    AppVersion = "1.0.0"
    AppSecret  = "bR4SJwOkvnG5WvVJ"
)

const (
    grpcServer     = "grpcuserapi.2dland.cn:443"
    grpcServerAuth = "grpcuserapi.2dland.cn"
)

func (d *HalalCloud) NewAuthServiceWithOauth(options ...HalalOption) (*AuthService, error) {

    aService := &AuthService{}
    err2 := errors.New("")

    svc := d.HalalCommon.AuthService
    for _, opt := range options {
        opt.apply(&svc.dopts)
    }

    grpcOptions := svc.dopts.grpcOptions
    grpcOptions = append(grpcOptions, grpc.WithAuthority(grpcServerAuth), grpc.WithTransportCredentials(credentials.NewTLS(&tls.Config{})), grpc.WithUnaryInterceptor(func(ctx context.Context, method string, req, reply interface{}, cc *grpc.ClientConn, invoker grpc.UnaryInvoker, opts ...grpc.CallOption) error {
        ctxx := svc.signContext(method, ctx)
        err := invoker(ctxx, method, req, reply, cc, opts...) // invoking RPC method
        return err
    }))

    grpcConnection, err := grpc.NewClient(grpcServer, grpcOptions...)
    if err != nil {
        return nil, err
    }
    defer grpcConnection.Close()
    userClient := pbPublicUser.NewPubUserClient(grpcConnection)
    ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
    defer cancel()
    stateString := uuid.New().String()
    // queryValues.Add("callback", oauthToken.Callback)
    oauthToken, err := userClient.CreateAuthToken(ctx, &pbPublicUser.LoginRequest{
        ReturnType: 2,
        State:      stateString,
        ReturnUrl:  "",
    })
    if err != nil {
        return nil, err
    }
    if len(oauthToken.State) < 1 {
        oauthToken.State = stateString
    }

    if oauthToken.Url != "" {

        return nil, fmt.Errorf(`need verify: <a target="_blank" href="%s">Click Here</a>`, oauthToken.Url)
    }

    return aService, err2

}

func (d *HalalCloud) NewAuthService(refreshToken string, options ...HalalOption) (*AuthService, error) {
    svc := d.HalalCommon.AuthService

    if len(refreshToken) < 1 {
        refreshToken = d.Addition.RefreshToken
    }

    if len(d.tr.AccessToken) > 0 {
        accessTokenExpiredAt := d.tr.AccessTokenExpiredAt
        current := time.Now().UnixMilli()
        if accessTokenExpiredAt < current {
            // access token expired
            d.tr.AccessToken = ""
            d.tr.AccessTokenExpiredAt = 0
        } else {
            svc.tr.AccessTokenExpiredAt = accessTokenExpiredAt
            svc.tr.AccessToken = d.tr.AccessToken
        }
    }

    for _, opt := range options {
        opt.apply(&svc.dopts)
    }

    grpcOptions := svc.dopts.grpcOptions
    grpcOptions = append(grpcOptions, grpc.WithDefaultCallOptions(grpc.MaxCallSendMsgSize(10*1024*1024), grpc.MaxCallRecvMsgSize(10*1024*1024)), grpc.WithAuthority(grpcServerAuth), grpc.WithTransportCredentials(credentials.NewTLS(&tls.Config{})), grpc.WithUnaryInterceptor(func(ctx context.Context, method string, req, reply interface{}, cc *grpc.ClientConn, invoker grpc.UnaryInvoker, opts ...grpc.CallOption) error {
        ctxx := svc.signContext(method, ctx)
        err := invoker(ctxx, method, req, reply, cc, opts...) // invoking RPC method
        if err != nil {
            grpcStatus, ok := status.FromError(err)

            if ok && grpcStatus.Code() == codes.Unauthenticated && strings.Contains(grpcStatus.Err().Error(), "invalid accesstoken") && len(refreshToken) > 0 {
                // refresh token
                refreshResponse, err := pbPublicUser.NewPubUserClient(cc).Refresh(ctx, &pbPublicUser.Token{
                    RefreshToken: refreshToken,
                })
                if err != nil {
                    return err
                }
                if len(refreshResponse.AccessToken) > 0 {
                    svc.tr.AccessToken = refreshResponse.AccessToken
                    svc.tr.AccessTokenExpiredAt = refreshResponse.AccessTokenExpireTs
                    svc.OnAccessTokenRefreshed(refreshResponse.AccessToken, refreshResponse.AccessTokenExpireTs, refreshResponse.RefreshToken, refreshResponse.RefreshTokenExpireTs)
                }
                // retry
                ctxx := svc.signContext(method, ctx)
                err = invoker(ctxx, method, req, reply, cc, opts...) // invoking RPC method
                if err != nil {
                    return err
                } else {
                    return nil
                }
            }
        }
        return err
    }))
    grpcConnection, err := grpc.NewClient(grpcServer, grpcOptions...)

    if err != nil {
        return nil, err
    }

    svc.grpcConnection = grpcConnection
    return svc, err
}

func (s *AuthService) OnAccessTokenRefreshed(accessToken string, accessTokenExpiredAt int64, refreshToken string, refreshTokenExpiredAt int64) {
    s.tr.AccessToken = accessToken
    s.tr.AccessTokenExpiredAt = accessTokenExpiredAt
    s.tr.RefreshToken = refreshToken
    s.tr.RefreshTokenExpiredAt = refreshTokenExpiredAt

    if s.dopts.onTokenRefreshed != nil {
        s.dopts.onTokenRefreshed(accessToken, accessTokenExpiredAt, refreshToken, refreshTokenExpiredAt)
    }

}

func (s *AuthService) GetGrpcConnection() *grpc.ClientConn {
    return s.grpcConnection
}

func (s *AuthService) Close() {
    _ = s.grpcConnection.Close()
}

func (s *AuthService) signContext(method string, ctx context.Context) context.Context {
    var kvString []string
    currentTimeStamp := strconv.FormatInt(time.Now().UnixMilli(), 10)
    bufferedString := bytes.NewBufferString(method)
    kvString = append(kvString, "timestamp", currentTimeStamp)
    bufferedString.WriteString(currentTimeStamp)
    kvString = append(kvString, "appid", s.appID)
    bufferedString.WriteString(s.appID)
    kvString = append(kvString, "appversion", s.appVersion)
    bufferedString.WriteString(s.appVersion)
    if s.tr != nil && len(s.tr.AccessToken) > 0 {
        authorization := "Bearer " + s.tr.AccessToken
        kvString = append(kvString, "authorization", authorization)
        bufferedString.WriteString(authorization)
    }
    bufferedString.WriteString(s.appSecret)
    sign := GetMD5Hash(bufferedString.String())
    kvString = append(kvString, "sign", sign)
    return metadata.AppendToOutgoingContext(ctx, kvString...)
}
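signContext signs every RPC by MD5-hashing the method name, timestamp, appid, appversion, the Bearer authorization (when an access token is held) and the app secret, then sends those same fields plus the sign as gRPC metadata. A standalone sketch of just the signature step, not part of the diff; all values below are placeholders:

package main

import (
    "crypto/md5"
    "encoding/hex"
    "fmt"
    "strconv"
    "time"
)

// sign reproduces the digest that signContext attaches as the "sign" metadata key.
func sign(method, appID, appVersion, appSecret, accessToken string) (timestamp, signature string) {
    timestamp = strconv.FormatInt(time.Now().UnixMilli(), 10)
    payload := method + timestamp + appID + appVersion
    if accessToken != "" {
        payload += "Bearer " + accessToken // only when an access token is held
    }
    payload += appSecret
    sum := md5.Sum([]byte(payload))
    return timestamp, hex.EncodeToString(sum[:])
}

func main() {
    ts, sig := sign("/v6.services.pub.PubUser/Refresh", "alist/10001", "1.0.0", "example-secret", "")
    // the driver sends timestamp, appid, appversion, (authorization,) and sign as metadata keys
    fmt.Println(ts, sig)
}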

func (d *HalalCloud) GetCurrentOpDir(dir model.Obj, args []string, index int) string {
    currentDir := dir.GetPath()
    if len(currentDir) == 0 {
        currentDir = "/"
    }
    opPath := currentDir + "/" + args[index]
    if strings.HasPrefix(args[index], "/") {
        opPath = args[index]
    }
    return opPath
}

func (d *HalalCloud) GetCurrentDir(dir model.Obj) string {
    currentDir := dir.GetPath()
    if len(currentDir) == 0 {
        currentDir = "/"
    }
    return currentDir
}

type Common struct {
}

func getRawFiles(addr *pubUserFile.SliceDownloadInfo) ([]byte, error) {

    if addr == nil {
        return nil, errors.New("addr is nil")
    }

    client := http.Client{
        Timeout: time.Duration(60 * time.Second), // set a 60-second timeout
    }
    resp, err := client.Get(addr.DownloadAddress)
    if err != nil {

        return nil, err
    }
    defer resp.Body.Close()
    body, err := io.ReadAll(resp.Body)
    if err != nil {
        return nil, err
    }
    if resp.StatusCode != http.StatusOK {
        return nil, fmt.Errorf("bad status: %s, body: %s", resp.Status, body)
    }

    if addr.Encrypt > 0 {
        cd := uint8(addr.Encrypt)
        for idx := 0; idx < len(body); idx++ {
            body[idx] = body[idx] ^ cd
        }
    }

    if addr.StoreType != 10 {

        sourceCid, err := cid.Decode(addr.Identity)
        if err != nil {
            return nil, err
        }
        checkCid, err := sourceCid.Prefix().Sum(body)
        if err != nil {
            return nil, err
        }
        if !checkCid.Equals(sourceCid) {
            return nil, fmt.Errorf("bad cid: %s, body: %s", checkCid.String(), body)
        }
    }

    return body, nil

}
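getRawFiles undoes a single-byte XOR obfuscation when addr.Encrypt is set, then re-hashes the payload with the expected CID's multihash prefix to verify integrity. A small sketch of those two steps in isolation, not part of the diff; the key and payload are invented:

package main

import (
    "fmt"

    "github.com/ipfs/go-cid"
)

// xorDecode undoes the single-byte XOR obfuscation (addr.Encrypt above).
func xorDecode(body []byte, key uint8) {
    for i := range body {
        body[i] ^= key
    }
}

// verifyCID re-hashes the payload with the same multihash prefix as the
// expected CID and compares the results, as getRawFiles does for slices
// whose StoreType is not 10.
func verifyCID(expected string, body []byte) error {
    sourceCid, err := cid.Decode(expected)
    if err != nil {
        return err
    }
    checkCid, err := sourceCid.Prefix().Sum(body)
    if err != nil {
        return err
    }
    if !checkCid.Equals(sourceCid) {
        return fmt.Errorf("cid mismatch: got %s, want %s", checkCid, sourceCid)
    }
    return nil
}

func main() {
    data := []byte{0x68 ^ 0x2a, 0x69 ^ 0x2a} // "hi" obfuscated with key 0x2a
    xorDecode(data, 0x2a)
    fmt.Println(string(data)) // prints "hi"
}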

type openObject struct {
    ctx     context.Context
    mu      sync.Mutex
    d       []*pubUserFile.SliceDownloadInfo
    id      int
    skip    int64
    chunk   *[]byte
    chunks  *[]chunkSize
    closed  bool
    sha     string
    shaTemp hash.Hash
}

// get the next chunk
func (oo *openObject) getChunk(ctx context.Context) (err error) {
    if oo.id >= len(*oo.chunks) {
        return io.EOF
    }
    var chunk []byte
    err = utils.Retry(3, time.Second, func() (err error) {
        chunk, err = getRawFiles(oo.d[oo.id])
        return err
    })
    if err != nil {
        return err
    }
    oo.id++
    oo.chunk = &chunk
    return nil
}

// Read reads up to len(p) bytes into p.
func (oo *openObject) Read(p []byte) (n int, err error) {
    oo.mu.Lock()
    defer oo.mu.Unlock()
    if oo.closed {
        return 0, fmt.Errorf("read on closed file")
    }
    // Skip data at the start if requested
    for oo.skip > 0 {
        //size := 1024 * 1024
        _, size, err := oo.ChunkLocation(oo.id)
        if err != nil {
            return 0, err
        }
        if oo.skip < int64(size) {
            break
        }
        oo.id++
        oo.skip -= int64(size)
    }
    if len(*oo.chunk) == 0 {
        err = oo.getChunk(oo.ctx)
        if err != nil {
            return 0, err
        }
        if oo.skip > 0 {
            *oo.chunk = (*oo.chunk)[oo.skip:]
            oo.skip = 0
        }
    }
    n = copy(p, *oo.chunk)
    *oo.chunk = (*oo.chunk)[n:]

    oo.shaTemp.Write(*oo.chunk)

    return n, nil
}

// Close closes the file - hash mismatches are reported here
func (oo *openObject) Close() (err error) {
    oo.mu.Lock()
    defer oo.mu.Unlock()
    if oo.closed {
        return nil
    }
    // verify the SHA-1
    if string(oo.shaTemp.Sum(nil)) != oo.sha {
        return fmt.Errorf("failed to finish download: %w", err)
    }

    oo.closed = true
    return nil
}

func GetMD5Hash(text string) string {
    tHash := md5.Sum([]byte(text))
    return hex.EncodeToString(tHash[:])
}

// chunkSize describes a size and position of chunk
type chunkSize struct {
    position int64
    size     int
}

func getChunkSizes(sliceSize []*pubUserFile.SliceSize) (chunks []chunkSize) {
    chunks = make([]chunkSize, 0)
    for _, s := range sliceSize {
        // special-case the last slice
        if s.EndIndex == 0 {
            s.EndIndex = s.StartIndex
        }
        for j := s.StartIndex; j <= s.EndIndex; j++ {
            chunks = append(chunks, chunkSize{position: j, size: int(s.Size)})
        }
    }
    return chunks
}

func (oo *openObject) ChunkLocation(id int) (position int64, size int, err error) {
    if id < 0 || id >= len(*oo.chunks) {
        return 0, 0, errors.New("invalid arguments")
    }

    return (*oo.chunks)[id].position, (*oo.chunks)[id].size, nil
}
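getChunkSizes expands the server's run-length encoded slice sizes into one chunkSize entry per slice, which ChunkLocation then serves to Read's skip loop. A small worked sketch of that expansion, not part of the diff; the slice sizes are invented and the anonymous struct stands in for pubUserFile.SliceSize:

package main

import "fmt"

type chunkSize struct {
    position int64
    size     int
}

// expand mirrors getChunkSizes: each entry says "slices StartIndex..EndIndex
// all have Size bytes"; EndIndex==0 means the entry covers a single slice.
func expand(ranges []struct {
    StartIndex, EndIndex int64
    Size                 int64
}) []chunkSize {
    chunks := make([]chunkSize, 0)
    for _, s := range ranges {
        if s.EndIndex == 0 {
            s.EndIndex = s.StartIndex
        }
        for j := s.StartIndex; j <= s.EndIndex; j++ {
            chunks = append(chunks, chunkSize{position: j, size: int(s.Size)})
        }
    }
    return chunks
}

func main() {
    chunks := expand([]struct {
        StartIndex, EndIndex int64
        Size                 int64
    }{
        {0, 2, 4 << 20}, // slices 0..2 are 4 MiB each
        {3, 0, 1 << 20}, // trailing slice 3 is 1 MiB
    })
    fmt.Println(len(chunks), chunks[3].size) // 4 chunks; the last one is 1 MiB
}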
@@ -67,26 +67,28 @@ func (d *ILanZou) Drop(ctx context.Context) error {

func (d *ILanZou) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
    offset := 1
    limit := 60
    var res []ListItem
    for {
        var resp ListResp
        _, err := d.proved("/record/file/list", http.MethodGet, func(req *resty.Request) {
            req.SetQueryParams(map[string]string{
                "type":     "0",
                "folderId": dir.GetID(),
                "offset":   strconv.Itoa(offset),
                "limit":    strconv.Itoa(limit),
            }).SetResult(&resp)
            params := []string{
                "offset=" + strconv.Itoa(offset),
                "limit=60",
                "folderId=" + dir.GetID(),
                "type=0",
            }
            queryString := strings.Join(params, "&")
            req.SetQueryString(queryString).SetResult(&resp)
        })
        if err != nil {
            return nil, err
        }
        res = append(res, resp.List...)
        if resp.TotalPage <= resp.Offset {
        if resp.Offset < resp.TotalPage {
            offset++
        } else {
            break
        }
        offset++
    }
    return utils.SliceConvert(res, func(f ListItem) (model.Obj, error) {
        updTime, err := time.ParseInLocation("2006-01-02 15:04:05", f.UpdTime, time.Local)
@@ -118,36 +120,39 @@ func (d *ILanZou) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
    if err != nil {
        return nil, err
    }
    query := u.Query()
    query.Set("uuid", d.UUID)
    query.Set("devType", "6")
    query.Set("devCode", d.UUID)
    query.Set("devModel", "chrome")
    query.Set("devVersion", d.conf.devVersion)
    query.Set("appVersion", "")
    ts, err := getTimestamp(d.conf.secret)
    if err != nil {
        return nil, err
    ts, ts_str, err := getTimestamp(d.conf.secret)

    params := []string{
        "uuid=" + url.QueryEscape(d.UUID),
        "devType=6",
        "devCode=" + url.QueryEscape(d.UUID),
        "devModel=chrome",
        "devVersion=" + url.QueryEscape(d.conf.devVersion),
        "appVersion=",
        "timestamp=" + ts_str,
        "appToken=" + url.QueryEscape(d.Token),
        "enable=0",
    }
    query.Set("timestamp", ts)
    query.Set("appToken", d.Token)
    query.Set("enable", "1")

    downloadId, err := mopan.AesEncrypt([]byte(fmt.Sprintf("%s|%s", file.GetID(), d.userID)), d.conf.secret)
    if err != nil {
        return nil, err
    }
    query.Set("downloadId", hex.EncodeToString(downloadId))
    auth, err := mopan.AesEncrypt([]byte(fmt.Sprintf("%s|%d", file.GetID(), time.Now().UnixMilli())), d.conf.secret)
    params = append(params, "downloadId="+url.QueryEscape(hex.EncodeToString(downloadId)))

    auth, err := mopan.AesEncrypt([]byte(fmt.Sprintf("%s|%d", file.GetID(), ts)), d.conf.secret)
    if err != nil {
        return nil, err
    }
    query.Set("auth", hex.EncodeToString(auth))
    u.RawQuery = query.Encode()
    params = append(params, "auth="+url.QueryEscape(hex.EncodeToString(auth)))

    u.RawQuery = strings.Join(params, "&")
    realURL := u.String()
    // get the url after redirect
    res, err := base.NoRedirectClient.R().SetHeaders(map[string]string{
        //"Origin": d.conf.site,
        "Referer": d.conf.site + "/",
        "Referer":    d.conf.site + "/",
        "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36 Edg/125.0.0.0",
    }).Get(realURL)
    if err != nil {
        return nil, err
@@ -155,7 +160,7 @@ func (d *ILanZou) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
    if res.StatusCode() == 302 {
        realURL = res.Header().Get("location")
    } else {
        return nil, fmt.Errorf("redirect failed, status: %d", res.StatusCode())
        return nil, fmt.Errorf("redirect failed, status: %d, msg: %s", res.StatusCode(), utils.Json.Get(res.Body(), "msg").ToString())
    }
    link := model.Link{URL: realURL}
    return &link, nil
@@ -173,7 +178,7 @@ func (d *ILanZou) MakeDir(ctx context.Context, parentDir model.Obj, dirName stri
        return nil, err
    }
    return &model.Object{
        ID: utils.Json.Get(res, "list", "0", "id").ToString(),
        ID: utils.Json.Get(res, "list", 0, "id").ToString(),
        //Path: "",
        Name: dirName,
        Size: 0,
@@ -284,7 +289,7 @@ func (d *ILanZou) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
        req.SetBody(base.Json{
            "fileId":   "",
            "fileName": stream.GetName(),
            "fileSize": stream.GetSize() / 1024,
            "fileSize": stream.GetSize()/1024 + 1,
            "folderId": dstDir.GetID(),
            "md5":      etag,
            "type":     1,
@@ -342,10 +347,12 @@ func (d *ILanZou) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
    var resp UploadResultResp
    for i := 0; i < 10; i++ {
        _, err = d.unproved("/7n/results", http.MethodPost, func(req *resty.Request) {
            req.SetQueryParams(map[string]string{
                "tokenList": token,
                "tokenTime": time.Now().Format("Mon Jan 02 2006 15:04:05 GMT-0700 (MST)"),
            }).SetResult(&resp)
            params := []string{
                "tokenList=" + token,
                "tokenTime=" + time.Now().Format("Mon Jan 02 2006 15:04:05 GMT-0700 (MST)"),
            }
            queryString := strings.Join(params, "&")
            req.SetQueryString(queryString).SetResult(&resp)
        })
        if err != nil {
            return nil, err
@@ -46,7 +46,7 @@ func init() {
            bucket:   "wpanstore-lanzou",
            unproved: "unproved",
            proved:   "proved",
            devVersion: "122",
            devVersion: "125",
            site:       "https://www.ilanzou.com",
        },
    }
@@ -72,7 +72,7 @@ func init() {
            bucket:   "wpanstore",
            unproved: "ws",
            proved:   "app",
            devVersion: "121",
            devVersion: "125",
            site:       "https://www.feijipan.com",
        },
    }
@@ -4,7 +4,9 @@ import (
    "encoding/hex"
    "fmt"
    "net/http"
    "net/url"
    "strconv"
    "strings"
    "time"

    "github.com/alist-org/alist/v3/drivers/base"
@@ -31,44 +33,53 @@ func (d *ILanZou) login() error {
    return nil
}

func getTimestamp(secret []byte) (string, error) {
func getTimestamp(secret []byte) (int64, string, error) {
    ts := time.Now().UnixMilli()
    tsStr := strconv.FormatInt(ts, 10)
    res, err := mopan.AesEncrypt([]byte(tsStr), secret)
    if err != nil {
        return "", err
        return 0, "", err
    }
    return hex.EncodeToString(res), nil
    return ts, hex.EncodeToString(res), nil
}
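The reworked getTimestamp now returns both the raw millisecond timestamp (reused later for the auth payload) and its encrypted hex form (sent as the timestamp query parameter). A sketch of that contract, not part of the diff; the encrypt parameter stands in for mopan.AesEncrypt, whose cipher mode is not reproduced here:

package main

import (
    "encoding/hex"
    "fmt"
    "strconv"
    "time"
)

// getTimestampSketch returns the raw timestamp plus an encrypted, hex-encoded form.
func getTimestampSketch(secret []byte, encrypt func(data, key []byte) ([]byte, error)) (int64, string, error) {
    ts := time.Now().UnixMilli()
    enc, err := encrypt([]byte(strconv.FormatInt(ts, 10)), secret)
    if err != nil {
        return 0, "", err
    }
    return ts, hex.EncodeToString(enc), nil
}

func main() {
    // identity "encryption" just to keep the sketch runnable
    ts, tsStr, _ := getTimestampSketch([]byte("secret"), func(d, _ []byte) ([]byte, error) { return d, nil })
    fmt.Println(ts, tsStr) // the caller puts tsStr into "timestamp=..." and reuses ts for the auth payload
}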

func (d *ILanZou) request(pathname, method string, callback base.ReqCallback, proved bool, retry ...bool) ([]byte, error) {
    req := base.RestyClient.R()
    ts, err := getTimestamp(d.conf.secret)
    _, ts_str, err := getTimestamp(d.conf.secret)
    if err != nil {
        return nil, err
    }
    req.SetQueryParams(map[string]string{
        "uuid":       d.UUID,
        "devType":    "6",
        "devCode":    d.UUID,
        "devModel":   "chrome",
        "devVersion": d.conf.devVersion,
        "appVersion": "",
        "timestamp":  ts,
        //"appToken": d.Token,
        "extra": "2",
    })
    req.SetHeaders(map[string]string{
        "Origin":  d.conf.site,
        "Referer": d.conf.site + "/",
    })
    if proved {
        req.SetQueryParam("appToken", d.Token)

    params := []string{
        "uuid=" + url.QueryEscape(d.UUID),
        "devType=6",
        "devCode=" + url.QueryEscape(d.UUID),
        "devModel=chrome",
        "devVersion=" + url.QueryEscape(d.conf.devVersion),
        "appVersion=",
        "timestamp=" + ts_str,
    }

    if proved {
        params = append(params, "appToken="+url.QueryEscape(d.Token))
    }

    params = append(params, "extra=2")

    queryString := strings.Join(params, "&")

    req := base.RestyClient.R()
    req.SetHeaders(map[string]string{
        "Origin":          d.conf.site,
        "Referer":         d.conf.site + "/",
        "User-Agent":      "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36 Edg/125.0.0.0",
        "Accept-Encoding": "gzip, deflate, br, zstd",
    })

    if callback != nil {
        callback(req)
    }
    res, err := req.Execute(method, d.conf.base+pathname)

    res, err := req.Execute(method, d.conf.base+pathname+"?"+queryString)
    if err != nil {
        if res != nil {
            log.Errorf("[iLanZou] request error: %s", res.String())
273  drivers/kodbox/driver.go  Normal file
@@ -0,0 +1,273 @@
package kodbox

import (
    "context"
    "fmt"
    "github.com/alist-org/alist/v3/pkg/utils"
    "github.com/go-resty/resty/v2"
    "net/http"
    "path/filepath"
    "strings"
    "time"

    "github.com/alist-org/alist/v3/internal/driver"
    "github.com/alist-org/alist/v3/internal/model"
)

type KodBox struct {
    model.Storage
    Addition
    authorization string
}

func (d *KodBox) Config() driver.Config {
    return config
}

func (d *KodBox) GetAddition() driver.Additional {
    return &d.Addition
}

func (d *KodBox) Init(ctx context.Context) error {
    d.Address = strings.TrimSuffix(d.Address, "/")
    d.RootFolderPath = strings.TrimPrefix(utils.FixAndCleanPath(d.RootFolderPath), "/")
    return d.getToken()
}

func (d *KodBox) Drop(ctx context.Context) error {
    return nil
}

func (d *KodBox) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
    var (
        resp         *CommonResp
        listPathData *ListPathData
    )

    _, err := d.request(http.MethodPost, "/?explorer/list/path", func(req *resty.Request) {
        req.SetResult(&resp).SetFormData(map[string]string{
            "path": dir.GetPath(),
        })
    }, true)
    if err != nil {
        return nil, err
    }

    dataBytes, err := utils.Json.Marshal(resp.Data)
    if err != nil {
        return nil, err
    }

    err = utils.Json.Unmarshal(dataBytes, &listPathData)
    if err != nil {
        return nil, err
    }
    FolderAndFiles := append(listPathData.FolderList, listPathData.FileList...)

    return utils.SliceConvert(FolderAndFiles, func(f FolderOrFile) (model.Obj, error) {
        return &model.ObjThumb{
            Object: model.Object{
                Path:     f.Path,
                Name:     f.Name,
                Ctime:    time.Unix(f.CreateTime, 0),
                Modified: time.Unix(f.ModifyTime, 0),
                Size:     f.Size,
                IsFolder: f.Type == "folder",
            },
            //Thumbnail: model.Thumbnail{},
        }, nil
    })
}

func (d *KodBox) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
    path := file.GetPath()
    return &model.Link{
        URL: fmt.Sprintf("%s/?explorer/index/fileOut&path=%s&download=1&accessToken=%s",
            d.Address,
            path,
            d.authorization)}, nil
}

func (d *KodBox) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
    var resp *CommonResp
    newDirPath := filepath.Join(parentDir.GetPath(), dirName)

    _, err := d.request(http.MethodPost, "/?explorer/index/mkdir", func(req *resty.Request) {
        req.SetResult(&resp).SetFormData(map[string]string{
            "path": newDirPath,
        })
    })
    if err != nil {
        return nil, err
    }
    code := resp.Code.(bool)
    if !code {
        return nil, fmt.Errorf("%s", resp.Data)
    }

    return &model.ObjThumb{
        Object: model.Object{
            Path:     resp.Info.(string),
            Name:     dirName,
            IsFolder: true,
            Modified: time.Now(),
            Ctime:    time.Now(),
        },
    }, nil
}

func (d *KodBox) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
    var resp *CommonResp
    _, err := d.request(http.MethodPost, "/?explorer/index/pathCuteTo", func(req *resty.Request) {
        req.SetResult(&resp).SetFormData(map[string]string{
            "dataArr": fmt.Sprintf("[{\"path\": \"%s\", \"name\": \"%s\"}]",
                srcObj.GetPath(),
                srcObj.GetName()),
            "path": dstDir.GetPath(),
        })
    }, true)
    if err != nil {
        return nil, err
    }
    code := resp.Code.(bool)
    if !code {
        return nil, fmt.Errorf("%s", resp.Data)
    }

    return &model.ObjThumb{
        Object: model.Object{
            Path:     srcObj.GetPath(),
            Name:     srcObj.GetName(),
            IsFolder: srcObj.IsDir(),
            Modified: srcObj.ModTime(),
            Ctime:    srcObj.CreateTime(),
        },
    }, nil
}

func (d *KodBox) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
    var resp *CommonResp
    _, err := d.request(http.MethodPost, "/?explorer/index/pathRename", func(req *resty.Request) {
        req.SetResult(&resp).SetFormData(map[string]string{
            "path":    srcObj.GetPath(),
            "newName": newName,
        })
    }, true)
    if err != nil {
        return nil, err
    }
    code := resp.Code.(bool)
    if !code {
        return nil, fmt.Errorf("%s", resp.Data)
    }
    return &model.ObjThumb{
        Object: model.Object{
            Path:     srcObj.GetPath(),
            Name:     newName,
            IsFolder: srcObj.IsDir(),
            Modified: time.Now(),
            Ctime:    srcObj.CreateTime(),
        },
    }, nil
}

func (d *KodBox) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
    var resp *CommonResp
    _, err := d.request(http.MethodPost, "/?explorer/index/pathCopyTo", func(req *resty.Request) {
        req.SetResult(&resp).SetFormData(map[string]string{
            "dataArr": fmt.Sprintf("[{\"path\": \"%s\", \"name\": \"%s\"}]",
                srcObj.GetPath(),
                srcObj.GetName()),
            "path": dstDir.GetPath(),
        })
    })
    if err != nil {
        return nil, err
    }
    code := resp.Code.(bool)
    if !code {
        return nil, fmt.Errorf("%s", resp.Data)
    }

    path := resp.Info.([]interface{})[0].(string)
    objectName, err := d.getFileOrFolderName(ctx, path)
    if err != nil {
        return nil, err
    }
    return &model.ObjThumb{
        Object: model.Object{
            Path:     path,
            Name:     *objectName,
            IsFolder: srcObj.IsDir(),
            Modified: time.Now(),
            Ctime:    time.Now(),
        },
    }, nil
}

func (d *KodBox) Remove(ctx context.Context, obj model.Obj) error {
    var resp *CommonResp
    _, err := d.request(http.MethodPost, "/?explorer/index/pathDelete", func(req *resty.Request) {
        req.SetResult(&resp).SetFormData(map[string]string{
            "dataArr": fmt.Sprintf("[{\"path\": \"%s\", \"name\": \"%s\"}]",
                obj.GetPath(),
                obj.GetName()),
            "shiftDelete": "1",
        })
    })
    if err != nil {
        return err
    }
    code := resp.Code.(bool)
    if !code {
        return fmt.Errorf("%s", resp.Data)
    }
    return nil
}

func (d *KodBox) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
    var resp *CommonResp
    _, err := d.request(http.MethodPost, "/?explorer/upload/fileUpload", func(req *resty.Request) {
        req.SetFileReader("file", stream.GetName(), stream).
            SetResult(&resp).
            SetFormData(map[string]string{
                "path": dstDir.GetPath(),
            })
    })
    if err != nil {
        return nil, err
    }
    code := resp.Code.(bool)
    if !code {
        return nil, fmt.Errorf("%s", resp.Data)
    }
    return &model.ObjThumb{
        Object: model.Object{
            Path:     resp.Info.(string),
            Name:     stream.GetName(),
            Size:     stream.GetSize(),
            IsFolder: false,
            Modified: time.Now(),
            Ctime:    time.Now(),
        },
    }, nil
}

func (d *KodBox) getFileOrFolderName(ctx context.Context, path string) (*string, error) {
    var resp *CommonResp
    _, err := d.request(http.MethodPost, "/?explorer/index/pathInfo", func(req *resty.Request) {
        req.SetResult(&resp).SetFormData(map[string]string{
            "dataArr": fmt.Sprintf("[{\"path\": \"%s\"}]", path)})
    })
    if err != nil {
        return nil, err
    }
    code := resp.Code.(bool)
    if !code {
        return nil, fmt.Errorf("%s", resp.Data)
    }
    folderOrFileName := resp.Data.(map[string]any)["name"].(string)
    return &folderOrFileName, nil
}

var _ driver.Driver = (*KodBox)(nil)
25  drivers/kodbox/meta.go  Normal file
@@ -0,0 +1,25 @@
package kodbox

import (
    "github.com/alist-org/alist/v3/internal/driver"
    "github.com/alist-org/alist/v3/internal/op"
)

type Addition struct {
    driver.RootPath

    Address  string `json:"address" required:"true"`
    UserName string `json:"username" required:"false"`
    Password string `json:"password" required:"false"`
}

var config = driver.Config{
    Name:        "KodBox",
    DefaultRoot: "",
}

func init() {
    op.RegisterDriver(func() driver.Driver {
        return &KodBox{}
    })
}
24  drivers/kodbox/types.go  Normal file
@@ -0,0 +1,24 @@
package kodbox

type CommonResp struct {
    Code    any    `json:"code"`
    TimeUse string `json:"timeUse"`
    TimeNow string `json:"timeNow"`
    Data    any    `json:"data"`
    Info    any    `json:"info"`
}

type ListPathData struct {
    FolderList []FolderOrFile `json:"folderList"`
    FileList   []FolderOrFile `json:"fileList"`
}

type FolderOrFile struct {
    Name       string `json:"name"`
    Path       string `json:"path"`
    Type       string `json:"type"`
    Ext        string `json:"ext,omitempty"` // file-only field
    Size       int64  `json:"size"`
    CreateTime int64  `json:"createTime"`
    ModifyTime int64  `json:"modifyTime"`
}
86  drivers/kodbox/util.go  Normal file
@@ -0,0 +1,86 @@
package kodbox

import (
    "fmt"
    "github.com/alist-org/alist/v3/drivers/base"
    "github.com/alist-org/alist/v3/pkg/utils"
    "github.com/go-resty/resty/v2"
    "strings"
)

func (d *KodBox) getToken() error {
    var authResp CommonResp
    res, err := base.RestyClient.R().
        SetResult(&authResp).
        SetQueryParams(map[string]string{
            "name":     d.UserName,
            "password": d.Password,
        }).
        Post(d.Address + "/?user/index/loginSubmit")
    if err != nil {
        return err
    }
    if res.StatusCode() >= 400 {
        return fmt.Errorf("get token failed: %s", res.String())
    }

    if res.StatusCode() == 200 && authResp.Code.(bool) == false {
        return fmt.Errorf("get token failed: %s", res.String())
    }

    d.authorization = fmt.Sprintf("%s", authResp.Info)
    return nil
}

func (d *KodBox) request(method string, pathname string, callback base.ReqCallback, noRedirect ...bool) ([]byte, error) {
    full := pathname
    if !strings.HasPrefix(pathname, "http") {
        full = d.Address + pathname
    }
    req := base.RestyClient.R()
    if len(noRedirect) > 0 && noRedirect[0] {
        req = base.NoRedirectClient.R()
    }
    req.SetFormData(map[string]string{
        "accessToken": d.authorization,
    })
    callback(req)

    var (
        res        *resty.Response
        commonResp *CommonResp
        err        error
        skip       bool
    )
    for i := 0; i < 2; i++ {
        if skip {
            break
        }
        res, err = req.Execute(method, full)
        if err != nil {
            return nil, err
        }

        err := utils.Json.Unmarshal(res.Body(), &commonResp)
        if err != nil {
            return nil, err
        }

        switch commonResp.Code.(type) {
        case bool:
            skip = true
        case string:
            if commonResp.Code.(string) == "10001" {
                err = d.getToken()
                if err != nil {
                    return nil, err
                }
                req.SetFormData(map[string]string{"accessToken": d.authorization})
            }
        }
    }
    if commonResp.Code.(bool) == false {
        return nil, fmt.Errorf("request failed: %s", commonResp.Data)
    }
    return res.Body(), nil
}
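The request helper injects the accessToken form field, retries once after re-login when the server answers with the string code "10001", and treats a boolean code as the final result. A short usage sketch in the style of List in driver.go, not part of the diff; listPathSketch is an illustrative name:

package kodbox

import (
    "net/http"

    "github.com/go-resty/resty/v2"
)

// listPathSketch calls the same "explorer/list/path" endpoint that List uses.
func (d *KodBox) listPathSketch(path string) ([]byte, error) {
    // the trailing true selects the no-redirect client, matching List in driver.go
    return d.request(http.MethodPost, "/?explorer/list/path", func(req *resty.Request) {
        req.SetFormData(map[string]string{"path": path})
    }, true)
}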
Some files were not shown because too many files have changed in this diff.