Compare commits: v3.40.0...feat/ilanz (6 commits)

Commits: af920e00d4, 697512aa8d, 8d0f80db33, c39909c641, 7868a1a524, 9a356ec9d6
.air.toml (44 lines; file removed)
@@ -1,44 +0,0 @@
-root = "."
-testdata_dir = "testdata"
-tmp_dir = "tmp"
-
-[build]
-  args_bin = ["server"]
-  bin = "./tmp/main"
-  cmd = "go build -o ./tmp/main ."
-  delay = 0
-  exclude_dir = ["assets", "tmp", "vendor", "testdata"]
-  exclude_file = []
-  exclude_regex = ["_test.go"]
-  exclude_unchanged = false
-  follow_symlink = false
-  full_bin = ""
-  include_dir = []
-  include_ext = ["go", "tpl", "tmpl", "html"]
-  include_file = []
-  kill_delay = "0s"
-  log = "build-errors.log"
-  poll = false
-  poll_interval = 0
-  rerun = false
-  rerun_delay = 500
-  send_interrupt = false
-  stop_on_error = false
-
-[color]
-  app = ""
-  build = "yellow"
-  main = "magenta"
-  runner = "green"
-  watcher = "cyan"
-
-[log]
-  main_only = false
-  time = false
-
-[misc]
-  clean_on_exit = false
-
-[screen]
-  clear_on_rebuild = false
-  keep_scroll = true
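For context, .air.toml configures the Air live-reload tool used during local Go development. A minimal sketch of how such a config is typically consumed; the install path and flag below are assumptions about the tool, not commands taken from this repository:

  # Hypothetical local usage of the removed Air config (assumed tooling, not part of this diff)
  go install github.com/air-verse/air@latest   # Air live-reload tool for Go
  air -c .air.toml                             # rebuilds ./tmp/main via `cmd` and runs it with args_bin on file changes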
.github/ISSUE_TEMPLATE/config.yml (2 changed lines)
@@ -1,5 +1,5 @@
 blank_issues_enabled: false
 contact_links:
   - name: Questions & Discussions
-    url: https://github.com/alist-org/alist/discussions
+    url: https://github.com/Xhofe/alist/discussions
     about: Use GitHub discussions for message-board style questions and discussions.
.github/workflows/beta_release.yml (124 lines; file removed)
@@ -1,124 +0,0 @@
-name: beta release
-
-on:
-  push:
-    branches: [ 'main' ]
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
-  cancel-in-progress: true
-
-permissions:
-  contents: write
-
-jobs:
-  changelog:
-    strategy:
-      matrix:
-        platform: [ ubuntu-latest ]
-        go-version: [ '1.21' ]
-    name: Beta Release Changelog
-    runs-on: ${{ matrix.platform }}
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-
-      - name: Create or update ref
-        id: create-or-update-ref
-        uses: ovsds/create-or-update-ref-action@v1
-        with:
-          ref: tags/beta
-          sha: ${{ github.sha }}
-
-      - name: Delete beta tag
-        run: git tag -d beta
-        continue-on-error: true
-
-      - name: changelog # or changelogithub@0.12 if ensure the stable result
-        id: changelog
-        run: |
-          git tag -l
-          npx changelogithub --output CHANGELOG.md
-          # npx changelogen@latest --output CHANGELOG.md
-
-      - name: Upload assets
-        uses: softprops/action-gh-release@v2
-        with:
-          body_path: CHANGELOG.md
-          files: CHANGELOG.md
-          prerelease: true
-          tag_name: beta
-
-  release:
-    needs:
-      - changelog
-    strategy:
-      matrix:
-        include:
-          - target: '!(*musl*|*windows-arm64*|*android*|*freebsd*)' # xgo
-            hash: "md5"
-          - target: 'linux-!(arm*)-musl*' #musl-not-arm
-            hash: "md5-linux-musl"
-          - target: 'linux-arm*-musl*' #musl-arm
-            hash: "md5-linux-musl-arm"
-          - target: 'windows-arm64' #win-arm64
-            hash: "md5-windows-arm64"
-          - target: 'android-*' #android
-            hash: "md5-android"
-          - target: 'freebsd-*' #freebsd
-            hash: "md5-freebsd"
-
-    name: Beta Release
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-
-      - name: Setup Go
-        uses: actions/setup-go@v5
-        with:
-          go-version: '1.22'
-
-      - name: Setup web
-        run: bash build.sh dev web
-
-      - name: Build
-        id: test-action
-        uses: go-cross/cgo-actions@v1
-        with:
-          targets: ${{ matrix.target }}
-          musl-target-format: $os-$musl-$arch
-          out-dir: build
-
-      - name: Compress
-        run: |
-          bash build.sh zip ${{ matrix.hash }}
-
-      - name: Upload assets
-        uses: softprops/action-gh-release@v2
-        with:
-          files: build/compress/*
-          prerelease: true
-          tag_name: beta
-
-  desktop:
-    needs:
-      - release
-    name: Beta Release Desktop
-    runs-on: ubuntu-latest
-    steps:
-      - uses: peter-evans/create-or-update-comment@v4
-        with:
-          issue-number: 69
-          body: |
-            /release-beta
-            - triggered by @${{ github.actor }}
-            - commit sha: ${{ github.sha }}
-            - view files: https://github.com/alist-org/alist/tree/${{ github.sha }}
-          reactions: 'rocket'
-          token: ${{ secrets.MY_TOKEN }}
-          repository: alist-org/desktop-release
.github/workflows/build.yml (2 changed lines)
@@ -27,7 +27,7 @@ jobs:
       - name: Checkout
         uses: actions/checkout@v4

-      - uses: benjlevesque/short-sha@v3.0
+      - uses: benjlevesque/short-sha@v2.2
        id: short-sha

      - name: Install dependencies
.github/workflows/build_docker.yml (73 changed lines)
@@ -3,8 +3,6 @@ name: build_docker
 on:
   push:
     branches: [ main ]
-  pull_request:
-    branches: [ main ]

 concurrency:
   group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
@@ -12,97 +10,42 @@ concurrency:

 jobs:
   build_docker:
-    name: Build Docker
+    name: Build docker
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
         uses: actions/checkout@v4

       - name: Docker meta
         id: meta
         uses: docker/metadata-action@v5
         with:
           images: xhofe/alist
-          tags: |
-            type=schedule
-            type=ref,event=branch
-            type=ref,event=tag
-            type=ref,event=pr
-            type=raw,value=beta,enable={{is_default_branch}}
-
-      - name: Docker meta with ffmpeg
-        id: meta-ffmpeg
-        uses: docker/metadata-action@v5
-        with:
-          images: xhofe/alist
-          flavor: |
-            suffix=-ffmpeg
-          tags: |
-            type=schedule
-            type=ref,event=branch
-            type=ref,event=tag
-            type=ref,event=pr
-            type=raw,value=beta,enable={{is_default_branch}}
-
-      - uses: actions/setup-go@v5
-        with:
-          go-version: 'stable'
-
-      - name: Cache Musl
-        id: cache-musl
-        uses: actions/cache@v4
-        with:
-          path: build/musl-libs
-          key: docker-musl-libs-v2
-
-      - name: Download Musl Library
-        if: steps.cache-musl.outputs.cache-hit != 'true'
-        run: bash build.sh prepare docker-multiplatform
-
-      - name: Build go binary
-        run: bash build.sh dev docker-multiplatform
+
+      - name: Replace release with dev
+        run: |
+          sed -i 's/release/dev/g' Dockerfile

       - name: Set up QEMU
         uses: docker/setup-qemu-action@v3

       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v3

       - name: Login to DockerHub
-        if: github.event_name == 'push'
         uses: docker/login-action@v3
         with:
           username: xhofe
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Build and push
         id: docker_build
-        uses: docker/build-push-action@v6
+        uses: docker/build-push-action@v5
         with:
           context: .
-          file: Dockerfile.ci
-          push: ${{ github.event_name == 'push' }}
+          push: true
           tags: ${{ steps.meta.outputs.tags }}
           labels: ${{ steps.meta.outputs.labels }}
-          platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x,linux/ppc64le,linux/riscv64
-
-      - name: Build and push with ffmpeg
-        id: docker_build_ffmpeg
-        uses: docker/build-push-action@v6
-        with:
-          context: .
-          file: Dockerfile.ci
-          push: ${{ github.event_name == 'push' }}
-          tags: ${{ steps.meta-ffmpeg.outputs.tags }}
-          labels: ${{ steps.meta-ffmpeg.outputs.labels }}
-          build-args: INSTALL_FFMPEG=true
-          platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x,linux/ppc64le,linux/riscv64
+          platforms: linux/amd64,linux/arm64

   build_docker_with_aria2:
     needs: build_docker
     name: Build docker with aria2
     runs-on: ubuntu-latest
-    if: github.event_name == 'push'
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -123,4 +66,4 @@ jobs:
         with:
           github_token: ${{ secrets.MY_TOKEN }}
           branch: main
           repository: alist-org/with_aria2
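On the v3.40.0 side this workflow cross-compiles the Go binaries on the runner and uses Docker only for packaging. A rough local equivalent of those steps; the buildx invocation is an assumption reconstructed from the workflow inputs, not a command from the repository:

  bash build.sh prepare docker-multiplatform   # download musl cross toolchains into build/musl-libs (cached in CI)
  bash build.sh dev docker-multiplatform       # cross-compile alist into build/<os>/<arch>/alist
  docker buildx build -f Dockerfile.ci --platform linux/amd64 -t xhofe/alist:beta --load .   # assumed packaging step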
.github/workflows/changelog.yml (7 changed lines)
@@ -3,7 +3,7 @@ name: auto changelog
 on:
   push:
     tags:
-      - 'v*'
+      - '*'

 jobs:
   changelog:
@@ -14,11 +14,6 @@ jobs:
         uses: actions/checkout@v4
         with:
           fetch-depth: 0

-      - name: Delete beta tag
-        run: git tag -d beta
-        continue-on-error: true
-
       - run: npx changelogithub # or changelogithub@0.12 if ensure the stable result
         env:
           GITHUB_TOKEN: ${{secrets.MY_TOKEN}}
.github/workflows/issue_question.yml (2 changed lines)
@@ -10,7 +10,7 @@ jobs:
     if: github.event.label.name == 'question'
     steps:
       - name: Create comment
-        uses: actions-cool/issues-helper@v3.6.0
+        uses: actions-cool/issues-helper@v3.5.2
         with:
           actions: 'create-comment'
           token: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/release.yml (19 changed lines)
@@ -13,23 +13,6 @@ jobs:
     name: Release
     runs-on: ${{ matrix.platform }}
     steps:
-
-      - name: Free Disk Space (Ubuntu)
-        uses: jlumbroso/free-disk-space@main
-        with:
-          # this might remove tools that are actually needed,
-          # if set to "true" but frees about 6 GB
-          tool-cache: false
-
-          # all of these default to true, but feel free to set to
-          # "false" if necessary for your workflow
-          android: true
-          dotnet: true
-          haskell: true
-          large-packages: true
-          docker-images: true
-          swap-storage: true
-
       - name: Prerelease
         uses: irongut/EditRelease@v1.2.0
         with:
@@ -59,7 +42,7 @@ jobs:
           bash build.sh release

       - name: Upload assets
-        uses: softprops/action-gh-release@v2
+        uses: softprops/action-gh-release@v1
         with:
           files: build/compress/*
           prerelease: false
.github/workflows/release_android.yml (34 lines; file removed)
@@ -1,34 +0,0 @@
-name: release_android
-
-on:
-  release:
-    types: [ published ]
-
-jobs:
-  release_android:
-    strategy:
-      matrix:
-        platform: [ ubuntu-latest ]
-        go-version: [ '1.21' ]
-    name: Release
-    runs-on: ${{ matrix.platform }}
-    steps:
-
-      - name: Setup Go
-        uses: actions/setup-go@v5
-        with:
-          go-version: ${{ matrix.go-version }}
-
-      - name: Checkout
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-
-      - name: Build
-        run: |
-          bash build.sh release android
-
-      - name: Upload assets
-        uses: softprops/action-gh-release@v2
-        with:
-          files: build/compress/*
.github/workflows/release_docker.yml (46 changed lines)
@@ -3,7 +3,7 @@ name: release_docker
 on:
   push:
     tags:
-      - 'v*'
+      - '*'

 jobs:
   release_docker:
@@ -13,24 +13,6 @@ jobs:
       - name: Checkout
         uses: actions/checkout@v4

-      - uses: actions/setup-go@v5
-        with:
-          go-version: 'stable'
-
-      - name: Cache Musl
-        id: cache-musl
-        uses: actions/cache@v4
-        with:
-          path: build/musl-libs
-          key: docker-musl-libs-v2
-
-      - name: Download Musl Library
-        if: steps.cache-musl.outputs.cache-hit != 'true'
-        run: bash build.sh prepare docker-multiplatform
-
-      - name: Build go binary
-        run: bash build.sh release docker-multiplatform
-
       - name: Docker meta
         id: meta
         uses: docker/metadata-action@v5
@@ -51,35 +33,13 @@ jobs:

       - name: Build and push
         id: docker_build
-        uses: docker/build-push-action@v6
+        uses: docker/build-push-action@v5
         with:
           context: .
-          file: Dockerfile.ci
           push: true
           tags: ${{ steps.meta.outputs.tags }}
           labels: ${{ steps.meta.outputs.labels }}
-          platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x,linux/ppc64le,linux/riscv64
-
-      - name: Docker meta with ffmpeg
-        id: meta-ffmpeg
-        uses: docker/metadata-action@v5
-        with:
-          images: xhofe/alist
-          flavor: |
-            latest=true
-            suffix=-ffmpeg,onlatest=true
-
-      - name: Build and push with ffmpeg
-        id: docker_build_ffmpeg
-        uses: docker/build-push-action@v6
-        with:
-          context: .
-          file: Dockerfile.ci
-          push: true
-          tags: ${{ steps.meta-ffmpeg.outputs.tags }}
-          labels: ${{ steps.meta-ffmpeg.outputs.labels }}
-          build-args: INSTALL_FFMPEG=true
-          platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x,linux/ppc64le,linux/riscv64
+          platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x

   release_docker_with_aria2:
     needs: release_docker
.github/workflows/release_freebsd.yml (34 lines; file removed)
@@ -1,34 +0,0 @@
-name: release_freebsd
-
-on:
-  release:
-    types: [ published ]
-
-jobs:
-  release_freebsd:
-    strategy:
-      matrix:
-        platform: [ ubuntu-latest ]
-        go-version: [ '1.21' ]
-    name: Release
-    runs-on: ${{ matrix.platform }}
-    steps:
-
-      - name: Setup Go
-        uses: actions/setup-go@v5
-        with:
-          go-version: ${{ matrix.go-version }}
-
-      - name: Checkout
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-
-      - name: Build
-        run: |
-          bash build.sh release freebsd
-
-      - name: Upload assets
-        uses: softprops/action-gh-release@v2
-        with:
-          files: build/compress/*
.github/workflows/release_linux_musl.yml (2 changed lines)
@@ -29,6 +29,6 @@ jobs:
           bash build.sh release linux_musl

       - name: Upload assets
-        uses: softprops/action-gh-release@v2
+        uses: softprops/action-gh-release@v1
         with:
           files: build/compress/*
.github/workflows/release_linux_musl_arm.yml (2 changed lines)
@@ -29,6 +29,6 @@ jobs:
           bash build.sh release linux_musl_arm

       - name: Upload assets
-        uses: softprops/action-gh-release@v2
+        uses: softprops/action-gh-release@v1
         with:
           files: build/compress/*
.gitignore (1 changed line)
@@ -24,7 +24,6 @@ output/
 *.json
 /build
 /data/
-/tmp/
 /log/
 /lang/
 /daemon/
Dockerfile (24 changed lines)
@@ -1,30 +1,18 @@
 FROM alpine:edge as builder
 LABEL stage=go-builder
 WORKDIR /app/
-RUN apk add --no-cache bash curl gcc git go musl-dev
-COPY go.mod go.sum ./
-RUN go mod download
 COPY ./ ./
-RUN bash build.sh release docker
+RUN apk add --no-cache bash curl gcc git go musl-dev; \
+    bash build.sh release docker

 FROM alpine:edge

-ARG INSTALL_FFMPEG=false
 LABEL MAINTAINER="i@nn.ci"
+VOLUME /opt/alist/data/
 WORKDIR /opt/alist/

-RUN apk update && \
-    apk upgrade --no-cache && \
-    apk add --no-cache bash ca-certificates su-exec tzdata; \
-    [ "$INSTALL_FFMPEG" = "true" ] && apk add --no-cache ffmpeg; \
-    rm -rf /var/cache/apk/*
-
 COPY --from=builder /app/bin/alist ./
 COPY entrypoint.sh /entrypoint.sh
-RUN chmod +x /entrypoint.sh && /entrypoint.sh version
+RUN apk add --no-cache bash ca-certificates su-exec tzdata; \
+    chmod +x /entrypoint.sh
 ENV PUID=0 PGID=0 UMASK=022
-VOLUME /opt/alist/data/
 EXPOSE 5244 5245
 CMD [ "/entrypoint.sh" ]

A second, unnamed Dockerfile variant is also removed (the workflow changes above drop their references to Dockerfile.ci, which matches its contents):
@@ -1,22 +0,0 @@
-FROM alpine:edge
-
-ARG TARGETPLATFORM
-ARG INSTALL_FFMPEG=false
-LABEL MAINTAINER="i@nn.ci"
-
-WORKDIR /opt/alist/
-
-RUN apk update && \
-    apk upgrade --no-cache && \
-    apk add --no-cache bash ca-certificates su-exec tzdata; \
-    [ "$INSTALL_FFMPEG" = "true" ] && apk add --no-cache ffmpeg; \
-    rm -rf /var/cache/apk/*
-
-COPY /build/${TARGETPLATFORM}/alist ./
-COPY entrypoint.sh /entrypoint.sh
-RUN chmod +x /entrypoint.sh && /entrypoint.sh version
-
-ENV PUID=0 PGID=0 UMASK=022
-VOLUME /opt/alist/data/
-EXPOSE 5244 5245
-CMD [ "/entrypoint.sh" ]
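Both Dockerfile variants expose the same runtime surface: ports 5244/5245, the /opt/alist/data volume, and the PUID/PGID/UMASK environment defaults. A sketch of running the published image with those settings; the host path, container name, and tag are illustrative assumptions, not taken from this diff:

  # Assumed run command built from the EXPOSE/VOLUME/ENV lines above
  docker run -d --name alist \
    -p 5244:5244 \
    -v /etc/alist:/opt/alist/data \
    -e PUID=0 -e PGID=0 -e UMASK=022 \
    xhofe/alist:latest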
README.md (22 changed lines)
@@ -1,17 +1,17 @@
 <div align="center">
-  <a href="https://alist.nn.ci"><img width="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
+  <a href="https://alist.nn.ci"><img height="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
   <p><em>🗂️A file list program that supports multiple storages, powered by Gin and Solidjs.</em></p>
   <div>
     <a href="https://goreportcard.com/report/github.com/alist-org/alist/v3">
       <img src="https://goreportcard.com/badge/github.com/alist-org/alist/v3" alt="latest version" />
     </a>
-    <a href="https://github.com/alist-org/alist/blob/main/LICENSE">
+    <a href="https://github.com/Xhofe/alist/blob/main/LICENSE">
       <img src="https://img.shields.io/github/license/Xhofe/alist" alt="License" />
     </a>
-    <a href="https://github.com/alist-org/alist/actions?query=workflow%3ABuild">
+    <a href="https://github.com/Xhofe/alist/actions?query=workflow%3ABuild">
       <img src="https://img.shields.io/github/actions/workflow/status/Xhofe/alist/build.yml?branch=main" alt="Build status" />
     </a>
-    <a href="https://github.com/alist-org/alist/releases">
+    <a href="https://github.com/Xhofe/alist/releases">
       <img src="https://img.shields.io/github/release/Xhofe/alist" alt="latest version" />
     </a>
     <a title="Crowdin" target="_blank" href="https://crwd.in/alist">
@@ -19,13 +19,13 @@
     </a>
   </div>
   <div>
-    <a href="https://github.com/alist-org/alist/discussions">
+    <a href="https://github.com/Xhofe/alist/discussions">
       <img src="https://img.shields.io/github/discussions/Xhofe/alist?color=%23ED8936" alt="discussions" />
     </a>
     <a href="https://discord.gg/F4ymsH4xv2">
       <img src="https://img.shields.io/discord/1018870125102895134?logo=discord" alt="discussions" />
     </a>
-    <a href="https://github.com/alist-org/alist/releases">
+    <a href="https://github.com/Xhofe/alist/releases">
       <img src="https://img.shields.io/github/downloads/Xhofe/alist/total?color=%239F7AEA&logo=github" alt="Downloads" />
     </a>
     <a href="https://hub.docker.com/r/xhofe/alist">
@@ -75,8 +75,6 @@ English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing]
 - [x] [115](https://115.com/)
 - [X] Cloudreve
 - [x] [Dropbox](https://www.dropbox.com/)
-- [x] [FeijiPan](https://www.feijipan.com/)
-- [x] [dogecloud](https://www.dogecloud.com/product/oss)
 - [x] Easy to deploy and out-of-the-box
 - [x] File preview (PDF, markdown, code, plain text, ...)
 - [x] Image preview in gallery mode
@@ -106,7 +104,7 @@ English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing]
 ## Discussion

-Please go to our [discussion forum](https://github.com/alist-org/alist/discussions) for general questions, **issues are for bug reports and feature requests only.**
+Please go to our [discussion forum](https://github.com/Xhofe/alist/discussions) for general questions, **issues are for bug reports and feature requests only.**

 ## Sponsor
@@ -115,9 +113,9 @@ https://alist.nn.ci/guide/sponsor.html
 ### Special sponsors

-- [VidHub](https://apps.apple.com/app/apple-store/id1659622164?pt=118612019&ct=alist&mt=8) - An elegant cloud video player within the Apple ecosystem. Support for iPhone, iPad, Mac, and Apple TV.
+- [VidHub](https://okaapps.com/product/1659622164?ref=alist) - An elegant cloud video player within the Apple ecosystem. Support for iPhone, iPad, Mac, and Apple TV.
 - [亚洲云](https://www.asiayun.com/aff/QQCOOQKZ) - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商 (sponsored Chinese API server)
-- [找资源](http://zhaoziyuan2.cc/) - 阿里云盘资源搜索引擎
+- [找资源](https://zhaoziyuan.pw/) - 阿里云盘资源搜索引擎

 ## Contributors
@@ -138,4 +136,4 @@ The `AList` is open-source software licensed under the AGPL-3.0 license.
 ---

-> [@Blog](https://nn.ci/) · [@GitHub](https://github.com/alist-org) · [@TelegramGroup](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)
+> [@Blog](https://nn.ci/) · [@GitHub](https://github.com/Xhofe) · [@TelegramGroup](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)
README_cn.md (22 changed lines)
@@ -1,17 +1,17 @@
 <div align="center">
-  <a href="https://alist.nn.ci"><img width="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
+  <a href="https://alist.nn.ci"><img height="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
   <p><em>🗂一个支持多存储的文件列表程序,使用 Gin 和 Solidjs。</em></p>
   <div>
     <a href="https://goreportcard.com/report/github.com/alist-org/alist/v3">
       <img src="https://goreportcard.com/badge/github.com/alist-org/alist/v3" alt="latest version" />
     </a>
-    <a href="https://github.com/alist-org/alist/blob/main/LICENSE">
+    <a href="https://github.com/Xhofe/alist/blob/main/LICENSE">
       <img src="https://img.shields.io/github/license/Xhofe/alist" alt="License" />
     </a>
-    <a href="https://github.com/alist-org/alist/actions?query=workflow%3ABuild">
+    <a href="https://github.com/Xhofe/alist/actions?query=workflow%3ABuild">
       <img src="https://img.shields.io/github/actions/workflow/status/Xhofe/alist/build.yml?branch=main" alt="Build status" />
     </a>
-    <a href="https://github.com/alist-org/alist/releases">
+    <a href="https://github.com/Xhofe/alist/releases">
       <img src="https://img.shields.io/github/release/Xhofe/alist" alt="latest version" />
     </a>
     <a title="Crowdin" target="_blank" href="https://crwd.in/alist">
@@ -19,13 +19,13 @@
     </a>
   </div>
   <div>
-    <a href="https://github.com/alist-org/alist/discussions">
+    <a href="https://github.com/Xhofe/alist/discussions">
       <img src="https://img.shields.io/github/discussions/Xhofe/alist?color=%23ED8936" alt="discussions" />
     </a>
     <a href="https://discord.gg/F4ymsH4xv2">
       <img src="https://img.shields.io/discord/1018870125102895134?logo=discord" alt="discussions" />
     </a>
-    <a href="https://github.com/alist-org/alist/releases">
+    <a href="https://github.com/Xhofe/alist/releases">
       <img src="https://img.shields.io/github/downloads/Xhofe/alist/total?color=%239F7AEA&logo=github" alt="Downloads" />
     </a>
     <a href="https://hub.docker.com/r/xhofe/alist">
@@ -74,8 +74,6 @@
 - [x] [115](https://115.com/)
 - [X] Cloudreve
 - [x] [Dropbox](https://www.dropbox.com/)
-- [x] [飞机盘](https://www.feijipan.com/)
-- [x] [多吉云](https://www.dogecloud.com/product/oss)
 - [x] 部署方便,开箱即用
 - [x] 文件预览(PDF、markdown、代码、纯文本……)
 - [x] 画廊模式下的图像预览
@@ -105,7 +103,7 @@
 ## 讨论

-一般问题请到[讨论论坛](https://github.com/alist-org/alist/discussions) ,**issue仅针对错误报告和功能请求。**
+一般问题请到[讨论论坛](https://github.com/Xhofe/alist/discussions) ,**issue仅针对错误报告和功能请求。**

 ## 赞助
@@ -113,9 +111,9 @@ AList 是一个开源软件,如果你碰巧喜欢这个项目,并希望我
 ### 特别赞助

-- [VidHub](https://apps.apple.com/app/apple-store/id1659622164?pt=118612019&ct=alist&mt=8) - 苹果生态下优雅的网盘视频播放器,iPhone,iPad,Mac,Apple TV全平台支持。
+- [VidHub](https://zh.okaapps.com/product/1659622164?ref=alist) - 苹果生态下优雅的网盘视频播放器,iPhone,iPad,Mac,Apple TV全平台支持。
 - [亚洲云](https://www.asiayun.com/aff/QQCOOQKZ) - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商 (国内API服务器赞助)
-- [找资源](http://zhaoziyuan2.cc/) - 阿里云盘资源搜索引擎
+- [找资源](https://zhaoziyuan.pw/) - 阿里云盘资源搜索引擎

 ## 贡献者
@@ -136,4 +134,4 @@ Thanks goes to these wonderful people:
 ---

-> [@博客](https://nn.ci/) · [@GitHub](https://github.com/alist-org) · [@Telegram群](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)
+> [@博客](https://nn.ci/) · [@GitHub](https://github.com/Xhofe) · [@Telegram群](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)
README_ja.md (22 changed lines)
@@ -1,17 +1,17 @@
 <div align="center">
-  <a href="https://alist.nn.ci"><img width="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
+  <a href="https://alist.nn.ci"><img height="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
   <p><em>🗂️Gin と Solidjs による、複数のストレージをサポートするファイルリストプログラム。</em></p>
   <div>
     <a href="https://goreportcard.com/report/github.com/alist-org/alist/v3">
       <img src="https://goreportcard.com/badge/github.com/alist-org/alist/v3" alt="latest version" />
     </a>
-    <a href="https://github.com/alist-org/alist/blob/main/LICENSE">
+    <a href="https://github.com/Xhofe/alist/blob/main/LICENSE">
       <img src="https://img.shields.io/github/license/Xhofe/alist" alt="License" />
     </a>
-    <a href="https://github.com/alist-org/alist/actions?query=workflow%3ABuild">
+    <a href="https://github.com/Xhofe/alist/actions?query=workflow%3ABuild">
       <img src="https://img.shields.io/github/actions/workflow/status/Xhofe/alist/build.yml?branch=main" alt="Build status" />
     </a>
-    <a href="https://github.com/alist-org/alist/releases">
+    <a href="https://github.com/Xhofe/alist/releases">
       <img src="https://img.shields.io/github/release/Xhofe/alist" alt="latest version" />
     </a>
     <a title="Crowdin" target="_blank" href="https://crwd.in/alist">
@@ -19,13 +19,13 @@
     </a>
   </div>
   <div>
-    <a href="https://github.com/alist-org/alist/discussions">
+    <a href="https://github.com/Xhofe/alist/discussions">
       <img src="https://img.shields.io/github/discussions/Xhofe/alist?color=%23ED8936" alt="discussions" />
     </a>
     <a href="https://discord.gg/F4ymsH4xv2">
       <img src="https://img.shields.io/discord/1018870125102895134?logo=discord" alt="discussions" />
     </a>
-    <a href="https://github.com/alist-org/alist/releases">
+    <a href="https://github.com/Xhofe/alist/releases">
       <img src="https://img.shields.io/github/downloads/Xhofe/alist/total?color=%239F7AEA&logo=github" alt="Downloads" />
     </a>
     <a href="https://hub.docker.com/r/xhofe/alist">
@@ -75,8 +75,6 @@
 - [x] [115](https://115.com/)
 - [X] Cloudreve
 - [x] [Dropbox](https://www.dropbox.com/)
-- [x] [FeijiPan](https://www.feijipan.com/)
-- [x] [dogecloud](https://www.dogecloud.com/product/oss)
 - [x] デプロイが簡単で、すぐに使える
 - [x] ファイルプレビュー (PDF, マークダウン, コード, プレーンテキスト, ...)
 - [x] ギャラリーモードでの画像プレビュー
@@ -106,7 +104,7 @@
 ## ディスカッション

-一般的なご質問は[ディスカッションフォーラム](https://github.com/alist-org/alist/discussions)をご利用ください。**問題はバグレポートと機能リクエストのみです。**
+一般的なご質問は[ディスカッションフォーラム](https://github.com/Xhofe/alist/discussions)をご利用ください。**問題はバグレポートと機能リクエストのみです。**

 ## スポンサー
@@ -115,9 +113,9 @@ https://alist.nn.ci/guide/sponsor.html
 ### スペシャルスポンサー

-- [VidHub](https://apps.apple.com/app/apple-store/id1659622164?pt=118612019&ct=alist&mt=8) - An elegant cloud video player within the Apple ecosystem. Support for iPhone, iPad, Mac, and Apple TV.
+- [VidHub](https://okaapps.com/product/1659622164?ref=alist) - An elegant cloud video player within the Apple ecosystem. Support for iPhone, iPad, Mac, and Apple TV.
 - [亚洲云](https://www.asiayun.com/aff/QQCOOQKZ) - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商 (sponsored Chinese API server)
-- [找资源](http://zhaoziyuan2.cc/) - 阿里云盘资源搜索引擎
+- [找资源](https://zhaoziyuan.pw/) - 阿里云盘资源搜索引擎

 ## コントリビューター
@@ -138,4 +136,4 @@ https://alist.nn.ci/guide/sponsor.html
 ---

-> [@Blog](https://nn.ci/) · [@GitHub](https://github.com/alist-org) · [@TelegramGroup](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)
+> [@Blog](https://nn.ci/) · [@GitHub](https://github.com/Xhofe) · [@TelegramGroup](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)
build.sh (128 changed lines)
@@ -8,7 +8,6 @@ if [ "$1" = "dev" ]; then
   version="dev"
   webVersion="dev"
 else
-  git tag -d beta
   version=$(git describe --abbrev=0 --tags)
   webVersion=$(wget -qO- -t1 -T2 "https://api.github.com/repos/alist-org/alist-web/releases/latest" | grep "tag_name" | head -n 1 | awk -F ":" '{print $2}' | sed 's/\"//g;s/,//g;s/ //g')
 fi
@@ -87,60 +86,11 @@ BuildDev() {
 }

 BuildDocker() {
+  echo "replace github.com/mattn/go-sqlite3 => github.com/leso-kn/go-sqlite3 v0.0.0-20230710125852-03158dc838ed" >>go.mod
+  go get gorm.io/driver/sqlite@v1.4.4
   go build -o ./bin/alist -ldflags="$ldflags" -tags=jsoniter .
 }

-PrepareBuildDockerMusl() {
-  mkdir -p build/musl-libs
-  BASE="https://musl.cc/"
-  FILES=(x86_64-linux-musl-cross aarch64-linux-musl-cross i486-linux-musl-cross s390x-linux-musl-cross armv6-linux-musleabihf-cross armv7l-linux-musleabihf-cross riscv64-linux-musl-cross powerpc64le-linux-musl-cross)
-  for i in "${FILES[@]}"; do
-    url="${BASE}${i}.tgz"
-    lib_tgz="build/${i}.tgz"
-    curl -L -o "${lib_tgz}" "${url}"
-    tar xf "${lib_tgz}" --strip-components 1 -C build/musl-libs
-    rm -f "${lib_tgz}"
-  done
-}
-
-BuildDockerMultiplatform() {
-  go mod download
-
-  # run PrepareBuildDockerMusl before build
-  export PATH=$PATH:$PWD/build/musl-libs/bin
-
-  docker_lflags="--extldflags '-static -fpic' $ldflags"
-  export CGO_ENABLED=1
-
-  OS_ARCHES=(linux-amd64 linux-arm64 linux-386 linux-s390x linux-riscv64 linux-ppc64le)
-  CGO_ARGS=(x86_64-linux-musl-gcc aarch64-linux-musl-gcc i486-linux-musl-gcc s390x-linux-musl-gcc riscv64-linux-musl-gcc powerpc64le-linux-musl-gcc)
-  for i in "${!OS_ARCHES[@]}"; do
-    os_arch=${OS_ARCHES[$i]}
-    cgo_cc=${CGO_ARGS[$i]}
-    os=${os_arch%%-*}
-    arch=${os_arch##*-}
-    export GOOS=$os
-    export GOARCH=$arch
-    export CC=${cgo_cc}
-    echo "building for $os_arch"
-    go build -o build/$os/$arch/alist -ldflags="$docker_lflags" -tags=jsoniter .
-  done
-
-  DOCKER_ARM_ARCHES=(linux-arm/v6 linux-arm/v7)
-  CGO_ARGS=(armv6-linux-musleabihf-gcc armv7l-linux-musleabihf-gcc)
-  GO_ARM=(6 7)
-  export GOOS=linux
-  export GOARCH=arm
-  for i in "${!DOCKER_ARM_ARCHES[@]}"; do
-    docker_arch=${DOCKER_ARM_ARCHES[$i]}
-    cgo_cc=${CGO_ARGS[$i]}
-    export GOARM=${GO_ARM[$i]}
-    export CC=${cgo_cc}
-    echo "building for $docker_arch"
-    go build -o build/${docker_arch%%-*}/${docker_arch##*-}/alist -ldflags="$docker_lflags" -tags=jsoniter .
-  done
-}
-
 BuildRelease() {
   rm -rf .git/
   mkdir -p "build"
@@ -212,50 +162,6 @@ BuildReleaseLinuxMuslArm() {
   done
 }

-BuildReleaseAndroid() {
-  rm -rf .git/
-  mkdir -p "build"
-  wget https://dl.google.com/android/repository/android-ndk-r26b-linux.zip
-  unzip android-ndk-r26b-linux.zip
-  rm android-ndk-r26b-linux.zip
-  OS_ARCHES=(amd64 arm64 386 arm)
-  CGO_ARGS=(x86_64-linux-android24-clang aarch64-linux-android24-clang i686-linux-android24-clang armv7a-linux-androideabi24-clang)
-  for i in "${!OS_ARCHES[@]}"; do
-    os_arch=${OS_ARCHES[$i]}
-    cgo_cc=$(realpath android-ndk-r26b/toolchains/llvm/prebuilt/linux-x86_64/bin/${CGO_ARGS[$i]})
-    echo building for android-${os_arch}
-    export GOOS=android
-    export GOARCH=${os_arch##*-}
-    export CC=${cgo_cc}
-    export CGO_ENABLED=1
-    go build -o ./build/$appName-android-$os_arch -ldflags="$ldflags" -tags=jsoniter .
-    android-ndk-r26b/toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-strip ./build/$appName-android-$os_arch
-  done
-}
-
-BuildReleaseFreeBSD() {
-  rm -rf .git/
-  mkdir -p "build/freebsd"
-  OS_ARCHES=(amd64 arm64 i386)
-  GO_ARCHES=(amd64 arm64 386)
-  CGO_ARGS=(x86_64-unknown-freebsd14.1 aarch64-unknown-freebsd14.1 i386-unknown-freebsd14.1)
-  for i in "${!OS_ARCHES[@]}"; do
-    os_arch=${OS_ARCHES[$i]}
-    cgo_cc="clang --target=${CGO_ARGS[$i]} --sysroot=/opt/freebsd/${os_arch}"
-    echo building for freebsd-${os_arch}
-    sudo mkdir -p "/opt/freebsd/${os_arch}"
-    wget -q https://download.freebsd.org/releases/${os_arch}/14.1-RELEASE/base.txz
-    sudo tar -xf ./base.txz -C /opt/freebsd/${os_arch}
-    rm base.txz
-    export GOOS=freebsd
-    export GOARCH=${GO_ARCHES[$i]}
-    export CC=${cgo_cc}
-    export CGO_ENABLED=1
-    export CGO_LDFLAGS="-fuse-ld=lld"
-    go build -o ./build/$appName-freebsd-$os_arch -ldflags="$ldflags" -tags=jsoniter .
-  done
-}
-
 MakeRelease() {
   cd build
   mkdir compress
@@ -263,22 +169,12 @@ MakeRelease() {
     cp "$i" alist
     tar -czvf compress/"$i".tar.gz alist
     rm -f alist
   done
-  for i in $(find . -type f -name "$appName-android-*"); do
-    cp "$i" alist
-    tar -czvf compress/"$i".tar.gz alist
-    rm -f alist
-  done
   for i in $(find . -type f -name "$appName-darwin-*"); do
     cp "$i" alist
     tar -czvf compress/"$i".tar.gz alist
     rm -f alist
   done
-  for i in $(find . -type f -name "$appName-freebsd-*"); do
-    cp "$i" alist
-    tar -czvf compress/"$i".tar.gz alist
-    rm -f alist
-  done
   for i in $(find . -type f -name "$appName-windows-*"); do
     cp "$i" alist.exe
     zip compress/$(echo $i | sed 's/\.[^.]*$//').zip alist.exe
@@ -294,10 +190,6 @@ if [ "$1" = "dev" ]; then
   FetchWebDev
   if [ "$2" = "docker" ]; then
     BuildDocker
-  elif [ "$2" = "docker-multiplatform" ]; then
-    BuildDockerMultiplatform
-  elif [ "$2" = "web" ]; then
-    echo "web only"
   else
     BuildDev
   fi
@@ -305,32 +197,16 @@ elif [ "$1" = "release" ]; then
   FetchWebRelease
   if [ "$2" = "docker" ]; then
     BuildDocker
-  elif [ "$2" = "docker-multiplatform" ]; then
-    BuildDockerMultiplatform
   elif [ "$2" = "linux_musl_arm" ]; then
     BuildReleaseLinuxMuslArm
     MakeRelease "md5-linux-musl-arm.txt"
   elif [ "$2" = "linux_musl" ]; then
     BuildReleaseLinuxMusl
     MakeRelease "md5-linux-musl.txt"
-  elif [ "$2" = "android" ]; then
-    BuildReleaseAndroid
-    MakeRelease "md5-android.txt"
-  elif [ "$2" = "freebsd" ]; then
-    BuildReleaseFreeBSD
-    MakeRelease "md5-freebsd.txt"
-  elif [ "$2" = "web" ]; then
-    echo "web only"
   else
     BuildRelease
     MakeRelease "md5.txt"
   fi
-elif [ "$1" = "prepare" ]; then
-  if [ "$2" = "docker-multiplatform" ]; then
-    PrepareBuildDockerMusl
-  fi
-elif [ "$1" = "zip" ]; then
-  MakeRelease "$2".txt
 else
   echo -e "Parameter error"
 fi
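The dispatch at the end of build.sh is what the workflows above invoke. A quick recap of the entry points visible in this diff (invocations and output paths are taken from the script and workflow files above; the per-command notes only summarize the corresponding functions):

  bash build.sh release docker                 # BuildDocker: go build into ./bin/alist (what the Dockerfile runs)
  bash build.sh release linux_musl             # BuildReleaseLinuxMusl, then MakeRelease "md5-linux-musl.txt"
  bash build.sh prepare docker-multiplatform   # v3.40.0 only: PrepareBuildDockerMusl fetches musl cross toolchains
  bash build.sh release docker-multiplatform   # v3.40.0 only: cross-compiles into build/<os>/<arch>/alist
  bash build.sh zip md5                        # v3.40.0 only: MakeRelease "md5.txt" over an existing build/ tree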
Go source changes:
@@ -139,7 +139,7 @@ var LangCmd = &cobra.Command{
 	Run: func(cmd *cobra.Command, args []string) {
 		err := os.MkdirAll("lang", 0777)
 		if err != nil {
-			utils.Log.Fatalf("failed create folder: %s", err.Error())
+			utils.Log.Fatal("failed create folder: %s", err.Error())
 		}
 		generateDriversJson()
 		generateSettingsJson()
@@ -91,27 +91,6 @@ the address is defined in config file`,
 			}()
 		}
-		if conf.Conf.S3.Port != -1 && conf.Conf.S3.Enable {
-			s3r := gin.New()
-			s3r.Use(gin.LoggerWithWriter(log.StandardLogger().Out), gin.RecoveryWithWriter(log.StandardLogger().Out))
-			server.InitS3(s3r)
-			s3Base := fmt.Sprintf("%s:%d", conf.Conf.Scheme.Address, conf.Conf.S3.Port)
-			utils.Log.Infof("start S3 server @ %s", s3Base)
-			go func() {
-				var err error
-				if conf.Conf.S3.SSL {
-					httpsSrv = &http.Server{Addr: s3Base, Handler: s3r}
-					err = httpsSrv.ListenAndServeTLS(conf.Conf.Scheme.CertFile, conf.Conf.Scheme.KeyFile)
-				}
-				if !conf.Conf.S3.SSL {
-					httpSrv = &http.Server{Addr: s3Base, Handler: s3r}
-					err = httpSrv.ListenAndServe()
-				}
-				if err != nil && !errors.Is(err, http.ErrServerClosed) {
-					utils.Log.Fatalf("failed to start s3 server: %s", err.Error())
-				}
-			}()
-		}
 		// Wait for interrupt signal to gracefully shutdown the server with
 		// a timeout of 1 second.
 		quit := make(chan os.Signal, 1)
Removed Go file (package _115, app-version helper for the 115 driver):
@@ -1,43 +0,0 @@
-package _115
-
-import (
-	driver115 "github.com/SheltonZhu/115driver/pkg/driver"
-	"github.com/alist-org/alist/v3/drivers/base"
-	log "github.com/sirupsen/logrus"
-)
-
-var (
-	md5Salt = "Qclm8MGWUv59TnrR0XPg"
-	appVer  = "27.0.5.7"
-)
-
-func (d *Pan115) getAppVersion() ([]driver115.AppVersion, error) {
-	result := driver115.VersionResp{}
-	resp, err := base.RestyClient.R().Get(driver115.ApiGetVersion)
-
-	err = driver115.CheckErr(err, &result, resp)
-	if err != nil {
-		return nil, err
-	}
-
-	return result.Data.GetAppVersions(), nil
-}
-
-func (d *Pan115) getAppVer() string {
-	// todo add some cache?
-	vers, err := d.getAppVersion()
-	if err != nil {
-		log.Warnf("[115] get app version failed: %v", err)
-		return appVer
-	}
-	for _, ver := range vers {
-		if ver.AppName == "win" {
-			return ver.Version
-		}
-	}
-	return appVer
-}
-
-func (d *Pan115) initAppVer() {
-	appVer = d.getAppVer()
-}
@ -3,7 +3,6 @@ package _115
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"strings"
|
"strings"
|
||||||
"sync"
|
|
||||||
|
|
||||||
driver115 "github.com/SheltonZhu/115driver/pkg/driver"
|
driver115 "github.com/SheltonZhu/115driver/pkg/driver"
|
||||||
"github.com/alist-org/alist/v3/internal/driver"
|
"github.com/alist-org/alist/v3/internal/driver"
|
||||||
@ -17,9 +16,8 @@ import (
|
|||||||
type Pan115 struct {
|
type Pan115 struct {
|
||||||
model.Storage
|
model.Storage
|
||||||
Addition
|
Addition
|
||||||
client *driver115.Pan115Client
|
client *driver115.Pan115Client
|
||||||
limiter *rate.Limiter
|
limiter *rate.Limiter
|
||||||
appVerOnce sync.Once
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Pan115) Config() driver.Config {
|
func (d *Pan115) Config() driver.Config {
|
||||||
@ -31,7 +29,6 @@ func (d *Pan115) GetAddition() driver.Additional {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (d *Pan115) Init(ctx context.Context) error {
|
func (d *Pan115) Init(ctx context.Context) error {
|
||||||
d.appVerOnce.Do(d.initAppVer)
|
|
||||||
if d.LimitRate > 0 {
|
if d.LimitRate > 0 {
|
||||||
d.limiter = rate.NewLimiter(rate.Limit(d.LimitRate), 1)
|
d.limiter = rate.NewLimiter(rate.Limit(d.LimitRate), 1)
|
||||||
}
|
}
|
||||||
@ -66,7 +63,7 @@ func (d *Pan115) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
|
|||||||
if err := d.WaitLimit(ctx); err != nil {
|
if err := d.WaitLimit(ctx); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
userAgent := args.Header.Get("User-Agent")
|
var userAgent = args.Header.Get("User-Agent")
|
||||||
downloadInfo, err := d.
|
downloadInfo, err := d.
|
||||||
DownloadWithUA(file.(*FileObj).PickCode, userAgent)
|
DownloadWithUA(file.(*FileObj).PickCode, userAgent)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@ -79,60 +76,28 @@ func (d *Pan115) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
|
|||||||
return link, nil
|
return link, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Pan115) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
|
func (d *Pan115) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||||
if err := d.WaitLimit(ctx); err != nil {
|
if err := d.WaitLimit(ctx); err != nil {
|
||||||
return nil, err
|
return err
|
||||||
}
|
}
|
||||||
|
if _, err := d.client.Mkdir(parentDir.GetID(), dirName); err != nil {
|
||||||
result := driver115.MkdirResp{}
|
return err
|
||||||
form := map[string]string{
|
|
||||||
"pid": parentDir.GetID(),
|
|
||||||
"cname": dirName,
|
|
||||||
}
|
}
|
||||||
req := d.client.NewRequest().
|
return nil
|
||||||
SetFormData(form).
|
|
||||||
SetResult(&result).
|
|
||||||
ForceContentType("application/json;charset=UTF-8")
|
|
||||||
|
|
||||||
resp, err := req.Post(driver115.ApiDirAdd)
|
|
||||||
|
|
||||||
err = driver115.CheckErr(err, &result, resp)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
f, err := d.getNewFile(result.FileID)
|
|
||||||
if err != nil {
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
return f, nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Pan115) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
|
func (d *Pan115) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||||
if err := d.WaitLimit(ctx); err != nil {
|
if err := d.WaitLimit(ctx); err != nil {
|
||||||
return nil, err
|
return err
|
||||||
}
|
}
|
||||||
if err := d.client.Move(dstDir.GetID(), srcObj.GetID()); err != nil {
|
return d.client.Move(dstDir.GetID(), srcObj.GetID())
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
f, err := d.getNewFile(srcObj.GetID())
|
|
||||||
if err != nil {
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
return f, nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Pan115) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
|
func (d *Pan115) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||||
if err := d.WaitLimit(ctx); err != nil {
|
if err := d.WaitLimit(ctx); err != nil {
|
||||||
return nil, err
|
return err
|
||||||
}
|
}
|
||||||
if err := d.client.Rename(srcObj.GetID(), newName); err != nil {
|
return d.client.Rename(srcObj.GetID(), newName)
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
f, err := d.getNewFile((srcObj.GetID()))
|
|
||||||
if err != nil {
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
return f, nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Pan115) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
func (d *Pan115) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||||
@@ -149,9 +114,9 @@ func (d *Pan115) Remove(ctx context.Context, obj model.Obj) error {
 	return d.client.Delete(obj.GetID())
 }
 
-func (d *Pan115) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
+func (d *Pan115) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
 	if err := d.WaitLimit(ctx); err != nil {
-		return nil, err
+		return err
 	}
 
 	var (
@@ -160,10 +125,10 @@ func (d *Pan115) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
 	)
 
 	if ok, err := d.client.UploadAvailable(); err != nil || !ok {
-		return nil, err
+		return err
 	}
 	if stream.GetSize() > d.client.UploadMetaInfo.SizeLimit {
-		return nil, driver115.ErrUploadTooLarge
+		return driver115.ErrUploadTooLarge
 	}
 	//if digest, err = d.client.GetDigestResult(stream); err != nil {
 	//	return err
@@ -176,22 +141,22 @@ func (d *Pan115) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
 	}
 	reader, err := stream.RangeRead(http_range.Range{Start: 0, Length: hashSize})
 	if err != nil {
-		return nil, err
+		return err
 	}
 	preHash, err := utils.HashReader(utils.SHA1, reader)
 	if err != nil {
-		return nil, err
+		return err
 	}
 	preHash = strings.ToUpper(preHash)
 	fullHash := stream.GetHash().GetHash(utils.SHA1)
 	if len(fullHash) <= 0 {
 		tmpF, err := stream.CacheFullInTempFile()
 		if err != nil {
-			return nil, err
+			return err
 		}
 		fullHash, err = utils.HashFile(utils.SHA1, tmpF)
 		if err != nil {
-			return nil, err
+			return err
 		}
 	}
 	fullHash = strings.ToUpper(fullHash)
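The Put hunk above hashes the first slice of the stream (preHash) and, when the full SHA-1 is not already known, the whole cached file (fullHash) before asking 115 for a rapid upload. Below is a sketch of that hashing step in isolation, standard library only; the 128 KB prefix length and the example.bin file name are assumptions, the driver takes the real length from its hashSize constant and reads from the incoming stream.

package main

import (
    "crypto/sha1"
    "encoding/hex"
    "fmt"
    "io"
    "os"
    "strings"
)

// preHashSize mirrors the fixed-length prefix hashed first; 128 KB is an
// assumption here, the driver reads the real value from its hashSize constant.
const preHashSize = 128 * 1024

// sha1Hex hashes at most limit bytes of r (limit < 0 means the whole reader)
// and upper-cases the hex digest, as the Put hunk does.
func sha1Hex(r io.Reader, limit int64) (string, error) {
    if limit >= 0 {
        r = io.LimitReader(r, limit)
    }
    h := sha1.New()
    if _, err := io.Copy(h, r); err != nil {
        return "", err
    }
    return strings.ToUpper(hex.EncodeToString(h.Sum(nil))), nil
}

func main() {
    f, err := os.Open("example.bin") // hypothetical input file
    if err != nil {
        panic(err)
    }
    defer f.Close()

    preHash, err := sha1Hex(f, preHashSize)
    if err != nil {
        panic(err)
    }
    if _, err := f.Seek(0, io.SeekStart); err != nil {
        panic(err)
    }
    fullHash, err := sha1Hex(f, -1)
    if err != nil {
        panic(err)
    }
    fmt.Println("preHash:", preHash)
    fmt.Println("fullHash:", fullHash)
}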
@@ -200,52 +165,21 @@ func (d *Pan115) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
 	// note that 115 add timeout for rapid-upload,
 	// and "sig invalid" err is thrown even when the hash is correct after timeout.
 	if fastInfo, err = d.rapidUpload(stream.GetSize(), stream.GetName(), dirID, preHash, fullHash, stream); err != nil {
-		return nil, err
+		return err
 	}
 	if matched, err := fastInfo.Ok(); err != nil {
-		return nil, err
+		return err
 	} else if matched {
-		f, err := d.getNewFileByPickCode(fastInfo.PickCode)
-		if err != nil {
-			return nil, nil
-		}
-		return f, nil
+		return nil
 	}
 
-	var uploadResult *UploadResult
 	// rapid upload failed, fall back to a normal upload
-	if stream.GetSize() <= 10*utils.MB { // files smaller than 10 MB use the normal upload mode
-		if uploadResult, err = d.UploadByOSS(&fastInfo.UploadOSSParams, stream, dirID); err != nil {
-			return nil, err
-		}
-	} else {
-		// multipart upload
-		if uploadResult, err = d.UploadByMultipart(&fastInfo.UploadOSSParams, stream.GetSize(), stream, dirID); err != nil {
-			return nil, err
-		}
+	if stream.GetSize() <= utils.KB { // files smaller than 1 KB use the normal upload mode
+		return d.client.UploadByOSS(&fastInfo.UploadOSSParams, stream, dirID)
 	}
-
-	file, err := d.getNewFile(uploadResult.Data.FileID)
-	if err != nil {
-		return nil, nil
-	}
-	return file, nil
-}
-
-func (d *Pan115) OfflineList(ctx context.Context) ([]*driver115.OfflineTask, error) {
-	resp, err := d.client.ListOfflineTask(0)
-	if err != nil {
-		return nil, err
-	}
-	return resp.Tasks, nil
-}
-
-func (d *Pan115) OfflineDownload(ctx context.Context, uris []string, dstDir model.Obj) ([]string, error) {
-	return d.client.AddOfflineTaskURIs(uris, dstDir.GetID())
-}
-
-func (d *Pan115) DeleteOfflineTasks(ctx context.Context, hashes []string, deleteFiles bool) error {
-	return d.client.DeleteOfflineTasks(hashes, deleteFiles)
+	// multipart upload
+	return d.UploadByMultipart(&fastInfo.UploadOSSParams, stream.GetSize(), stream, dirID)
 }
 
 var _ driver.Driver = (*Pan115)(nil)
@@ -6,20 +6,19 @@ import (
 )
 
 type Addition struct {
 	Cookie      string  `json:"cookie" type:"text" help:"one of QR code token and cookie required"`
 	QRCodeToken string  `json:"qrcode_token" type:"text" help:"one of QR code token and cookie required"`
-	QRCodeSource string `json:"qrcode_source" type:"select" options:"web,android,ios,tv,alipaymini,wechatmini,qandroid" default:"linux" help:"select the QR code device, default linux"`
-	PageSize    int64   `json:"page_size" type:"number" default:"1000" help:"list api per page size of 115 driver"`
-	LimitRate   float64 `json:"limit_rate" type:"number" default:"2" help:"limit all api request rate ([limit]r/1s)"`
+	PageSize    int64   `json:"page_size" type:"number" default:"56" help:"list api per page size of 115 driver"`
+	LimitRate   float64 `json:"limit_rate" type:"number" default:"2" help:"limit all api request rate (1r/[limit_rate]s)"`
 	driver.RootID
 }
 
 var config = driver.Config{
 	Name:        "115 Cloud",
 	DefaultRoot: "0",
-	// OnlyProxy:     true,
-	// OnlyLocal:         true,
-	// NoOverwriteUpload: true,
+	//OnlyProxy:     true,
+	//OnlyLocal:         true,
+	NoOverwriteUpload: true,
 }
 
 func init() {
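The help text above changes from "[limit]r/1s" to "1r/[limit_rate]s", so the default of 2 now means one request every two seconds rather than two requests per second. A minimal sketch of the two readings using golang.org/x/time/rate, the package the 123 driver uses elsewhere in this compare; it is only an illustration, not the 115 driver's actual limiter.

package main

import (
    "fmt"
    "time"

    "golang.org/x/time/rate"
)

func main() {
    // "1r/[limit_rate]s": one request every limit_rate seconds (limit_rate = 2 here).
    perInterval := rate.NewLimiter(rate.Every(2*time.Second), 1)

    // "[limit]r/1s": limit requests per second, the reading of the older help text.
    perSecond := rate.NewLimiter(rate.Limit(2), 1)

    fmt.Println("events per second allowed:", perInterval.Limit(), "vs", perSecond.Limit())
}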
@@ -1,11 +1,10 @@
 package _115
 
 import (
-	"time"
-
 	"github.com/SheltonZhu/115driver/pkg/driver"
 	"github.com/alist-org/alist/v3/internal/model"
 	"github.com/alist-org/alist/v3/pkg/utils"
+	"time"
 )
 
 var _ model.Obj = (*FileObj)(nil)
@@ -21,18 +20,3 @@ func (f *FileObj) CreateTime() time.Time {
 func (f *FileObj) GetHash() utils.HashInfo {
 	return utils.NewHashInfo(utils.SHA1, f.Sha1)
 }
-
-type UploadResult struct {
-	driver.BasicResp
-	Data struct {
-		PickCode string `json:"pick_code"`
-		FileSize int    `json:"file_size"`
-		FileID   string `json:"file_id"`
-		ThumbURL string `json:"thumb_url"`
-		Sha1     string `json:"sha1"`
-		Aid      int    `json:"aid"`
-		FileName string `json:"file_name"`
-		Cid      string `json:"cid"`
-		IsVideo  int    `json:"is_video"`
-	} `json:"data"`
-}
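The UploadResult struct removed above is the JSON payload 115 returns through the OSS upload callback; the UploadByOSS helper removed further down in this compare captures it with oss.CallbackResult and unmarshals it. A small sketch of just that decoding step, with a made-up callback body and only a few of the fields:

package main

import (
    "encoding/json"
    "fmt"
)

// uploadResult keeps only a subset of the callback fields; it follows the
// UploadResult struct deleted above.
type uploadResult struct {
    Data struct {
        PickCode string `json:"pick_code"`
        FileID   string `json:"file_id"`
        FileName string `json:"file_name"`
        Sha1     string `json:"sha1"`
    } `json:"data"`
}

func main() {
    // Made-up callback body; the real one arrives via oss.CallbackResult.
    body := []byte(`{"data":{"pick_code":"abc123","file_id":"f1","file_name":"example.txt","sha1":"DA39A3EE5E6B4B0D3255BFEF95601890AFD80709"}}`)

    var res uploadResult
    if err := json.Unmarshal(body, &res); err != nil {
        panic(err)
    }
    fmt.Println(res.Data.FileID, res.Data.FileName, res.Data.Sha1)
}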
@@ -2,14 +2,13 @@ package _115
 
 import (
 	"bytes"
-	"crypto/md5"
 	"crypto/tls"
-	"encoding/hex"
 	"encoding/json"
 	"fmt"
 	"io"
 	"net/http"
 	"net/url"
+	"path/filepath"
 	"strconv"
 	"strings"
 	"sync"
@@ -27,29 +26,29 @@ import (
 	"github.com/pkg/errors"
 )
 
-//var UserAgent = driver115.UA115Browser
+var UserAgent = driver115.UA115Desktop
 
 func (d *Pan115) login() error {
 	var err error
 	opts := []driver115.Option{
-		driver115.UA(d.getUA()),
+		driver115.UA(UserAgent),
 		func(c *driver115.Pan115Client) {
 			c.Client.SetTLSClientConfig(&tls.Config{InsecureSkipVerify: conf.Conf.TlsInsecureSkipVerify})
 		},
 	}
 	d.client = driver115.New(opts...)
 	cr := &driver115.Credential{}
-	if d.QRCodeToken != "" {
+	if d.Addition.QRCodeToken != "" {
 		s := &driver115.QRCodeSession{
-			UID: d.QRCodeToken,
+			UID: d.Addition.QRCodeToken,
 		}
-		if cr, err = d.client.QRCodeLoginWithApp(s, driver115.LoginApp(d.QRCodeSource)); err != nil {
+		if cr, err = d.client.QRCodeLogin(s); err != nil {
 			return errors.Wrap(err, "failed to login by qrcode")
 		}
-		d.Cookie = fmt.Sprintf("UID=%s;CID=%s;SEID=%s", cr.UID, cr.CID, cr.SEID)
-		d.QRCodeToken = ""
-	} else if d.Cookie != "" {
-		if err = cr.FromCookie(d.Cookie); err != nil {
+		d.Addition.Cookie = fmt.Sprintf("UID=%s;CID=%s;SEID=%s", cr.UID, cr.CID, cr.SEID)
+		d.Addition.QRCodeToken = ""
+	} else if d.Addition.Cookie != "" {
+		if err = cr.FromCookie(d.Addition.Cookie); err != nil {
 			return errors.Wrap(err, "failed to login by cookies")
 		}
 		d.client.ImportCredential(cr)
@@ -74,39 +73,11 @@ func (d *Pan115) getFiles(fileId string) ([]FileObj, error) {
 	return res, nil
 }
 
-func (d *Pan115) getNewFile(fileId string) (*FileObj, error) {
-	file, err := d.client.GetFile(fileId)
-	if err != nil {
-		return nil, err
-	}
-	return &FileObj{*file}, nil
-}
-
-func (d *Pan115) getNewFileByPickCode(pickCode string) (*FileObj, error) {
-	result := driver115.GetFileInfoResponse{}
-	req := d.client.NewRequest().
-		SetQueryParam("pick_code", pickCode).
-		ForceContentType("application/json;charset=UTF-8").
-		SetResult(&result)
-	resp, err := req.Get(driver115.ApiFileInfo)
-	if err := driver115.CheckErr(err, &result, resp); err != nil {
-		return nil, err
-	}
-	if len(result.Files) == 0 {
-		return nil, errors.New("not get file info")
-	}
-	fileInfo := result.Files[0]
-
-	f := &FileObj{}
-	f.From(fileInfo)
-	return f, nil
-}
-
-func (d *Pan115) getUA() string {
-	return fmt.Sprintf("Mozilla/5.0 115Browser/%s", appVer)
-}
-
-func (d *Pan115) DownloadWithUA(pickCode, ua string) (*driver115.DownloadInfo, error) {
+const (
+	appVer = "2.0.3.6"
+)
+
+func (c *Pan115) DownloadWithUA(pickCode, ua string) (*driver115.DownloadInfo, error) {
 	key := crypto.GenerateKey()
 	result := driver115.DownloadResp{}
 	params, err := utils.Json.Marshal(map[string]string{"pickcode": pickCode})
@@ -120,10 +91,10 @@ func (d *Pan115) DownloadWithUA(pickCode, ua string) (*driver115.DownloadInfo, e
 	reqUrl := fmt.Sprintf("%s?t=%s", driver115.ApiDownloadGetUrl, driver115.Now().String())
 	req, _ := http.NewRequest(http.MethodPost, reqUrl, bodyReader)
 	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
-	req.Header.Set("Cookie", d.Cookie)
+	req.Header.Set("Cookie", c.Cookie)
 	req.Header.Set("User-Agent", ua)
 
-	resp, err := d.client.Client.GetClient().Do(req)
+	resp, err := c.client.Client.GetClient().Do(req)
 	if err != nil {
 		return nil, err
 	}
@@ -161,13 +132,6 @@ func (d *Pan115) DownloadWithUA(pickCode, ua string) (*driver115.DownloadInfo, e
 	return nil, driver115.ErrUnexpected
 }
 
-func (c *Pan115) GenerateToken(fileID, preID, timeStamp, fileSize, signKey, signVal string) string {
-	userID := strconv.FormatInt(c.client.UserID, 10)
-	userIDMd5 := md5.Sum([]byte(userID))
-	tokenMd5 := md5.Sum([]byte(md5Salt + fileID + fileSize + signKey + signVal + userID + timeStamp + hex.EncodeToString(userIDMd5[:]) + appVer))
-	return hex.EncodeToString(tokenMd5[:])
-}
-
 func (d *Pan115) rapidUpload(fileSize int64, fileName, dirID, preID, fileID string, stream model.FileStreamer) (*driver115.UploadInitResp, error) {
 	var (
 		ecdhCipher   *cipher.EcdhCipher
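The hunk above swaps the driver's own GenerateToken for the client library's implementation. The removed helper hashes a fixed concatenation: md5Salt + fileID + fileSize + signKey + signVal + userID + timeStamp + md5(userID) + appVer. A standalone sketch of that concatenation follows; the salt and every argument value in main are placeholders, the real md5Salt is defined elsewhere in the driver.

package main

import (
    "crypto/md5"
    "encoding/hex"
    "fmt"
)

// generateToken reproduces the concatenation order used by the removed
// helper and returns the hex-encoded MD5 of the whole string.
func generateToken(md5Salt, fileID, fileSize, signKey, signVal, userID, timeStamp, appVer string) string {
    userIDMd5 := md5.Sum([]byte(userID))
    sum := md5.Sum([]byte(md5Salt + fileID + fileSize + signKey + signVal +
        userID + timeStamp + hex.EncodeToString(userIDMd5[:]) + appVer))
    return hex.EncodeToString(sum[:])
}

func main() {
    // All values below are illustrative placeholders.
    token := generateToken("example-salt", "ABC123", "1024", "", "", "10000", "1700000000", "2.0.3.6")
    fmt.Println(token)
}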
@@ -197,7 +161,7 @@ func (d *Pan115) rapidUpload(fileSize int64, fileName, dirID, preID, fileID stri
 
 	signKey, signVal := "", ""
 	for retry := true; retry; {
-		t := driver115.NowMilli()
+		t := driver115.Now()
 
 		if encodedToken, err = ecdhCipher.EncodeToken(t.ToInt64()); err != nil {
 			return nil, err
@@ -208,7 +172,7 @@ func (d *Pan115) rapidUpload(fileSize int64, fileName, dirID, preID, fileID stri
 		}
 
 		form.Set("t", t.String())
-		form.Set("token", d.GenerateToken(fileID, preID, t.String(), fileSizeStr, signKey, signVal))
+		form.Set("token", d.client.GenerateToken(fileID, preID, t.String(), fileSizeStr, signKey, signVal))
 		if signKey != "" && signVal != "" {
 			form.Set("sign_key", signKey)
 			form.Set("sign_val", signVal)
@@ -261,9 +225,6 @@ func UploadDigestRange(stream model.FileStreamer, rangeSpec string) (result stri
 
 	length := end - start + 1
 	reader, err := stream.RangeRead(http_range.Range{Start: start, Length: length})
-	if err != nil {
-		return "", err
-	}
 	hashStr, err := utils.HashReader(utils.SHA1, reader)
 	if err != nil {
 		return "", err
@@ -272,38 +233,8 @@ func UploadDigestRange(stream model.FileStreamer, rangeSpec string) (result stri
 	return
 }
 
-// UploadByOSS use aliyun sdk to upload
-func (c *Pan115) UploadByOSS(params *driver115.UploadOSSParams, r io.Reader, dirID string) (*UploadResult, error) {
-	ossToken, err := c.client.GetOSSToken()
-	if err != nil {
-		return nil, err
-	}
-	ossClient, err := oss.New(driver115.OSSEndpoint, ossToken.AccessKeyID, ossToken.AccessKeySecret)
-	if err != nil {
-		return nil, err
-	}
-	bucket, err := ossClient.Bucket(params.Bucket)
-	if err != nil {
-		return nil, err
-	}
-
-	var bodyBytes []byte
-	if err = bucket.PutObject(params.Object, r, append(
-		driver115.OssOption(params, ossToken),
-		oss.CallbackResult(&bodyBytes),
-	)...); err != nil {
-		return nil, err
-	}
-
-	var uploadResult UploadResult
-	if err = json.Unmarshal(bodyBytes, &uploadResult); err != nil {
-		return nil, err
-	}
-	return &uploadResult, uploadResult.Err(string(bodyBytes))
-}
-
 // UploadByMultipart upload by mutipart blocks
-func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize int64, stream model.FileStreamer, dirID string, opts ...driver115.UploadMultipartOption) (*UploadResult, error) {
+func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize int64, stream model.FileStreamer, dirID string, opts ...driver115.UploadMultipartOption) error {
 	var (
 		chunks    []oss.FileChunk
 		parts     []oss.UploadPart
@@ -311,13 +242,12 @@ func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize i
 		ossClient *oss.Client
 		bucket    *oss.Bucket
 		ossToken  *driver115.UploadOSSTokenResp
-		bodyBytes []byte
 		err       error
 	)
 
 	tmpF, err := stream.CacheFullInTempFile()
 	if err != nil {
-		return nil, err
+		return err
 	}
 
 	options := driver115.DefalutUploadMultipartOptions()
@@ -326,19 +256,17 @@ func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize i
 			f(options)
 		}
 	}
-	// OSS requires uploading parts in order when Sequential is enabled
-	options.ThreadsNum = 1
 
 	if ossToken, err = d.client.GetOSSToken(); err != nil {
-		return nil, err
+		return err
 	}
 
-	if ossClient, err = oss.New(driver115.OSSEndpoint, ossToken.AccessKeyID, ossToken.AccessKeySecret, oss.EnableMD5(true), oss.EnableCRC(true)); err != nil {
-		return nil, err
+	if ossClient, err = oss.New(driver115.OSSEndpoint, ossToken.AccessKeyID, ossToken.AccessKeySecret); err != nil {
+		return err
 	}
 
 	if bucket, err = ossClient.Bucket(params.Bucket); err != nil {
-		return nil, err
+		return err
 	}
 
 	// the ossToken expires after an hour, so refresh it every 50 minutes
@@ -348,15 +276,14 @@ func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize i
 	timeout := time.NewTimer(options.Timeout)
 
 	if chunks, err = SplitFile(fileSize); err != nil {
-		return nil, err
+		return err
 	}
 
 	if imur, err = bucket.InitiateMultipartUpload(params.Object,
 		oss.SetHeader(driver115.OssSecurityTokenHeaderName, ossToken.SecurityToken),
 		oss.UserAgentHeader(driver115.OSSUserAgent),
-		oss.EnableSha1(), oss.Sequential(),
 	); err != nil {
-		return nil, err
+		return err
 	}
 
 	wg := sync.WaitGroup{}
@@ -398,7 +325,8 @@ func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize i
 				continue
 			}
 
-			if part, err = bucket.UploadPart(imur, bytes.NewBuffer(buf), chunk.Size, chunk.Number, driver115.OssOption(params, ossToken)...); err == nil {
+			b := bytes.NewBuffer(buf)
+			if part, err = bucket.UploadPart(imur, b, chunk.Size, chunk.Number, driver115.OssOption(params, ossToken)...); err == nil {
 				break
 			}
 		}
@@ -422,38 +350,51 @@ LOOP:
 		case <-ticker.C:
 			// refresh the ossToken when the ticker fires
 			if ossToken, err = d.client.GetOSSToken(); err != nil {
-				return nil, err
+				return err
 			}
 		case <-quit:
 			break LOOP
 		case <-errCh:
-			return nil, err
+			return err
 		case <-timeout.C:
-			return nil, fmt.Errorf("time out")
+			return fmt.Errorf("time out")
 		}
 	}
 
-	// for unknown reasons OSS does not compute the sha1 for multipart uploads, which makes the 115 server-side check fail
-	// params.Callback.Callback = strings.ReplaceAll(params.Callback.Callback, "${sha1}", params.SHA1)
-	if _, err := bucket.CompleteMultipartUpload(imur, parts, append(
-		driver115.OssOption(params, ossToken),
-		oss.CallbackResult(&bodyBytes),
-	)...); err != nil {
-		return nil, err
+	// the EOF error comes from the xml Unmarshal: the response is actually JSON, so the upload did succeed
+	if _, err = bucket.CompleteMultipartUpload(imur, parts, driver115.OssOption(params, ossToken)...); err != nil && !errors.Is(err, io.EOF) {
+		// when the file name contains & or <, parsing the response as xml fails even though the upload succeeded
+		if filename := filepath.Base(stream.GetName()); !strings.ContainsAny(filename, "&<") {
+			return err
+		}
 	}
-
-	var uploadResult UploadResult
-	if err = json.Unmarshal(bodyBytes, &uploadResult); err != nil {
-		return nil, err
-	}
-	return &uploadResult, uploadResult.Err(string(bodyBytes))
+	return d.checkUploadStatus(dirID, params.SHA1)
 }
 
 func chunksProducer(ch chan oss.FileChunk, chunks []oss.FileChunk) {
 	for _, chunk := range chunks {
 		ch <- chunk
 	}
 }
+
+func (d *Pan115) checkUploadStatus(dirID, sha1 string) error {
+	// verify that the upload succeeded
+	req := d.client.NewRequest().ForceContentType("application/json;charset=UTF-8")
+	opts := []driver115.GetFileOptions{
+		driver115.WithOrder(driver115.FileOrderByTime),
+		driver115.WithShowDirEnable(false),
+		driver115.WithAsc(false),
+		driver115.WithLimit(500),
+	}
+	fResp, err := driver115.GetFiles(req, dirID, opts...)
+	if err != nil {
+		return err
+	}
+	for _, fileInfo := range fResp.Files {
+		if fileInfo.Sha1 == sha1 {
+			return nil
+		}
+	}
+	return driver115.ErrUploadFailed
+}
 
 func SplitFile(fileSize int64) (chunks []oss.FileChunk, err error) {
 	for i := int64(1); i < 10; i++ {
@@ -490,8 +431,8 @@ func SplitFileByPartNum(fileSize int64, chunkNum int) ([]oss.FileChunk, error) {
 	}
 
 	var chunks []oss.FileChunk
-	chunk := oss.FileChunk{}
-	chunkN := (int64)(chunkNum)
+	var chunk = oss.FileChunk{}
+	var chunkN = (int64)(chunkNum)
 	for i := int64(0); i < chunkN; i++ {
 		chunk.Number = int(i + 1)
 		chunk.Offset = i * (fileSize / chunkN)
@@ -513,13 +454,13 @@ func SplitFileByPartSize(fileSize int64, chunkSize int64) ([]oss.FileChunk, erro
 		return nil, errors.New("chunkSize invalid")
 	}
 
-	chunkN := fileSize / chunkSize
+	var chunkN = fileSize / chunkSize
 	if chunkN >= 10000 {
 		return nil, errors.New("Too many parts, please increase part size")
 	}
 
 	var chunks []oss.FileChunk
-	chunk := oss.FileChunk{}
+	var chunk = oss.FileChunk{}
 	for i := int64(0); i < chunkN; i++ {
 		chunk.Number = int(i + 1)
 		chunk.Offset = i * chunkSize
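SplitFileByPartNum and SplitFileByPartSize above carve a file into OSS chunks: every chunk gets fileSize / chunkN bytes and the last one absorbs the remainder. A standard-library sketch of that arithmetic; the Chunk type is a stand-in for oss.FileChunk and splitByPartNum is a hypothetical helper, not the driver's exported function.

package main

import (
    "errors"
    "fmt"
)

// Chunk is a stand-in for oss.FileChunk with the fields used above.
type Chunk struct {
    Number int
    Offset int64
    Size   int64
}

// splitByPartNum divides fileSize into chunkNum parts; the last part takes
// the remainder, mirroring the Offset = i * (fileSize / chunkN) logic above.
func splitByPartNum(fileSize int64, chunkNum int) ([]Chunk, error) {
    if chunkNum <= 0 || chunkNum > 10000 {
        return nil, errors.New("chunkNum invalid")
    }
    chunkN := int64(chunkNum)
    chunks := make([]Chunk, 0, chunkNum)
    for i := int64(0); i < chunkN; i++ {
        c := Chunk{
            Number: int(i + 1),
            Offset: i * (fileSize / chunkN),
            Size:   fileSize / chunkN,
        }
        if i == chunkN-1 {
            c.Size = fileSize - c.Offset // last chunk absorbs the remainder
        }
        chunks = append(chunks, c)
    }
    return chunks, nil
}

func main() {
    chunks, err := splitByPartNum(1050, 4)
    if err != nil {
        panic(err)
    }
    for _, c := range chunks {
        fmt.Printf("part %d: offset=%d size=%d\n", c.Number, c.Offset, c.Size)
    }
}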
@@ -6,13 +6,12 @@ import (
 )
 
 type Addition struct {
 	Cookie      string  `json:"cookie" type:"text" help:"one of QR code token and cookie required"`
 	QRCodeToken string  `json:"qrcode_token" type:"text" help:"one of QR code token and cookie required"`
-	QRCodeSource string `json:"qrcode_source" type:"select" options:"web,android,ios,tv,alipaymini,wechatmini,qandroid" default:"linux" help:"select the QR code device, default linux"`
-	PageSize    int64   `json:"page_size" type:"number" default:"1000" help:"list api per page size of 115 driver"`
+	PageSize    int64   `json:"page_size" type:"number" default:"20" help:"list api per page size of 115 driver"`
 	LimitRate   float64 `json:"limit_rate" type:"number" default:"2" help:"limit all api request rate (1r/[limit_rate]s)"`
 	ShareCode   string  `json:"share_code" type:"text" required:"true" help:"share code of 115 share link"`
 	ReceiveCode string  `json:"receive_code" type:"text" required:"true" help:"receive code of 115 share link"`
 	driver.RootID
 }
 
@@ -93,7 +93,7 @@ func (d *Pan115Share) login() error {
 		s := &driver115.QRCodeSession{
 			UID: d.QRCodeToken,
 		}
-		if cr, err = d.client.QRCodeLoginWithApp(s, driver115.LoginApp(d.QRCodeSource)); err != nil {
+		if cr, err = d.client.QRCodeLogin(s); err != nil {
 			return errors.Wrap(err, "failed to login by qrcode")
 		}
 		d.Cookie = fmt.Sprintf("UID=%s;CID=%s;SEID=%s", cr.UID, cr.CID, cr.SEID)
@@ -6,13 +6,6 @@ import (
 	"encoding/base64"
 	"encoding/hex"
 	"fmt"
-	"golang.org/x/time/rate"
-	"io"
-	"net/http"
-	"net/url"
-	"sync"
-	"time"
 
 	"github.com/alist-org/alist/v3/drivers/base"
 	"github.com/alist-org/alist/v3/internal/driver"
 	"github.com/alist-org/alist/v3/internal/errs"
@@ -24,12 +17,14 @@ import (
 	"github.com/aws/aws-sdk-go/service/s3/s3manager"
 	"github.com/go-resty/resty/v2"
 	log "github.com/sirupsen/logrus"
+	"io"
+	"net/http"
+	"net/url"
 )
 
 type Pan123 struct {
 	model.Storage
 	Addition
-	apiRateLimit sync.Map
 }
 
 func (d *Pan123) Config() driver.Config {
@@ -53,7 +48,7 @@ func (d *Pan123) Drop(ctx context.Context) error {
 }
 
 func (d *Pan123) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
-	files, err := d.getFiles(ctx, dir.GetID(), dir.GetName())
+	files, err := d.getFiles(dir.GetID())
 	if err != nil {
 		return nil, err
 	}
@@ -82,7 +77,6 @@ func (d *Pan123) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
 		"type":     f.Type,
 	}
 	resp, err := d.request(DownloadInfo, http.MethodPost, func(req *resty.Request) {
-
 		req.SetBody(data).SetHeaders(headers)
 	}, nil)
 	if err != nil {
@@ -195,7 +189,7 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
 	defer func() {
 		_ = tempFile.Close()
 	}()
-	if _, err = utils.CopyWithBuffer(h, tempFile); err != nil {
+	if _, err = io.Copy(h, tempFile); err != nil {
 		return err
 	}
 	_, err = tempFile.Seek(0, io.SeekStart)
@@ -238,9 +232,6 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
 		return err
 	}
 	uploader := s3manager.NewUploader(s)
-	if stream.GetSize() > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
-		uploader.PartSize = stream.GetSize() / (s3manager.MaxUploadParts - 1)
-	}
 	input := &s3manager.UploadInput{
 		Bucket: &resp.Data.Bucket,
 		Key:    &resp.Data.Key,
@@ -248,6 +239,9 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
 		}
 		_, err = uploader.UploadWithContext(ctx, input)
 	}
+	if err != nil {
+		return err
+	}
 	_, err = d.request(UploadComplete, http.MethodPost, func(req *resty.Request) {
 		req.SetBody(base.Json{
 			"fileId": resp.Data.FileId,
@@ -256,12 +250,4 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
 	return err
 }
 
-func (d *Pan123) APIRateLimit(ctx context.Context, api string) error {
-	value, _ := d.apiRateLimit.LoadOrStore(api,
-		rate.NewLimiter(rate.Every(700*time.Millisecond), 1))
-	limiter := value.(*rate.Limiter)
-
-	return limiter.Wait(ctx)
-}
-
 var _ driver.Driver = (*Pan123)(nil)
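The lines removed in the Put hunk above grow the uploader's part size whenever the stream would not fit into s3manager.MaxUploadParts parts of the default size. A sketch of that calculation in isolation; partSizeFor is a hypothetical helper, only the two s3manager constants come from the SDK.

package main

import (
    "fmt"

    "github.com/aws/aws-sdk-go/service/s3/s3manager"
)

// partSizeFor returns a part size large enough that size bytes fit within
// s3manager.MaxUploadParts parts, as the removed lines above do.
func partSizeFor(size int64) int64 {
    if size > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
        return size / (s3manager.MaxUploadParts - 1)
    }
    return s3manager.DefaultUploadPartSize
}

func main() {
    const tib = int64(1) << 40
    fmt.Println("part size for 1 TiB:", partSizeFor(tib))
    fmt.Println("part size for 1 GiB:", partSizeFor(1<<30))
}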
@@ -9,15 +9,14 @@ type Addition struct {
 	Username string `json:"username" required:"true"`
 	Password string `json:"password" required:"true"`
 	driver.RootID
-	//OrderBy        string `json:"order_by" type:"select" options:"file_id,file_name,size,update_at" default:"file_name"`
-	//OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
+	OrderBy        string `json:"order_by" type:"select" options:"file_name,size,update_at" default:"file_name"`
+	OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
 	AccessToken string
 }
 
 var config = driver.Config{
 	Name:        "123Pan",
 	DefaultRoot: "0",
-	LocalSort:   true,
 }
 
 func init() {
@@ -87,9 +87,8 @@ var _ model.Thumb = (*File)(nil)
 type Files struct {
 	//BaseResp
 	Data struct {
-		Next     string `json:"Next"`
-		Total    int    `json:"Total"`
 		InfoList []File `json:"InfoList"`
+		Next     string `json:"Next"`
 	} `json:"data"`
 }
 
@@ -1,7 +1,6 @@
 package _123
 
 import (
-	"context"
 	"errors"
 	"fmt"
 	"hash/crc32"
@@ -15,9 +14,8 @@ import (
 
 	"github.com/alist-org/alist/v3/drivers/base"
 	"github.com/alist-org/alist/v3/pkg/utils"
-	"github.com/go-resty/resty/v2"
+	resty "github.com/go-resty/resty/v2"
 	jsoniter "github.com/json-iterator/go"
-	log "github.com/sirupsen/logrus"
 )
 
 // do others that not defined in Driver interface
@@ -26,9 +24,8 @@ const (
 	Api      = "https://www.123pan.com/api"
 	AApi     = "https://www.123pan.com/a/api"
 	BApi     = "https://www.123pan.com/b/api"
-	LoginApi = "https://login.123pan.com/api"
 	MainApi  = BApi
-	SignIn   = LoginApi + "/user/sign_in"
+	SignIn   = MainApi + "/user/sign_in"
 	Logout   = MainApi + "/user/logout"
 	UserInfo = MainApi + "/user/info"
 	FileList = MainApi + "/file/list/new"
@@ -163,7 +160,7 @@ func (d *Pan123) login() error {
 		SetHeaders(map[string]string{
 			"origin":      "https://www.123pan.com",
 			"referer":     "https://www.123pan.com/",
-			"user-agent":  "Dart/2.19(dart:io)-alist",
+			"user-agent":  "Dart/2.19(dart:io)",
 			"platform":    "web",
 			"app-version": "3",
 			//"user-agent":  base.UserAgent,
@@ -200,7 +197,7 @@ func (d *Pan123) request(url string, method string, callback base.ReqCallback, r
 		"origin":        "https://www.123pan.com",
 		"referer":       "https://www.123pan.com/",
 		"authorization": "Bearer " + d.AccessToken,
-		"user-agent":    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) alist-client",
+		"user-agent":    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36 Edg/119.0.0.0",
 		"platform":      "web",
 		"app-version":   "3",
 		//"user-agent":    base.UserAgent,
@@ -235,22 +232,17 @@ func (d *Pan123) request(url string, method string, callback base.ReqCallback, r
 	return body, nil
 }
 
-func (d *Pan123) getFiles(ctx context.Context, parentId string, name string) ([]File, error) {
+func (d *Pan123) getFiles(parentId string) ([]File, error) {
 	page := 1
-	total := 0
 	res := make([]File, 0)
-	// 2024-02-06 fix concurrency by 123pan
 	for {
-		if err := d.APIRateLimit(ctx, FileList); err != nil {
-			return nil, err
-		}
 		var resp Files
 		query := map[string]string{
 			"driveId":        "0",
 			"limit":          "100",
 			"next":           "0",
-			"orderBy":        "file_id",
-			"orderDirection": "desc",
+			"orderBy":        d.OrderBy,
+			"orderDirection": d.OrderDirection,
 			"parentFileId":   parentId,
 			"trashed":        "false",
 			"SearchData":     "",
@@ -260,22 +252,17 @@ func (d *Pan123) getFiles(ctx context.Context, parentId string, name string) ([]
 			"operateType":    "4",
 			"inDirectSpace":  "false",
 		}
-		_res, err := d.request(FileList, http.MethodGet, func(req *resty.Request) {
+		_, err := d.request(FileList, http.MethodGet, func(req *resty.Request) {
 			req.SetQueryParams(query)
 		}, &resp)
 		if err != nil {
 			return nil, err
 		}
-		log.Debug(string(_res))
 		page++
 		res = append(res, resp.Data.InfoList...)
-		total = resp.Data.Total
 		if len(resp.Data.InfoList) == 0 || resp.Data.Next == "-1" {
 			break
 		}
 	}
-	if len(res) != total {
-		log.Warnf("incorrect file count from remote at %s: expected %d, got %d", name, total, len(res))
-	}
 	return res, nil
 }
@@ -4,11 +4,8 @@ import (
 	"context"
 	"encoding/base64"
 	"fmt"
-	"golang.org/x/time/rate"
 	"net/http"
 	"net/url"
-	"sync"
-	"time"
 
 	"github.com/alist-org/alist/v3/drivers/base"
 	"github.com/alist-org/alist/v3/internal/driver"
@@ -22,7 +19,6 @@ import (
 type Pan123Share struct {
 	model.Storage
 	Addition
-	apiRateLimit sync.Map
 }
 
 func (d *Pan123Share) Config() driver.Config {
@@ -45,7 +41,7 @@ func (d *Pan123Share) Drop(ctx context.Context) error {
 
 func (d *Pan123Share) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
 	// TODO return the files list, required
-	files, err := d.getFiles(ctx, dir.GetID())
+	files, err := d.getFiles(dir.GetID())
 	if err != nil {
 		return nil, err
 	}
@@ -150,12 +146,4 @@ func (d *Pan123Share) Put(ctx context.Context, dstDir model.Obj, stream model.Fi
 //	return nil, errs.NotSupport
 //}
 
-func (d *Pan123Share) APIRateLimit(ctx context.Context, api string) error {
-	value, _ := d.apiRateLimit.LoadOrStore(api,
-		rate.NewLimiter(rate.Every(700*time.Millisecond), 1))
-	limiter := value.(*rate.Limiter)
-
-	return limiter.Wait(ctx)
-}
-
 var _ driver.Driver = (*Pan123Share)(nil)
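The APIRateLimit helper removed above (and its twin in the 123 driver earlier in this compare) keeps one rate.Limiter per endpoint in a sync.Map and allows one call per 700 ms per API. A self-contained sketch of the same pattern; the endpoint string in main is only an example.

package main

import (
    "context"
    "fmt"
    "sync"
    "time"

    "golang.org/x/time/rate"
)

// apiLimiters holds one limiter per API endpoint, like the removed
// apiRateLimit sync.Map on the driver struct.
var apiLimiters sync.Map

// waitForAPI blocks until the per-endpoint limiter (1 request / 700 ms)
// allows another call, mirroring the removed APIRateLimit helper.
func waitForAPI(ctx context.Context, api string) error {
    v, _ := apiLimiters.LoadOrStore(api, rate.NewLimiter(rate.Every(700*time.Millisecond), 1))
    return v.(*rate.Limiter).Wait(ctx)
}

func main() {
    ctx := context.Background()
    for i := 0; i < 3; i++ {
        if err := waitForAPI(ctx, "https://www.123pan.com/api/share/get"); err != nil {
            panic(err)
        }
        fmt.Println("call", i, "at", time.Now().Format("15:04:05.000"))
    }
}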
@@ -7,11 +7,10 @@ import (
 
 type Addition struct {
 	ShareKey string `json:"sharekey" required:"true"`
-	SharePwd string `json:"sharepassword"`
+	SharePwd string `json:"sharepassword" required:"true"`
 	driver.RootID
-	//OrderBy        string `json:"order_by" type:"select" options:"file_name,size,update_at" default:"file_name"`
-	//OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
-	AccessToken string `json:"accesstoken" type:"text"`
+	OrderBy        string `json:"order_by" type:"select" options:"file_name,size,update_at" default:"file_name"`
+	OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
 }
 
 var config = driver.Config{
@@ -1,17 +1,9 @@
 package _123Share
 
 import (
-	"context"
 	"errors"
-	"fmt"
-	"hash/crc32"
-	"math"
-	"math/rand"
 	"net/http"
-	"net/url"
 	"strconv"
-	"strings"
-	"time"
 
 	"github.com/alist-org/alist/v3/drivers/base"
 	"github.com/alist-org/alist/v3/pkg/utils"
@@ -23,45 +15,20 @@ const (
 	Api          = "https://www.123pan.com/api"
 	AApi         = "https://www.123pan.com/a/api"
 	BApi         = "https://www.123pan.com/b/api"
-	MainApi      = BApi
+	MainApi      = Api
 	FileList     = MainApi + "/share/get"
 	DownloadInfo = MainApi + "/share/download/info"
 	//AuthKeySalt  = "8-8D$sL8gPjom7bk#cY"
 )
 
-func signPath(path string, os string, version string) (k string, v string) {
-	table := []byte{'a', 'd', 'e', 'f', 'g', 'h', 'l', 'm', 'y', 'i', 'j', 'n', 'o', 'p', 'k', 'q', 'r', 's', 't', 'u', 'b', 'c', 'v', 'w', 's', 'z'}
-	random := fmt.Sprintf("%.f", math.Round(1e7*rand.Float64()))
-	now := time.Now().In(time.FixedZone("CST", 8*3600))
-	timestamp := fmt.Sprint(now.Unix())
-	nowStr := []byte(now.Format("200601021504"))
-	for i := 0; i < len(nowStr); i++ {
-		nowStr[i] = table[nowStr[i]-48]
-	}
-	timeSign := fmt.Sprint(crc32.ChecksumIEEE(nowStr))
-	data := strings.Join([]string{timestamp, random, path, os, version, timeSign}, "|")
-	dataSign := fmt.Sprint(crc32.ChecksumIEEE([]byte(data)))
-	return timeSign, strings.Join([]string{timestamp, random, dataSign}, "-")
-}
-
-func GetApi(rawUrl string) string {
-	u, _ := url.Parse(rawUrl)
-	query := u.Query()
-	query.Add(signPath(u.Path, "web", "3"))
-	u.RawQuery = query.Encode()
-	return u.String()
-}
-
 func (d *Pan123Share) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
 	req := base.RestyClient.R()
 	req.SetHeaders(map[string]string{
 		"origin":        "https://www.123pan.com",
 		"referer":       "https://www.123pan.com/",
-		"authorization": "Bearer " + d.AccessToken,
-		"user-agent":    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) alist-client",
-		"platform":      "web",
-		"app-version":   "3",
-		//"user-agent":    base.UserAgent,
+		"user-agent":  "Dart/2.19(dart:io)",
+		"platform":    "android",
+		"app-version": "36",
 	})
 	if callback != nil {
 		callback(req)
@@ -69,7 +36,7 @@ func (d *Pan123Share) request(url string, method string, callback base.ReqCallba
 	if resp != nil {
 		req.SetResult(resp)
 	}
-	res, err := req.Execute(method, GetApi(url))
+	res, err := req.Execute(method, url)
 	if err != nil {
 		return nil, err
 	}
@@ -81,19 +48,16 @@ func (d *Pan123Share) request(url string, method string, callback base.ReqCallba
 	return body, nil
 }
 
-func (d *Pan123Share) getFiles(ctx context.Context, parentId string) ([]File, error) {
+func (d *Pan123Share) getFiles(parentId string) ([]File, error) {
 	page := 1
 	res := make([]File, 0)
 	for {
-		if err := d.APIRateLimit(ctx, FileList); err != nil {
-			return nil, err
-		}
 		var resp Files
 		query := map[string]string{
 			"limit":          "100",
 			"next":           "0",
-			"orderBy":        "file_id",
-			"orderDirection": "desc",
+			"orderBy":        d.OrderBy,
+			"orderDirection": d.OrderDirection,
 			"parentFileId":   parentId,
 			"Page":           strconv.Itoa(page),
 			"shareKey":       d.ShareKey,
@@ -8,21 +8,18 @@ import (
 	"net/http"
 	"strconv"
 	"strings"
-	"time"
 
 	"github.com/alist-org/alist/v3/drivers/base"
 	"github.com/alist-org/alist/v3/internal/driver"
 	"github.com/alist-org/alist/v3/internal/errs"
 	"github.com/alist-org/alist/v3/internal/model"
 	"github.com/alist-org/alist/v3/pkg/utils"
-	"github.com/alist-org/alist/v3/pkg/cron"
 	log "github.com/sirupsen/logrus"
 )
 
 type Yun139 struct {
 	model.Storage
 	Addition
-	cron    *cron.Cron
 	Account string
 }
 
@@ -38,13 +35,6 @@ func (d *Yun139) Init(ctx context.Context) error {
 	if d.Authorization == "" {
 		return fmt.Errorf("authorization is empty")
 	}
-	d.cron = cron.NewCron(time.Hour * 24 * 7)
-	d.cron.Do(func() {
-		err := d.refreshToken()
-		if err != nil {
-			log.Errorf("%+v", err)
-		}
-	})
 	switch d.Addition.Type {
 	case MetaPersonalNew:
 		if len(d.Addition.RootFolderID) == 0 {
@@ -82,9 +72,6 @@ func (d *Yun139) Init(ctx context.Context) error {
 }
 
 func (d *Yun139) Drop(ctx context.Context) error {
-	if d.cron != nil {
-		d.cron.Stop()
-	}
 	return nil
 }
 
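Init above used to start a weekly cron that called refreshToken and logged failures, and Drop stopped it. A minimal stand-in for that schedule with a plain time.Ticker; startTokenRefresh is a hypothetical helper and the one-second interval is only for the demo, the removed code used time.Hour * 24 * 7.

package main

import (
    "fmt"
    "time"
)

// startTokenRefresh runs refresh on the given interval until the returned
// stop function is called, playing the role of cron.NewCron + cron.Do above.
func startTokenRefresh(interval time.Duration, refresh func() error) (stop func()) {
    ticker := time.NewTicker(interval)
    done := make(chan struct{})
    go func() {
        for {
            select {
            case <-ticker.C:
                if err := refresh(); err != nil {
                    fmt.Printf("refresh token failed: %+v\n", err)
                }
            case <-done:
                ticker.Stop()
                return
            }
        }
    }()
    return func() { close(done) }
}

func main() {
    stop := startTokenRefresh(time.Second, func() error {
        fmt.Println("refreshing token")
        return nil
    })
    time.Sleep(2500 * time.Millisecond)
    stop() // what Drop would do before returning
}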
@@ -14,15 +14,12 @@ type Addition struct {
 }
 
 var config = driver.Config{
 	Name:             "139Yun",
 	LocalSort:        true,
-	ProxyRangeOption: true,
 }
 
 func init() {
 	op.RegisterDriver(func() driver.Driver {
-		d := &Yun139{}
-		d.ProxyRange = true
-		return d
+		return &Yun139{}
 	})
 }
@@ -1,9 +1,5 @@
 package _139
 
-import (
-	"encoding/xml"
-)
-
 const (
 	MetaPersonal string = "personal"
 	MetaFamily   string = "family"
@@ -234,12 +230,3 @@ type PersonalUploadResp struct {
 		UploadId    string `json:"uploadId"`
 	}
 }
-
-type RefreshTokenResp struct {
-	XMLName     xml.Name `xml:"root"`
-	Return      string   `xml:"return"`
-	Token       string   `xml:"token"`
-	Expiretime  int32    `xml:"expiretime"`
-	AccessToken string   `xml:"accessToken"`
-	Desc        string   `xml:"desc"`
-}
@@ -15,7 +15,6 @@ import (
 	"github.com/alist-org/alist/v3/internal/model"
 	"github.com/alist-org/alist/v3/pkg/utils"
 	"github.com/alist-org/alist/v3/pkg/utils/random"
-	"github.com/alist-org/alist/v3/internal/op"
 	"github.com/go-resty/resty/v2"
 	jsoniter "github.com/json-iterator/go"
 	log "github.com/sirupsen/logrus"
@@ -53,32 +52,6 @@ func getTime(t string) time.Time {
 	return stamp
 }
 
-func (d *Yun139) refreshToken() error {
-	url := "https://aas.caiyun.feixin.10086.cn:443/tellin/authTokenRefresh.do"
-	var resp RefreshTokenResp
-	decode, err := base64.StdEncoding.DecodeString(d.Authorization)
-	if err != nil {
-		return err
-	}
-	decodeStr := string(decode)
-	splits := strings.Split(decodeStr, ":")
-	reqBody := "<root><token>" + splits[2] + "</token><account>" + splits[1] + "</account><clienttype>656</clienttype></root>"
-	_, err = base.RestyClient.R().
-		ForceContentType("application/xml").
-		SetBody(reqBody).
-		SetResult(&resp).
-		Post(url)
-	if err != nil {
-		return err
-	}
-	if resp.Return != "0" {
-		return fmt.Errorf("failed to refresh token: %s", resp.Desc)
-	}
-	d.Authorization = base64.StdEncoding.EncodeToString([]byte(splits[0] + ":" + splits[1] + ":" + resp.Token))
-	op.MustSaveDriverStorage(d)
-	return nil
-}
-
 func (d *Yun139) request(pathname string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
 	url := "https://yun.139.com" + pathname
 	req := base.RestyClient.R()
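The removed refreshToken treats the Authorization value as base64 of a prefix:account:token triple, posts token and account as XML, and re-encodes the refreshed token into the same shape. Below is a sketch of just the decode/re-encode part (no HTTP); the sample value and the helper names are made up.

package main

import (
    "encoding/base64"
    "errors"
    "fmt"
    "strings"
)

// splitAuthorization decodes the base64 "prefix:account:token" triple the
// removed refreshToken relied on.
func splitAuthorization(auth string) (prefix, account, token string, err error) {
    raw, err := base64.StdEncoding.DecodeString(auth)
    if err != nil {
        return "", "", "", err
    }
    parts := strings.Split(string(raw), ":")
    if len(parts) < 3 {
        return "", "", "", errors.New("unexpected authorization format")
    }
    return parts[0], parts[1], parts[2], nil
}

// rebuildAuthorization re-encodes the triple with a refreshed token, as the
// removed code did after a successful refresh call.
func rebuildAuthorization(prefix, account, newToken string) string {
    return base64.StdEncoding.EncodeToString([]byte(prefix + ":" + account + ":" + newToken))
}

func main() {
    sample := base64.StdEncoding.EncodeToString([]byte("Basic:13800000000:old-token")) // made-up value
    prefix, account, token, err := splitAuthorization(sample)
    if err != nil {
        panic(err)
    }
    fmt.Println(prefix, account, token)
    fmt.Println(rebuildAuthorization(prefix, account, "new-token"))
}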
@@ -1,7 +1,6 @@
 package _189pc
 
 import (
-	"container/ring"
 	"context"
 	"net/http"
 	"strconv"
@@ -29,9 +28,6 @@ type Cloud189PC struct {
 
 	uploadThread int
 
-	familyTransferFolder    *ring.Ring
-	cleanFamilyTransferFile func()
-
 	storageConfig driver.Config
 }
 
@@ -56,6 +52,7 @@ func (y *Cloud189PC) Init(ctx context.Context) (err error) {
 	}
 	if !y.isFamily() && y.RootFolderID == "" {
 		y.RootFolderID = "-11"
+		y.FamilyID = ""
 	}
 
 	// limit the number of upload threads
@@ -82,24 +79,11 @@ func (y *Cloud189PC) Init(ctx context.Context) (err error) {
 	}
 
 	// resolve the family cloud ID
-	if y.FamilyID == "" {
+	if y.isFamily() && y.FamilyID == "" {
 		if y.FamilyID, err = y.getFamilyID(); err != nil {
 			return err
 		}
 	}
-
-	// create transfer folders to avoid duplicate file names
-	if y.FamilyTransfer {
-		if y.familyTransferFolder, err = y.createFamilyTransferFolder(32); err != nil {
-			return err
-		}
-	}
-
-	y.cleanFamilyTransferFile = utils.NewThrottle2(time.Minute, func() {
-		if err := y.cleanFamilyTransfer(context.TODO()); err != nil {
-			utils.Log.Errorf("cleanFamilyTransferFolderError:%s", err)
-		}
-	})
 	return
 }
 
@@ -108,7 +92,7 @@ func (y *Cloud189PC) Drop(ctx context.Context) error {
 }
 
 func (y *Cloud189PC) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
-	return y.getFiles(ctx, dir.GetID(), y.isFamily())
+	return y.getFiles(ctx, dir.GetID())
 }
 
 func (y *Cloud189PC) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
@@ -116,9 +100,8 @@ func (y *Cloud189PC) Link(ctx context.Context, file model.Obj, args model.LinkAr
 		URL string `json:"fileDownloadUrl"`
 	}
 
-	isFamily := y.isFamily()
 	fullUrl := API_URL
-	if isFamily {
+	if y.isFamily() {
 		fullUrl += "/family/file"
 	}
 	fullUrl += "/getFileDownloadUrl.action"
@@ -126,7 +109,7 @@ func (y *Cloud189PC) Link(ctx context.Context, file model.Obj, args model.LinkAr
 	_, err := y.get(fullUrl, func(r *resty.Request) {
 		r.SetContext(ctx)
 		r.SetQueryParam("fileId", file.GetID())
-		if isFamily {
+		if y.isFamily() {
 			r.SetQueryParams(map[string]string{
 				"familyId": y.FamilyID,
 			})
@@ -136,7 +119,7 @@ func (y *Cloud189PC) Link(ctx context.Context, file model.Obj, args model.LinkAr
 				"flag":     "1",
 			})
 		}
-	}, &downloadUrl, isFamily)
+	}, &downloadUrl)
 	if err != nil {
 		return nil, err
 	}
@@ -173,9 +156,8 @@ func (y *Cloud189PC) Link(ctx context.Context, file model.Obj, args model.LinkAr
 }
 
 func (y *Cloud189PC) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
-	isFamily := y.isFamily()
 	fullUrl := API_URL
-	if isFamily {
+	if y.isFamily() {
 		fullUrl += "/family/file"
 	}
 	fullUrl += "/createFolder.action"
@@ -187,7 +169,7 @@ func (y *Cloud189PC) MakeDir(ctx context.Context, parentDir model.Obj, dirName s
 			"folderName":   dirName,
 			"relativePath": "",
 		})
-		if isFamily {
+		if y.isFamily() {
 			req.SetQueryParams(map[string]string{
 				"familyId": y.FamilyID,
 				"parentId": parentDir.GetID(),
@@ -197,7 +179,7 @@ func (y *Cloud189PC) MakeDir(ctx context.Context, parentDir model.Obj, dirName s
 				"parentFolderId": parentDir.GetID(),
 			})
 		}
-	}, &newFolder, isFamily)
+	}, &newFolder)
 	if err != nil {
 		return nil, err
 	}
@@ -205,14 +187,27 @@ func (y *Cloud189PC) MakeDir(ctx context.Context, parentDir model.Obj, dirName s
 }
 
 func (y *Cloud189PC) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
-	isFamily := y.isFamily()
-	other := map[string]string{"targetFileName": dstDir.GetName()}
-
-	resp, err := y.CreateBatchTask("MOVE", IF(isFamily, y.FamilyID, ""), dstDir.GetID(), other, BatchTaskInfo{
-		FileId:   srcObj.GetID(),
-		FileName: srcObj.GetName(),
-		IsFolder: BoolToNumber(srcObj.IsDir()),
-	})
+	var resp CreateBatchTaskResp
+	_, err := y.post(API_URL+"/batch/createBatchTask.action", func(req *resty.Request) {
+		req.SetContext(ctx)
+		req.SetFormData(map[string]string{
+			"type": "MOVE",
+			"taskInfos": MustString(utils.Json.MarshalToString(
+				[]BatchTaskInfo{
+					{
+						FileId:   srcObj.GetID(),
+						FileName: srcObj.GetName(),
+						IsFolder: BoolToNumber(srcObj.IsDir()),
+					},
+				})),
+			"targetFolderId": dstDir.GetID(),
+		})
+		if y.isFamily() {
+			req.SetFormData(map[string]string{
+				"familyId": y.FamilyID,
+			})
+		}
+	}, &resp)
 	if err != nil {
 		return nil, err
 	}
@ -223,11 +218,10 @@ func (y *Cloud189PC) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (y *Cloud189PC) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
|
func (y *Cloud189PC) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
|
||||||
isFamily := y.isFamily()
|
|
||||||
queryParam := make(map[string]string)
|
queryParam := make(map[string]string)
|
||||||
fullUrl := API_URL
|
fullUrl := API_URL
|
||||||
method := http.MethodPost
|
method := http.MethodPost
|
||||||
if isFamily {
|
if y.isFamily() {
|
||||||
fullUrl += "/family/file"
|
fullUrl += "/family/file"
|
||||||
method = http.MethodGet
|
method = http.MethodGet
|
||||||
queryParam["familyId"] = y.FamilyID
|
queryParam["familyId"] = y.FamilyID
|
||||||
@ -251,7 +245,7 @@ func (y *Cloud189PC) Rename(ctx context.Context, srcObj model.Obj, newName strin
|
|||||||
|
|
||||||
_, err := y.request(fullUrl, method, func(req *resty.Request) {
|
_, err := y.request(fullUrl, method, func(req *resty.Request) {
|
||||||
req.SetContext(ctx).SetQueryParams(queryParam)
|
req.SetContext(ctx).SetQueryParams(queryParam)
|
||||||
}, nil, newObj, isFamily)
|
}, nil, newObj)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -259,15 +253,28 @@ func (y *Cloud189PC) Rename(ctx context.Context, srcObj model.Obj, newName strin
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (y *Cloud189PC) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
func (y *Cloud189PC) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||||
isFamily := y.isFamily()
|
var resp CreateBatchTaskResp
|
||||||
other := map[string]string{"targetFileName": dstDir.GetName()}
|
_, err := y.post(API_URL+"/batch/createBatchTask.action", func(req *resty.Request) {
|
||||||
|
req.SetContext(ctx)
|
||||||
resp, err := y.CreateBatchTask("COPY", IF(isFamily, y.FamilyID, ""), dstDir.GetID(), other, BatchTaskInfo{
|
req.SetFormData(map[string]string{
|
||||||
FileId: srcObj.GetID(),
|
"type": "COPY",
|
||||||
FileName: srcObj.GetName(),
|
"taskInfos": MustString(utils.Json.MarshalToString(
|
||||||
IsFolder: BoolToNumber(srcObj.IsDir()),
|
[]BatchTaskInfo{
|
||||||
})
|
{
|
||||||
|
FileId: srcObj.GetID(),
|
||||||
|
FileName: srcObj.GetName(),
|
||||||
|
IsFolder: BoolToNumber(srcObj.IsDir()),
|
||||||
|
},
|
||||||
|
})),
|
||||||
|
"targetFolderId": dstDir.GetID(),
|
||||||
|
"targetFileName": dstDir.GetName(),
|
||||||
|
})
|
||||||
|
if y.isFamily() {
|
||||||
|
req.SetFormData(map[string]string{
|
||||||
|
"familyId": y.FamilyID,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}, &resp)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@ -275,13 +282,27 @@ func (y *Cloud189PC) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (y *Cloud189PC) Remove(ctx context.Context, obj model.Obj) error {
|
func (y *Cloud189PC) Remove(ctx context.Context, obj model.Obj) error {
|
||||||
isFamily := y.isFamily()
|
var resp CreateBatchTaskResp
|
||||||
|
_, err := y.post(API_URL+"/batch/createBatchTask.action", func(req *resty.Request) {
|
||||||
|
req.SetContext(ctx)
|
||||||
|
req.SetFormData(map[string]string{
|
||||||
|
"type": "DELETE",
|
||||||
|
"taskInfos": MustString(utils.Json.MarshalToString(
|
||||||
|
[]*BatchTaskInfo{
|
||||||
|
{
|
||||||
|
FileId: obj.GetID(),
|
||||||
|
FileName: obj.GetName(),
|
||||||
|
IsFolder: BoolToNumber(obj.IsDir()),
|
||||||
|
},
|
||||||
|
})),
|
||||||
|
})
|
||||||
|
|
||||||
resp, err := y.CreateBatchTask("DELETE", IF(isFamily, y.FamilyID, ""), "", nil, BatchTaskInfo{
|
if y.isFamily() {
|
||||||
FileId: obj.GetID(),
|
req.SetFormData(map[string]string{
|
||||||
FileName: obj.GetName(),
|
"familyId": y.FamilyID,
|
||||||
IsFolder: BoolToNumber(obj.IsDir()),
|
})
|
||||||
})
|
}
|
||||||
|
}, &resp)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@ -289,73 +310,25 @@ func (y *Cloud189PC) Remove(ctx context.Context, obj model.Obj) error {
|
|||||||
return y.WaitBatchTask("DELETE", resp.TaskID, time.Millisecond*200)
|
return y.WaitBatchTask("DELETE", resp.TaskID, time.Millisecond*200)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (y *Cloud189PC) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (newObj model.Obj, err error) {
|
func (y *Cloud189PC) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
|
||||||
overwrite := true
|
|
||||||
isFamily := y.isFamily()
|
|
||||||
|
|
||||||
// 响应时间长,按需启用
|
// 响应时间长,按需启用
|
||||||
if y.Addition.RapidUpload && !stream.IsForceStreamUpload() {
|
if y.Addition.RapidUpload {
|
||||||
if newObj, err := y.RapidUpload(ctx, dstDir, stream, isFamily, overwrite); err == nil {
|
if newObj, err := y.RapidUpload(ctx, dstDir, stream); err == nil {
|
||||||
return newObj, nil
|
return newObj, nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
uploadMethod := y.UploadMethod
|
switch y.UploadMethod {
|
||||||
if stream.IsForceStreamUpload() {
|
case "old":
|
||||||
uploadMethod = "stream"
|
return y.OldUpload(ctx, dstDir, stream, up)
|
||||||
}
|
|
||||||
|
|
||||||
// 旧版上传家庭云也有限制
|
|
||||||
if uploadMethod == "old" {
|
|
||||||
return y.OldUpload(ctx, dstDir, stream, up, isFamily, overwrite)
|
|
||||||
}
|
|
||||||
|
|
||||||
// 开启家庭云转存
|
|
||||||
if !isFamily && y.FamilyTransfer {
|
|
||||||
// 修改上传目标为家庭云文件夹
|
|
||||||
transferDstDir := dstDir
|
|
||||||
dstDir = (y.familyTransferFolder.Value).(*Cloud189Folder)
|
|
||||||
y.familyTransferFolder = y.familyTransferFolder.Next()
|
|
||||||
|
|
||||||
isFamily = true
|
|
||||||
overwrite = false
|
|
||||||
|
|
||||||
defer func() {
|
|
||||||
if newObj != nil {
|
|
||||||
// 批量任务有概率删不掉
|
|
||||||
y.cleanFamilyTransferFile()
|
|
||||||
|
|
||||||
// 转存家庭云文件到个人云
|
|
||||||
err = y.SaveFamilyFileToPersonCloud(context.TODO(), y.FamilyID, newObj, transferDstDir, true)
|
|
||||||
|
|
||||||
task := BatchTaskInfo{
|
|
||||||
FileId: newObj.GetID(),
|
|
||||||
FileName: newObj.GetName(),
|
|
||||||
IsFolder: BoolToNumber(newObj.IsDir()),
|
|
||||||
}
|
|
||||||
|
|
||||||
// 删除源文件
|
|
||||||
if resp, err := y.CreateBatchTask("DELETE", y.FamilyID, "", nil, task); err == nil {
|
|
||||||
y.WaitBatchTask("DELETE", resp.TaskID, time.Second)
|
|
||||||
// 永久删除
|
|
||||||
if resp, err := y.CreateBatchTask("CLEAR_RECYCLE", y.FamilyID, "", nil, task); err == nil {
|
|
||||||
y.WaitBatchTask("CLEAR_RECYCLE", resp.TaskID, time.Second)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
newObj = nil
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
}
|
|
||||||
|
|
||||||
switch uploadMethod {
|
|
||||||
case "rapid":
|
case "rapid":
|
||||||
return y.FastUpload(ctx, dstDir, stream, up, isFamily, overwrite)
|
return y.FastUpload(ctx, dstDir, stream, up)
|
||||||
case "stream":
|
case "stream":
|
||||||
if stream.GetSize() == 0 {
|
if stream.GetSize() == 0 {
|
||||||
return y.FastUpload(ctx, dstDir, stream, up, isFamily, overwrite)
|
return y.FastUpload(ctx, dstDir, stream, up)
|
||||||
}
|
}
|
||||||
fallthrough
|
fallthrough
|
||||||
default:
|
default:
|
||||||
return y.StreamUpload(ctx, dstDir, stream, up, isFamily, overwrite)
|
return y.StreamUpload(ctx, dstDir, stream, up)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -192,19 +192,3 @@ func partSize(size int64) int64 {
|
|||||||
}
|
}
|
||||||
return DEFAULT
|
return DEFAULT
|
||||||
}
|
}
|
||||||
|
|
||||||
func isBool(bs ...bool) bool {
|
|
||||||
for _, b := range bs {
|
|
||||||
if b {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
func IF[V any](o bool, t V, f V) V {
|
|
||||||
if o {
|
|
||||||
return t
|
|
||||||
}
|
|
||||||
return f
|
|
||||||
}
|
|
||||||
|
@ -16,7 +16,6 @@ type Addition struct {
|
|||||||
FamilyID string `json:"family_id"`
|
FamilyID string `json:"family_id"`
|
||||||
UploadMethod string `json:"upload_method" type:"select" options:"stream,rapid,old" default:"stream"`
|
UploadMethod string `json:"upload_method" type:"select" options:"stream,rapid,old" default:"stream"`
|
||||||
UploadThread string `json:"upload_thread" default:"3" help:"1<=thread<=32"`
|
UploadThread string `json:"upload_thread" default:"3" help:"1<=thread<=32"`
|
||||||
FamilyTransfer bool `json:"family_transfer"`
|
|
||||||
RapidUpload bool `json:"rapid_upload"`
|
RapidUpload bool `json:"rapid_upload"`
|
||||||
NoUseOcr bool `json:"no_use_ocr"`
|
NoUseOcr bool `json:"no_use_ocr"`
|
||||||
}
|
}
|
||||||
|
@ -3,11 +3,10 @@ package _189pc
|
|||||||
import (
|
import (
|
||||||
"encoding/xml"
|
"encoding/xml"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"github.com/alist-org/alist/v3/pkg/utils"
|
||||||
"sort"
|
"sort"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/pkg/utils"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// 居然有四种返回方式
|
// 居然有四种返回方式
|
||||||
@ -143,7 +142,7 @@ type FamilyInfoListResp struct {
|
|||||||
type FamilyInfoResp struct {
|
type FamilyInfoResp struct {
|
||||||
Count int `json:"count"`
|
Count int `json:"count"`
|
||||||
CreateTime string `json:"createTime"`
|
CreateTime string `json:"createTime"`
|
||||||
FamilyID int64 `json:"familyId"`
|
FamilyID int `json:"familyId"`
|
||||||
RemarkName string `json:"remarkName"`
|
RemarkName string `json:"remarkName"`
|
||||||
Type int `json:"type"`
|
Type int `json:"type"`
|
||||||
UseFlag int `json:"useFlag"`
|
UseFlag int `json:"useFlag"`
|
||||||
@ -243,12 +242,7 @@ type BatchTaskInfo struct {
|
|||||||
// IsFolder 是否是文件夹,0-否,1-是
|
// IsFolder 是否是文件夹,0-否,1-是
|
||||||
IsFolder int `json:"isFolder"`
|
IsFolder int `json:"isFolder"`
|
||||||
// SrcParentId 文件所在父目录ID
|
// SrcParentId 文件所在父目录ID
|
||||||
SrcParentId string `json:"srcParentId,omitempty"`
|
//SrcParentId string `json:"srcParentId"`
|
||||||
|
|
||||||
/* 冲突管理 */
|
|
||||||
// 1 -> 跳过 2 -> 保留 3 -> 覆盖
|
|
||||||
DealWay int `json:"dealWay,omitempty"`
|
|
||||||
IsConflict int `json:"isConflict,omitempty"`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/* 上传部分 */
|
/* 上传部分 */
|
||||||
@ -361,14 +355,6 @@ type BatchTaskStateResp struct {
|
|||||||
TaskStatus int `json:"taskStatus"` //1 初始化 2 存在冲突 3 执行中,4 完成
|
TaskStatus int `json:"taskStatus"` //1 初始化 2 存在冲突 3 执行中,4 完成
|
||||||
}
|
}
|
||||||
|
|
||||||
type BatchTaskConflictTaskInfoResp struct {
|
|
||||||
SessionKey string `json:"sessionKey"`
|
|
||||||
TargetFolderID int `json:"targetFolderId"`
|
|
||||||
TaskID string `json:"taskId"`
|
|
||||||
TaskInfos []BatchTaskInfo
|
|
||||||
TaskType int `json:"taskType"`
|
|
||||||
}
|
|
||||||
|
|
||||||
/* query 加密参数*/
|
/* query 加密参数*/
|
||||||
type Params map[string]string
|
type Params map[string]string
|
||||||
|
|
||||||
|
@ -2,7 +2,6 @@ package _189pc
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
"container/ring"
|
|
||||||
"context"
|
"context"
|
||||||
"crypto/md5"
|
"crypto/md5"
|
||||||
"encoding/base64"
|
"encoding/base64"
|
||||||
@ -55,11 +54,11 @@ const (
|
|||||||
CHANNEL_ID = "web_cloud.189.cn"
|
CHANNEL_ID = "web_cloud.189.cn"
|
||||||
)
|
)
|
||||||
|
|
||||||
func (y *Cloud189PC) SignatureHeader(url, method, params string, isFamily bool) map[string]string {
|
func (y *Cloud189PC) SignatureHeader(url, method, params string) map[string]string {
|
||||||
dateOfGmt := getHttpDateStr()
|
dateOfGmt := getHttpDateStr()
|
||||||
sessionKey := y.tokenInfo.SessionKey
|
sessionKey := y.tokenInfo.SessionKey
|
||||||
sessionSecret := y.tokenInfo.SessionSecret
|
sessionSecret := y.tokenInfo.SessionSecret
|
||||||
if isFamily {
|
if y.isFamily() {
|
||||||
sessionKey = y.tokenInfo.FamilySessionKey
|
sessionKey = y.tokenInfo.FamilySessionKey
|
||||||
sessionSecret = y.tokenInfo.FamilySessionSecret
|
sessionSecret = y.tokenInfo.FamilySessionSecret
|
||||||
}
|
}
|
||||||
@ -73,9 +72,9 @@ func (y *Cloud189PC) SignatureHeader(url, method, params string, isFamily bool)
|
|||||||
return header
|
return header
|
||||||
}
|
}
|
||||||
|
|
||||||
func (y *Cloud189PC) EncryptParams(params Params, isFamily bool) string {
|
func (y *Cloud189PC) EncryptParams(params Params) string {
|
||||||
sessionSecret := y.tokenInfo.SessionSecret
|
sessionSecret := y.tokenInfo.SessionSecret
|
||||||
if isFamily {
|
if y.isFamily() {
|
||||||
sessionSecret = y.tokenInfo.FamilySessionSecret
|
sessionSecret = y.tokenInfo.FamilySessionSecret
|
||||||
}
|
}
|
||||||
if params != nil {
|
if params != nil {
|
||||||
@ -84,17 +83,17 @@ func (y *Cloud189PC) EncryptParams(params Params, isFamily bool) string {
|
|||||||
return ""
|
return ""
|
||||||
}
|
}
|
||||||
|
|
||||||
func (y *Cloud189PC) request(url, method string, callback base.ReqCallback, params Params, resp interface{}, isFamily ...bool) ([]byte, error) {
|
func (y *Cloud189PC) request(url, method string, callback base.ReqCallback, params Params, resp interface{}) ([]byte, error) {
|
||||||
req := y.client.R().SetQueryParams(clientSuffix())
|
req := y.client.R().SetQueryParams(clientSuffix())
|
||||||
|
|
||||||
// 设置params
|
// 设置params
|
||||||
paramsData := y.EncryptParams(params, isBool(isFamily...))
|
paramsData := y.EncryptParams(params)
|
||||||
if paramsData != "" {
|
if paramsData != "" {
|
||||||
req.SetQueryParam("params", paramsData)
|
req.SetQueryParam("params", paramsData)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Signature
|
// Signature
|
||||||
req.SetHeaders(y.SignatureHeader(url, method, paramsData, isBool(isFamily...)))
|
req.SetHeaders(y.SignatureHeader(url, method, paramsData))
|
||||||
|
|
||||||
var erron RespErr
|
var erron RespErr
|
||||||
req.SetError(&erron)
|
req.SetError(&erron)
|
||||||
@ -114,33 +113,31 @@ func (y *Cloud189PC) request(url, method string, callback base.ReqCallback, para
|
|||||||
if err = y.refreshSession(); err != nil {
|
if err = y.refreshSession(); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
return y.request(url, method, callback, params, resp, isFamily...)
|
return y.request(url, method, callback, params, resp)
|
||||||
}
|
|
||||||
|
|
||||||
// if erron.ErrorCode == "InvalidSessionKey" || erron.Code == "InvalidSessionKey" {
|
|
||||||
if strings.Contains(res.String(), "InvalidSessionKey") {
|
|
||||||
if err = y.refreshSession(); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return y.request(url, method, callback, params, resp, isFamily...)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// 处理错误
|
// 处理错误
|
||||||
if erron.HasError() {
|
if erron.HasError() {
|
||||||
|
if erron.ErrorCode == "InvalidSessionKey" {
|
||||||
|
if err = y.refreshSession(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return y.request(url, method, callback, params, resp)
|
||||||
|
}
|
||||||
return nil, &erron
|
return nil, &erron
|
||||||
}
|
}
|
||||||
return res.Body(), nil
|
return res.Body(), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (y *Cloud189PC) get(url string, callback base.ReqCallback, resp interface{}, isFamily ...bool) ([]byte, error) {
|
func (y *Cloud189PC) get(url string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
||||||
return y.request(url, http.MethodGet, callback, nil, resp, isFamily...)
|
return y.request(url, http.MethodGet, callback, nil, resp)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (y *Cloud189PC) post(url string, callback base.ReqCallback, resp interface{}, isFamily ...bool) ([]byte, error) {
|
func (y *Cloud189PC) post(url string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
||||||
return y.request(url, http.MethodPost, callback, nil, resp, isFamily...)
|
return y.request(url, http.MethodPost, callback, nil, resp)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (y *Cloud189PC) put(ctx context.Context, url string, headers map[string]string, sign bool, file io.Reader, isFamily bool) ([]byte, error) {
|
func (y *Cloud189PC) put(ctx context.Context, url string, headers map[string]string, sign bool, file io.Reader) ([]byte, error) {
|
||||||
req, err := http.NewRequestWithContext(ctx, http.MethodPut, url, file)
|
req, err := http.NewRequestWithContext(ctx, http.MethodPut, url, file)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
@ -157,7 +154,7 @@ func (y *Cloud189PC) put(ctx context.Context, url string, headers map[string]str
|
|||||||
}
|
}
|
||||||
|
|
||||||
if sign {
|
if sign {
|
||||||
for key, value := range y.SignatureHeader(url, http.MethodPut, "", isFamily) {
|
for key, value := range y.SignatureHeader(url, http.MethodPut, "") {
|
||||||
req.Header.Add(key, value)
|
req.Header.Add(key, value)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -184,9 +181,9 @@ func (y *Cloud189PC) put(ctx context.Context, url string, headers map[string]str
|
|||||||
}
|
}
|
||||||
return body, nil
|
return body, nil
|
||||||
}
|
}
|
||||||
func (y *Cloud189PC) getFiles(ctx context.Context, fileId string, isFamily bool) ([]model.Obj, error) {
|
func (y *Cloud189PC) getFiles(ctx context.Context, fileId string) ([]model.Obj, error) {
|
||||||
fullUrl := API_URL
|
fullUrl := API_URL
|
||||||
if isFamily {
|
if y.isFamily() {
|
||||||
fullUrl += "/family/file"
|
fullUrl += "/family/file"
|
||||||
}
|
}
|
||||||
fullUrl += "/listFiles.action"
|
fullUrl += "/listFiles.action"
|
||||||
@ -204,7 +201,7 @@ func (y *Cloud189PC) getFiles(ctx context.Context, fileId string, isFamily bool)
|
|||||||
"pageNum": fmt.Sprint(pageNum),
|
"pageNum": fmt.Sprint(pageNum),
|
||||||
"pageSize": "130",
|
"pageSize": "130",
|
||||||
})
|
})
|
||||||
if isFamily {
|
if y.isFamily() {
|
||||||
r.SetQueryParams(map[string]string{
|
r.SetQueryParams(map[string]string{
|
||||||
"familyId": y.FamilyID,
|
"familyId": y.FamilyID,
|
||||||
"orderBy": toFamilyOrderBy(y.OrderBy),
|
"orderBy": toFamilyOrderBy(y.OrderBy),
|
||||||
@ -217,7 +214,7 @@ func (y *Cloud189PC) getFiles(ctx context.Context, fileId string, isFamily bool)
|
|||||||
"descending": toDesc(y.OrderDirection),
|
"descending": toDesc(y.OrderDirection),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}, &resp, isFamily)
|
}, &resp)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -440,7 +437,7 @@ func (y *Cloud189PC) refreshSession() (err error) {
|
|||||||
|
|
||||||
// 普通上传
|
// 普通上传
|
||||||
// 无法上传大小为0的文件
|
// 无法上传大小为0的文件
|
||||||
func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress, isFamily bool, overwrite bool) (model.Obj, error) {
|
func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
|
||||||
var sliceSize = partSize(file.GetSize())
|
var sliceSize = partSize(file.GetSize())
|
||||||
count := int(math.Ceil(float64(file.GetSize()) / float64(sliceSize)))
|
count := int(math.Ceil(float64(file.GetSize()) / float64(sliceSize)))
|
||||||
lastPartSize := file.GetSize() % sliceSize
|
lastPartSize := file.GetSize() % sliceSize
|
||||||
@ -457,7 +454,7 @@ func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file mo
|
|||||||
}
|
}
|
||||||
|
|
||||||
fullUrl := UPLOAD_URL
|
fullUrl := UPLOAD_URL
|
||||||
if isFamily {
|
if y.isFamily() {
|
||||||
params.Set("familyId", y.FamilyID)
|
params.Set("familyId", y.FamilyID)
|
||||||
fullUrl += "/family"
|
fullUrl += "/family"
|
||||||
} else {
|
} else {
|
||||||
@ -469,7 +466,7 @@ func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file mo
|
|||||||
var initMultiUpload InitMultiUploadResp
|
var initMultiUpload InitMultiUploadResp
|
||||||
_, err := y.request(fullUrl+"/initMultiUpload", http.MethodGet, func(req *resty.Request) {
|
_, err := y.request(fullUrl+"/initMultiUpload", http.MethodGet, func(req *resty.Request) {
|
||||||
req.SetContext(ctx)
|
req.SetContext(ctx)
|
||||||
}, params, &initMultiUpload, isFamily)
|
}, params, &initMultiUpload)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -505,14 +502,14 @@ func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file mo
|
|||||||
partInfo := fmt.Sprintf("%d-%s", i, base64.StdEncoding.EncodeToString(md5Bytes))
|
partInfo := fmt.Sprintf("%d-%s", i, base64.StdEncoding.EncodeToString(md5Bytes))
|
||||||
|
|
||||||
threadG.Go(func(ctx context.Context) error {
|
threadG.Go(func(ctx context.Context) error {
|
||||||
uploadUrls, err := y.GetMultiUploadUrls(ctx, isFamily, initMultiUpload.Data.UploadFileID, partInfo)
|
uploadUrls, err := y.GetMultiUploadUrls(ctx, initMultiUpload.Data.UploadFileID, partInfo)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
// step.4 上传切片
|
// step.4 上传切片
|
||||||
uploadUrl := uploadUrls[0]
|
uploadUrl := uploadUrls[0]
|
||||||
_, err = y.put(ctx, uploadUrl.RequestURL, uploadUrl.Headers, false, bytes.NewReader(byteData), isFamily)
|
_, err = y.put(ctx, uploadUrl.RequestURL, uploadUrl.Headers, false, bytes.NewReader(byteData))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@ -541,21 +538,21 @@ func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file mo
|
|||||||
"sliceMd5": sliceMd5Hex,
|
"sliceMd5": sliceMd5Hex,
|
||||||
"lazyCheck": "1",
|
"lazyCheck": "1",
|
||||||
"isLog": "0",
|
"isLog": "0",
|
||||||
"opertype": IF(overwrite, "3", "1"),
|
"opertype": "3",
|
||||||
}, &resp, isFamily)
|
}, &resp)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
return resp.toFile(), nil
|
return resp.toFile(), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (y *Cloud189PC) RapidUpload(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, isFamily bool, overwrite bool) (model.Obj, error) {
|
func (y *Cloud189PC) RapidUpload(ctx context.Context, dstDir model.Obj, stream model.FileStreamer) (model.Obj, error) {
|
||||||
fileMd5 := stream.GetHash().GetHash(utils.MD5)
|
fileMd5 := stream.GetHash().GetHash(utils.MD5)
|
||||||
if len(fileMd5) < utils.MD5.Width {
|
if len(fileMd5) < utils.MD5.Width {
|
||||||
return nil, errors.New("invalid hash")
|
return nil, errors.New("invalid hash")
|
||||||
}
|
}
|
||||||
|
|
||||||
uploadInfo, err := y.OldUploadCreate(ctx, dstDir.GetID(), fileMd5, stream.GetName(), fmt.Sprint(stream.GetSize()), isFamily)
|
uploadInfo, err := y.OldUploadCreate(ctx, dstDir.GetID(), fileMd5, stream.GetName(), fmt.Sprint(stream.GetSize()))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -564,11 +561,11 @@ func (y *Cloud189PC) RapidUpload(ctx context.Context, dstDir model.Obj, stream m
|
|||||||
return nil, errors.New("rapid upload fail")
|
return nil, errors.New("rapid upload fail")
|
||||||
}
|
}
|
||||||
|
|
||||||
return y.OldUploadCommit(ctx, uploadInfo.FileCommitUrl, uploadInfo.UploadFileId, isFamily, overwrite)
|
return y.OldUploadCommit(ctx, uploadInfo.FileCommitUrl, uploadInfo.UploadFileId)
|
||||||
}
|
}
|
||||||
|
|
||||||
// 快传
|
// 快传
|
||||||
func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress, isFamily bool, overwrite bool) (model.Obj, error) {
|
func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
|
||||||
tempFile, err := file.CacheFullInTempFile()
|
tempFile, err := file.CacheFullInTempFile()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
@ -597,7 +594,7 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
|
|||||||
}
|
}
|
||||||
|
|
||||||
silceMd5.Reset()
|
silceMd5.Reset()
|
||||||
if _, err := utils.CopyWithBufferN(io.MultiWriter(fileMd5, silceMd5), tempFile, byteSize); err != nil && err != io.EOF {
|
if _, err := io.CopyN(io.MultiWriter(fileMd5, silceMd5), tempFile, byteSize); err != nil && err != io.EOF {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
md5Byte := silceMd5.Sum(nil)
|
md5Byte := silceMd5.Sum(nil)
|
||||||
@ -612,7 +609,7 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
|
|||||||
}
|
}
|
||||||
|
|
||||||
fullUrl := UPLOAD_URL
|
fullUrl := UPLOAD_URL
|
||||||
if isFamily {
|
if y.isFamily() {
|
||||||
fullUrl += "/family"
|
fullUrl += "/family"
|
||||||
} else {
|
} else {
|
||||||
//params.Set("extend", `{"opScene":"1","relativepath":"","rootfolderid":""}`)
|
//params.Set("extend", `{"opScene":"1","relativepath":"","rootfolderid":""}`)
|
||||||
@ -631,13 +628,13 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
|
|||||||
"sliceSize": fmt.Sprint(sliceSize),
|
"sliceSize": fmt.Sprint(sliceSize),
|
||||||
"sliceMd5": sliceMd5Hex,
|
"sliceMd5": sliceMd5Hex,
|
||||||
}
|
}
|
||||||
if isFamily {
|
if y.isFamily() {
|
||||||
params.Set("familyId", y.FamilyID)
|
params.Set("familyId", y.FamilyID)
|
||||||
}
|
}
|
||||||
var uploadInfo InitMultiUploadResp
|
var uploadInfo InitMultiUploadResp
|
||||||
_, err = y.request(fullUrl+"/initMultiUpload", http.MethodGet, func(req *resty.Request) {
|
_, err = y.request(fullUrl+"/initMultiUpload", http.MethodGet, func(req *resty.Request) {
|
||||||
req.SetContext(ctx)
|
req.SetContext(ctx)
|
||||||
}, params, &uploadInfo, isFamily)
|
}, params, &uploadInfo)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -662,7 +659,7 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
|
|||||||
i, uploadPart := i, uploadPart
|
i, uploadPart := i, uploadPart
|
||||||
threadG.Go(func(ctx context.Context) error {
|
threadG.Go(func(ctx context.Context) error {
|
||||||
// step.3 获取上传链接
|
// step.3 获取上传链接
|
||||||
uploadUrls, err := y.GetMultiUploadUrls(ctx, isFamily, uploadInfo.UploadFileID, uploadPart)
|
uploadUrls, err := y.GetMultiUploadUrls(ctx, uploadInfo.UploadFileID, uploadPart)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@ -674,7 +671,7 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
|
|||||||
}
|
}
|
||||||
|
|
||||||
// step.4 上传切片
|
// step.4 上传切片
|
||||||
_, err = y.put(ctx, uploadUrl.RequestURL, uploadUrl.Headers, false, io.NewSectionReader(tempFile, offset, byteSize), isFamily)
|
_, err = y.put(ctx, uploadUrl.RequestURL, uploadUrl.Headers, false, io.NewSectionReader(tempFile, offset, byteSize))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@ -701,8 +698,8 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
|
|||||||
}, Params{
|
}, Params{
|
||||||
"uploadFileId": uploadInfo.UploadFileID,
|
"uploadFileId": uploadInfo.UploadFileID,
|
||||||
"isLog": "0",
|
"isLog": "0",
|
||||||
"opertype": IF(overwrite, "3", "1"),
|
"opertype": "3",
|
||||||
}, &resp, isFamily)
|
}, &resp)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -711,9 +708,9 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
|
|||||||
|
|
||||||
// 获取上传切片信息
|
// 获取上传切片信息
|
||||||
// 对http body有大小限制,分片信息太多会出错
|
// 对http body有大小限制,分片信息太多会出错
|
||||||
func (y *Cloud189PC) GetMultiUploadUrls(ctx context.Context, isFamily bool, uploadFileId string, partInfo ...string) ([]UploadUrlInfo, error) {
|
func (y *Cloud189PC) GetMultiUploadUrls(ctx context.Context, uploadFileId string, partInfo ...string) ([]UploadUrlInfo, error) {
|
||||||
fullUrl := UPLOAD_URL
|
fullUrl := UPLOAD_URL
|
||||||
if isFamily {
|
if y.isFamily() {
|
||||||
fullUrl += "/family"
|
fullUrl += "/family"
|
||||||
} else {
|
} else {
|
||||||
fullUrl += "/person"
|
fullUrl += "/person"
|
||||||
@ -726,7 +723,7 @@ func (y *Cloud189PC) GetMultiUploadUrls(ctx context.Context, isFamily bool, uplo
|
|||||||
}, Params{
|
}, Params{
|
||||||
"uploadFileId": uploadFileId,
|
"uploadFileId": uploadFileId,
|
||||||
"partInfo": strings.Join(partInfo, ","),
|
"partInfo": strings.Join(partInfo, ","),
|
||||||
}, &uploadUrlsResp, isFamily)
|
}, &uploadUrlsResp)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -755,7 +752,7 @@ func (y *Cloud189PC) GetMultiUploadUrls(ctx context.Context, isFamily bool, uplo
|
|||||||
}
|
}
|
||||||
|
|
||||||
// 旧版本上传,家庭云不支持覆盖
|
// 旧版本上传,家庭云不支持覆盖
|
||||||
func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress, isFamily bool, overwrite bool) (model.Obj, error) {
|
func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
|
||||||
tempFile, err := file.CacheFullInTempFile()
|
tempFile, err := file.CacheFullInTempFile()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
@ -766,7 +763,7 @@ func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model
|
|||||||
}
|
}
|
||||||
|
|
||||||
// 创建上传会话
|
// 创建上传会话
|
||||||
uploadInfo, err := y.OldUploadCreate(ctx, dstDir.GetID(), fileMd5, file.GetName(), fmt.Sprint(file.GetSize()), isFamily)
|
uploadInfo, err := y.OldUploadCreate(ctx, dstDir.GetID(), fileMd5, file.GetName(), fmt.Sprint(file.GetSize()))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -783,14 +780,14 @@ func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model
|
|||||||
"Expect": "100-continue",
|
"Expect": "100-continue",
|
||||||
}
|
}
|
||||||
|
|
||||||
if isFamily {
|
if y.isFamily() {
|
||||||
header["FamilyId"] = fmt.Sprint(y.FamilyID)
|
header["FamilyId"] = fmt.Sprint(y.FamilyID)
|
||||||
header["UploadFileId"] = fmt.Sprint(status.UploadFileId)
|
header["UploadFileId"] = fmt.Sprint(status.UploadFileId)
|
||||||
} else {
|
} else {
|
||||||
header["Edrive-UploadFileId"] = fmt.Sprint(status.UploadFileId)
|
header["Edrive-UploadFileId"] = fmt.Sprint(status.UploadFileId)
|
||||||
}
|
}
|
||||||
|
|
||||||
_, err := y.put(ctx, status.FileUploadUrl, header, true, io.NopCloser(tempFile), isFamily)
|
_, err := y.put(ctx, status.FileUploadUrl, header, true, io.NopCloser(tempFile))
|
||||||
if err, ok := err.(*RespErr); ok && err.Code != "InputStreamReadError" {
|
if err, ok := err.(*RespErr); ok && err.Code != "InputStreamReadError" {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -805,10 +802,10 @@ func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model
|
|||||||
"uploadFileId": fmt.Sprint(status.UploadFileId),
|
"uploadFileId": fmt.Sprint(status.UploadFileId),
|
||||||
"resumePolicy": "1",
|
"resumePolicy": "1",
|
||||||
})
|
})
|
||||||
if isFamily {
|
if y.isFamily() {
|
||||||
req.SetQueryParam("familyId", fmt.Sprint(y.FamilyID))
|
req.SetQueryParam("familyId", fmt.Sprint(y.FamilyID))
|
||||||
}
|
}
|
||||||
}, &status, isFamily)
|
}, &status)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -818,20 +815,20 @@ func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model
|
|||||||
up(float64(status.GetSize()) / float64(file.GetSize()) * 100)
|
up(float64(status.GetSize()) / float64(file.GetSize()) * 100)
|
||||||
}
|
}
|
||||||
|
|
||||||
return y.OldUploadCommit(ctx, status.FileCommitUrl, status.UploadFileId, isFamily, overwrite)
|
return y.OldUploadCommit(ctx, status.FileCommitUrl, status.UploadFileId)
|
||||||
}
|
}
|
||||||
|
|
||||||
// 创建上传会话
|
// 创建上传会话
|
||||||
func (y *Cloud189PC) OldUploadCreate(ctx context.Context, parentID string, fileMd5, fileName, fileSize string, isFamily bool) (*CreateUploadFileResp, error) {
|
func (y *Cloud189PC) OldUploadCreate(ctx context.Context, parentID string, fileMd5, fileName, fileSize string) (*CreateUploadFileResp, error) {
|
||||||
var uploadInfo CreateUploadFileResp
|
var uploadInfo CreateUploadFileResp
|
||||||
|
|
||||||
fullUrl := API_URL + "/createUploadFile.action"
|
fullUrl := API_URL + "/createUploadFile.action"
|
||||||
if isFamily {
|
if y.isFamily() {
|
||||||
fullUrl = API_URL + "/family/file/createFamilyFile.action"
|
fullUrl = API_URL + "/family/file/createFamilyFile.action"
|
||||||
}
|
}
|
||||||
_, err := y.post(fullUrl, func(req *resty.Request) {
|
_, err := y.post(fullUrl, func(req *resty.Request) {
|
||||||
req.SetContext(ctx)
|
req.SetContext(ctx)
|
||||||
if isFamily {
|
if y.isFamily() {
|
||||||
req.SetQueryParams(map[string]string{
|
req.SetQueryParams(map[string]string{
|
||||||
"familyId": y.FamilyID,
|
"familyId": y.FamilyID,
|
||||||
"parentId": parentID,
|
"parentId": parentID,
|
||||||
@ -852,7 +849,7 @@ func (y *Cloud189PC) OldUploadCreate(ctx context.Context, parentID string, fileM
|
|||||||
"isLog": "0",
|
"isLog": "0",
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}, &uploadInfo, isFamily)
|
}, &uploadInfo)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
@ -861,11 +858,11 @@ func (y *Cloud189PC) OldUploadCreate(ctx context.Context, parentID string, fileM
|
|||||||
}
|
}
|
||||||
|
|
||||||
// 提交上传文件
|
// 提交上传文件
|
||||||
func (y *Cloud189PC) OldUploadCommit(ctx context.Context, fileCommitUrl string, uploadFileID int64, isFamily bool, overwrite bool) (model.Obj, error) {
|
func (y *Cloud189PC) OldUploadCommit(ctx context.Context, fileCommitUrl string, uploadFileID int64) (model.Obj, error) {
|
||||||
var resp OldCommitUploadFileResp
|
var resp OldCommitUploadFileResp
|
||||||
_, err := y.post(fileCommitUrl, func(req *resty.Request) {
|
_, err := y.post(fileCommitUrl, func(req *resty.Request) {
|
||||||
req.SetContext(ctx)
|
req.SetContext(ctx)
|
||||||
if isFamily {
|
if y.isFamily() {
|
||||||
req.SetHeaders(map[string]string{
|
req.SetHeaders(map[string]string{
|
||||||
"ResumePolicy": "1",
|
"ResumePolicy": "1",
|
||||||
"UploadFileId": fmt.Sprint(uploadFileID),
|
"UploadFileId": fmt.Sprint(uploadFileID),
|
||||||
@ -873,13 +870,13 @@ func (y *Cloud189PC) OldUploadCommit(ctx context.Context, fileCommitUrl string,
|
|||||||
})
|
})
|
||||||
} else {
|
} else {
|
||||||
req.SetFormData(map[string]string{
|
req.SetFormData(map[string]string{
|
||||||
"opertype": IF(overwrite, "3", "1"),
|
"opertype": "3",
|
||||||
"resumePolicy": "1",
|
"resumePolicy": "1",
|
||||||
"uploadFileId": fmt.Sprint(uploadFileID),
|
"uploadFileId": fmt.Sprint(uploadFileID),
|
||||||
"isLog": "0",
|
"isLog": "0",
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}, &resp, isFamily)
|
}, &resp)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -898,100 +895,10 @@ func (y *Cloud189PC) isLogin() bool {
|
|||||||
return err == nil
|
return err == nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// 创建家庭云中转文件夹
|
|
||||||
func (y *Cloud189PC) createFamilyTransferFolder(count int) (*ring.Ring, error) {
|
|
||||||
folders := ring.New(count)
|
|
||||||
var rootFolder Cloud189Folder
|
|
||||||
_, err := y.post(API_URL+"/family/file/createFolder.action", func(req *resty.Request) {
|
|
||||||
req.SetQueryParams(map[string]string{
|
|
||||||
"folderName": "FamilyTransferFolder",
|
|
||||||
"familyId": y.FamilyID,
|
|
||||||
})
|
|
||||||
}, &rootFolder, true)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
folderCount := 0
|
|
||||||
|
|
||||||
// 获取已有目录
|
|
||||||
files, err := y.getFiles(context.TODO(), rootFolder.GetID(), true)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
for _, file := range files {
|
|
||||||
if folder, ok := file.(*Cloud189Folder); ok {
|
|
||||||
folders.Value = folder
|
|
||||||
folders = folders.Next()
|
|
||||||
folderCount++
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// 创建新的目录
|
|
||||||
for folderCount < count {
|
|
||||||
var newFolder Cloud189Folder
|
|
||||||
_, err := y.post(API_URL+"/family/file/createFolder.action", func(req *resty.Request) {
|
|
||||||
req.SetQueryParams(map[string]string{
|
|
||||||
"folderName": uuid.NewString(),
|
|
||||||
"familyId": y.FamilyID,
|
|
||||||
"parentId": rootFolder.GetID(),
|
|
||||||
})
|
|
||||||
}, &newFolder, true)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
folders.Value = &newFolder
|
|
||||||
folders = folders.Next()
|
|
||||||
folderCount++
|
|
||||||
}
|
|
||||||
return folders, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// 清理中转文件夹
|
|
||||||
func (y *Cloud189PC) cleanFamilyTransfer(ctx context.Context) error {
|
|
||||||
var tasks []BatchTaskInfo
|
|
||||||
r := y.familyTransferFolder
|
|
||||||
for p := r.Next(); p != r; p = p.Next() {
|
|
||||||
folder := p.Value.(*Cloud189Folder)
|
|
||||||
|
|
||||||
files, err := y.getFiles(ctx, folder.GetID(), true)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
for _, file := range files {
|
|
||||||
tasks = append(tasks, BatchTaskInfo{
|
|
||||||
FileId: file.GetID(),
|
|
||||||
FileName: file.GetName(),
|
|
||||||
IsFolder: BoolToNumber(file.IsDir()),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(tasks) > 0 {
|
|
||||||
// 删除
|
|
||||||
resp, err := y.CreateBatchTask("DELETE", y.FamilyID, "", nil, tasks...)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
err = y.WaitBatchTask("DELETE", resp.TaskID, time.Second)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
// 永久删除
|
|
||||||
resp, err = y.CreateBatchTask("CLEAR_RECYCLE", y.FamilyID, "", nil, tasks...)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
err = y.WaitBatchTask("CLEAR_RECYCLE", resp.TaskID, time.Second)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// 获取家庭云所有用户信息
|
// 获取家庭云所有用户信息
|
||||||
func (y *Cloud189PC) getFamilyInfoList() ([]FamilyInfoResp, error) {
|
func (y *Cloud189PC) getFamilyInfoList() ([]FamilyInfoResp, error) {
|
||||||
var resp FamilyInfoListResp
|
var resp FamilyInfoListResp
|
||||||
_, err := y.get(API_URL+"/family/manage/getFamilyList.action", nil, &resp, true)
|
_, err := y.get(API_URL+"/family/manage/getFamilyList.action", nil, &resp)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -1015,73 +922,6 @@ func (y *Cloud189PC) getFamilyID() (string, error) {
|
|||||||
return fmt.Sprint(infos[0].FamilyID), nil
|
return fmt.Sprint(infos[0].FamilyID), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// 保存家庭云中的文件到个人云
|
|
||||||
func (y *Cloud189PC) SaveFamilyFileToPersonCloud(ctx context.Context, familyId string, srcObj, dstDir model.Obj, overwrite bool) error {
|
|
||||||
// _, err := y.post(API_URL+"/family/file/saveFileToMember.action", func(req *resty.Request) {
|
|
||||||
// req.SetQueryParams(map[string]string{
|
|
||||||
// "channelId": "home",
|
|
||||||
// "familyId": familyId,
|
|
||||||
// "destParentId": destParentId,
|
|
||||||
// "fileIdList": familyFileId,
|
|
||||||
// })
|
|
||||||
// }, nil)
|
|
||||||
// return err
|
|
||||||
|
|
||||||
task := BatchTaskInfo{
|
|
||||||
FileId: srcObj.GetID(),
|
|
||||||
FileName: srcObj.GetName(),
|
|
||||||
IsFolder: BoolToNumber(srcObj.IsDir()),
|
|
||||||
}
|
|
||||||
resp, err := y.CreateBatchTask("COPY", familyId, dstDir.GetID(), map[string]string{
|
|
||||||
"groupId": "null",
|
|
||||||
"copyType": "2",
|
|
||||||
"shareId": "null",
|
|
||||||
}, task)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
for {
|
|
||||||
state, err := y.CheckBatchTask("COPY", resp.TaskID)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
switch state.TaskStatus {
|
|
||||||
case 2:
|
|
||||||
task.DealWay = IF(overwrite, 3, 2)
|
|
||||||
// 冲突时覆盖文件
|
|
||||||
if err := y.ManageBatchTask("COPY", resp.TaskID, dstDir.GetID(), task); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
case 4:
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
time.Sleep(time.Millisecond * 400)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (y *Cloud189PC) CreateBatchTask(aType string, familyID string, targetFolderId string, other map[string]string, taskInfos ...BatchTaskInfo) (*CreateBatchTaskResp, error) {
|
|
||||||
var resp CreateBatchTaskResp
|
|
||||||
_, err := y.post(API_URL+"/batch/createBatchTask.action", func(req *resty.Request) {
|
|
||||||
req.SetFormData(map[string]string{
|
|
||||||
"type": aType,
|
|
||||||
"taskInfos": MustString(utils.Json.MarshalToString(taskInfos)),
|
|
||||||
})
|
|
||||||
if targetFolderId != "" {
|
|
||||||
req.SetFormData(map[string]string{"targetFolderId": targetFolderId})
|
|
||||||
}
|
|
||||||
if familyID != "" {
|
|
||||||
req.SetFormData(map[string]string{"familyId": familyID})
|
|
||||||
}
|
|
||||||
req.SetFormData(other)
|
|
||||||
}, &resp, familyID != "")
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return &resp, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// 检测任务状态
|
|
||||||
func (y *Cloud189PC) CheckBatchTask(aType string, taskID string) (*BatchTaskStateResp, error) {
|
func (y *Cloud189PC) CheckBatchTask(aType string, taskID string) (*BatchTaskStateResp, error) {
|
||||||
var resp BatchTaskStateResp
|
var resp BatchTaskStateResp
|
||||||
_, err := y.post(API_URL+"/batch/checkBatchTask.action", func(req *resty.Request) {
|
_, err := y.post(API_URL+"/batch/checkBatchTask.action", func(req *resty.Request) {
|
||||||
@ -1096,37 +936,6 @@ func (y *Cloud189PC) CheckBatchTask(aType string, taskID string) (*BatchTaskStat
|
|||||||
return &resp, nil
|
return &resp, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// 获取冲突的任务信息
|
|
||||||
func (y *Cloud189PC) GetConflictTaskInfo(aType string, taskID string) (*BatchTaskConflictTaskInfoResp, error) {
|
|
||||||
var resp BatchTaskConflictTaskInfoResp
|
|
||||||
_, err := y.post(API_URL+"/batch/getConflictTaskInfo.action", func(req *resty.Request) {
|
|
||||||
req.SetFormData(map[string]string{
|
|
||||||
"type": aType,
|
|
||||||
"taskId": taskID,
|
|
||||||
})
|
|
||||||
}, &resp)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return &resp, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// 处理冲突
|
|
||||||
func (y *Cloud189PC) ManageBatchTask(aType string, taskID string, targetFolderId string, taskInfos ...BatchTaskInfo) error {
|
|
||||||
_, err := y.post(API_URL+"/batch/manageBatchTask.action", func(req *resty.Request) {
|
|
||||||
req.SetFormData(map[string]string{
|
|
||||||
"targetFolderId": targetFolderId,
|
|
||||||
"type": aType,
|
|
||||||
"taskId": taskID,
|
|
||||||
"taskInfos": MustString(utils.Json.MarshalToString(taskInfos)),
|
|
||||||
})
|
|
||||||
}, nil)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
var ErrIsConflict = errors.New("there is a conflict with the target object")
|
|
||||||
|
|
||||||
// 等待任务完成
|
|
||||||
func (y *Cloud189PC) WaitBatchTask(aType string, taskID string, t time.Duration) error {
|
func (y *Cloud189PC) WaitBatchTask(aType string, taskID string, t time.Duration) error {
|
||||||
for {
|
for {
|
||||||
state, err := y.CheckBatchTask(aType, taskID)
|
state, err := y.CheckBatchTask(aType, taskID)
|
||||||
@ -1135,7 +944,7 @@ func (y *Cloud189PC) WaitBatchTask(aType string, taskID string, t time.Duration)
|
|||||||
}
|
}
|
||||||
switch state.TaskStatus {
|
switch state.TaskStatus {
|
||||||
case 2:
|
case 2:
|
||||||
return ErrIsConflict
|
return errors.New("there is a conflict with the target object")
|
||||||
case 4:
|
case 4:
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
@ -7,7 +7,6 @@ import (
|
|||||||
|
|
||||||
"github.com/alist-org/alist/v3/internal/driver"
|
"github.com/alist-org/alist/v3/internal/driver"
|
||||||
"github.com/alist-org/alist/v3/internal/errs"
|
"github.com/alist-org/alist/v3/internal/errs"
|
||||||
"github.com/alist-org/alist/v3/internal/fs"
|
|
||||||
"github.com/alist-org/alist/v3/internal/model"
|
"github.com/alist-org/alist/v3/internal/model"
|
||||||
"github.com/alist-org/alist/v3/pkg/utils"
|
"github.com/alist-org/alist/v3/pkg/utils"
|
||||||
)
|
)
|
||||||
@ -46,9 +45,6 @@ func (d *Alias) Init(ctx context.Context) error {
|
|||||||
d.oneKey = k
|
d.oneKey = k
|
||||||
}
|
}
|
||||||
d.autoFlatten = true
|
d.autoFlatten = true
|
||||||
} else {
|
|
||||||
d.oneKey = ""
|
|
||||||
d.autoFlatten = false
|
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
@ -91,9 +87,8 @@ func (d *Alias) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([
|
|||||||
return nil, errs.ObjectNotFound
|
return nil, errs.ObjectNotFound
|
||||||
}
|
}
|
||||||
var objs []model.Obj
|
var objs []model.Obj
|
||||||
fsArgs := &fs.ListArgs{NoLog: true, Refresh: args.Refresh}
|
|
||||||
for _, dst := range dsts {
|
for _, dst := range dsts {
|
||||||
tmp, err := d.list(ctx, dst, sub, fsArgs)
|
tmp, err := d.list(ctx, dst, sub)
|
||||||
if err == nil {
|
if err == nil {
|
||||||
objs = append(objs, tmp...)
|
objs = append(objs, tmp...)
|
||||||
}
|
}
|
||||||
@ -116,26 +111,4 @@ func (d *Alias) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (
|
|||||||
return nil, errs.ObjectNotFound
|
return nil, errs.ObjectNotFound
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Alias) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
|
||||||
reqPath, err := d.getReqPath(ctx, srcObj)
|
|
||||||
if err == nil {
|
|
||||||
return fs.Rename(ctx, *reqPath, newName)
|
|
||||||
}
|
|
||||||
if errs.IsNotImplement(err) {
|
|
||||||
return errors.New("same-name files cannot be Rename")
|
|
||||||
}
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Alias) Remove(ctx context.Context, obj model.Obj) error {
|
|
||||||
reqPath, err := d.getReqPath(ctx, obj)
|
|
||||||
if err == nil {
|
|
||||||
return fs.Remove(ctx, *reqPath)
|
|
||||||
}
|
|
||||||
if errs.IsNotImplement(err) {
|
|
||||||
return errors.New("same-name files cannot be Delete")
|
|
||||||
}
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
var _ driver.Driver = (*Alias)(nil)
|
var _ driver.Driver = (*Alias)(nil)
|
||||||
|
@ -9,25 +9,19 @@ type Addition struct {
|
|||||||
// Usually one of two
|
// Usually one of two
|
||||||
// driver.RootPath
|
// driver.RootPath
|
||||||
// define other
|
// define other
|
||||||
Paths string `json:"paths" required:"true" type:"text"`
|
Paths string `json:"paths" required:"true" type:"text"`
|
||||||
ProtectSameName bool `json:"protect_same_name" default:"true" required:"false" help:"Protects same-name files from Delete or Rename"`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
var config = driver.Config{
|
var config = driver.Config{
|
||||||
Name: "Alias",
|
Name: "Alias",
|
||||||
LocalSort: true,
|
LocalSort: true,
|
||||||
NoCache: true,
|
NoCache: true,
|
||||||
NoUpload: true,
|
NoUpload: true,
|
||||||
DefaultRoot: "/",
|
DefaultRoot: "/",
|
||||||
ProxyRangeOption: true,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
op.RegisterDriver(func() driver.Driver {
|
op.RegisterDriver(func() driver.Driver {
|
||||||
return &Alias{
|
return &Alias{}
|
||||||
Addition: Addition{
|
|
||||||
ProtectSameName: true,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -6,7 +6,6 @@ import (
|
|||||||
stdpath "path"
|
stdpath "path"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/internal/errs"
|
|
||||||
"github.com/alist-org/alist/v3/internal/fs"
|
"github.com/alist-org/alist/v3/internal/fs"
|
||||||
"github.com/alist-org/alist/v3/internal/model"
|
"github.com/alist-org/alist/v3/internal/model"
|
||||||
"github.com/alist-org/alist/v3/internal/sign"
|
"github.com/alist-org/alist/v3/internal/sign"
|
||||||
@ -16,7 +15,7 @@ import (
|
|||||||
|
|
||||||
func (d *Alias) listRoot() []model.Obj {
|
func (d *Alias) listRoot() []model.Obj {
|
||||||
var objs []model.Obj
|
var objs []model.Obj
|
||||||
for k := range d.pathMap {
|
for k, _ := range d.pathMap {
|
||||||
obj := model.Object{
|
obj := model.Object{
|
||||||
Name: k,
|
Name: k,
|
||||||
IsFolder: true,
|
IsFolder: true,
|
||||||
@ -65,8 +64,8 @@ func (d *Alias) get(ctx context.Context, path string, dst, sub string) (model.Ob
|
|||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Alias) list(ctx context.Context, dst, sub string, args *fs.ListArgs) ([]model.Obj, error) {
|
func (d *Alias) list(ctx context.Context, dst, sub string) ([]model.Obj, error) {
|
||||||
objs, err := fs.List(ctx, stdpath.Join(dst, sub), args)
|
objs, err := fs.List(ctx, stdpath.Join(dst, sub), &fs.ListArgs{NoLog: true})
|
||||||
// the obj must implement the model.SetPath interface
|
// the obj must implement the model.SetPath interface
|
||||||
// return objs, err
|
// return objs, err
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@ -103,49 +102,13 @@ func (d *Alias) link(ctx context.Context, dst, sub string, args model.LinkArgs)
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
if common.ShouldProxy(storage, stdpath.Base(sub)) {
|
if common.ShouldProxy(storage, stdpath.Base(sub)) {
|
||||||
link := &model.Link{
|
return &model.Link{
|
||||||
URL: fmt.Sprintf("%s/p%s?sign=%s",
|
URL: fmt.Sprintf("%s/p%s?sign=%s",
|
||||||
common.GetApiUrl(args.HttpReq),
|
common.GetApiUrl(args.HttpReq),
|
||||||
utils.EncodePath(reqPath, true),
|
utils.EncodePath(reqPath, true),
|
||||||
sign.Sign(reqPath)),
|
sign.Sign(reqPath)),
|
||||||
}
|
}, nil
|
||||||
if args.HttpReq != nil && d.ProxyRange {
|
|
||||||
link.RangeReadCloser = common.NoProxyRange
|
|
||||||
}
|
|
||||||
return link, nil
|
|
||||||
}
|
}
|
||||||
link, _, err := fs.Link(ctx, reqPath, args)
|
link, _, err := fs.Link(ctx, reqPath, args)
|
||||||
return link, err
|
return link, err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Alias) getReqPath(ctx context.Context, obj model.Obj) (*string, error) {
|
|
||||||
root, sub := d.getRootAndPath(obj.GetPath())
|
|
||||||
if sub == "" {
|
|
||||||
return nil, errs.NotSupport
|
|
||||||
}
|
|
||||||
dsts, ok := d.pathMap[root]
|
|
||||||
if !ok {
|
|
||||||
return nil, errs.ObjectNotFound
|
|
||||||
}
|
|
||||||
var reqPath *string
|
|
||||||
for _, dst := range dsts {
|
|
||||||
path := stdpath.Join(dst, sub)
|
|
||||||
_, err := fs.Get(ctx, path, &fs.GetArgs{NoLog: true})
|
|
||||||
if err != nil {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if !d.ProtectSameName {
|
|
||||||
return &path, nil
|
|
||||||
}
|
|
||||||
if ok {
|
|
||||||
ok = false
|
|
||||||
} else {
|
|
||||||
return nil, errs.NotImplement
|
|
||||||
}
|
|
||||||
reqPath = &path
|
|
||||||
}
|
|
||||||
if reqPath == nil {
|
|
||||||
return nil, errs.ObjectNotFound
|
|
||||||
}
|
|
||||||
return reqPath, nil
|
|
||||||
}
|
|
||||||
|
@ -6,7 +6,9 @@ import (
|
|||||||
"io"
|
"io"
|
||||||
"net/http"
|
"net/http"
|
||||||
"path"
|
"path"
|
||||||
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/drivers/base"
|
"github.com/alist-org/alist/v3/drivers/base"
|
||||||
"github.com/alist-org/alist/v3/internal/conf"
|
"github.com/alist-org/alist/v3/internal/conf"
|
||||||
@ -15,7 +17,6 @@ import (
|
|||||||
"github.com/alist-org/alist/v3/pkg/utils"
|
"github.com/alist-org/alist/v3/pkg/utils"
|
||||||
"github.com/alist-org/alist/v3/server/common"
|
"github.com/alist-org/alist/v3/server/common"
|
||||||
"github.com/go-resty/resty/v2"
|
"github.com/go-resty/resty/v2"
|
||||||
log "github.com/sirupsen/logrus"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type AListV3 struct {
|
type AListV3 struct {
|
||||||
@ -41,7 +42,7 @@ func (d *AListV3) Init(ctx context.Context) error {
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
// if the username is not empty and the username is not the same as the current username, then login again
|
// if the username is not empty and the username is not the same as the current username, then login again
|
||||||
if d.Username != resp.Data.Username {
|
if d.Username != "" && d.Username != resp.Data.Username {
|
||||||
err = d.login()
|
err = d.login()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
@ -108,19 +109,11 @@ func (d *AListV3) List(ctx context.Context, dir model.Obj, args model.ListArgs)
|
|||||||
|
|
||||||
func (d *AListV3) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
func (d *AListV3) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||||
var resp common.Resp[FsGetResp]
|
var resp common.Resp[FsGetResp]
|
||||||
// if PassUAToUpsteam is true, then pass the user-agent to the upstream
|
|
||||||
userAgent := base.UserAgent
|
|
||||||
if d.PassUAToUpsteam {
|
|
||||||
userAgent = args.Header.Get("user-agent")
|
|
||||||
if userAgent == "" {
|
|
||||||
userAgent = base.UserAgent
|
|
||||||
}
|
|
||||||
}
|
|
||||||
_, err := d.request("/fs/get", http.MethodPost, func(req *resty.Request) {
|
_, err := d.request("/fs/get", http.MethodPost, func(req *resty.Request) {
|
||||||
req.SetResult(&resp).SetBody(FsGetReq{
|
req.SetResult(&resp).SetBody(FsGetReq{
|
||||||
Path: file.GetPath(),
|
Path: file.GetPath(),
|
||||||
Password: d.MetaPassword,
|
Password: d.MetaPassword,
|
||||||
}).SetHeader("user-agent", userAgent)
|
})
|
||||||
})
|
})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
@ -182,41 +175,14 @@ func (d *AListV3) Remove(ctx context.Context, obj model.Obj) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (d *AListV3) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
func (d *AListV3) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||||
req, err := http.NewRequestWithContext(ctx, http.MethodPut, d.Address+"/api/fs/put", stream)
|
_, err := d.requestWithTimeout("/fs/put", http.MethodPut, func(req *resty.Request) {
|
||||||
if err != nil {
|
req.SetHeader("File-Path", path.Join(dstDir.GetPath(), stream.GetName())).
|
||||||
return err
|
SetHeader("Password", d.MetaPassword).
|
||||||
}
|
SetHeader("Content-Length", strconv.FormatInt(stream.GetSize(), 10)).
|
||||||
req.Header.Set("Authorization", d.Token)
|
SetContentLength(true).
|
||||||
req.Header.Set("File-Path", path.Join(dstDir.GetPath(), stream.GetName()))
|
SetBody(io.ReadCloser(stream))
|
||||||
req.Header.Set("Password", d.MetaPassword)
|
}, time.Hour*6)
|
||||||
|
return err
|
||||||
req.ContentLength = stream.GetSize()
|
|
||||||
// client := base.NewHttpClient()
|
|
||||||
// client.Timeout = time.Hour * 6
|
|
||||||
res, err := base.HttpClient.Do(req)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
bytes, err := io.ReadAll(res.Body)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
log.Debugf("[alist_v3] response body: %s", string(bytes))
|
|
||||||
if res.StatusCode >= 400 {
|
|
||||||
return fmt.Errorf("request failed, status: %s", res.Status)
|
|
||||||
}
|
|
||||||
code := utils.Json.Get(bytes, "code").ToInt()
|
|
||||||
if code != 200 {
|
|
||||||
if code == 401 || code == 403 {
|
|
||||||
err = d.login()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return fmt.Errorf("request failed,code: %d, message: %s", code, utils.Json.Get(bytes, "message").ToString())
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
//func (d *AList) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
|
//func (d *AList) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
|
||||||
|
@ -7,20 +7,18 @@ import (
|
|||||||
|
|
||||||
type Addition struct {
|
type Addition struct {
|
||||||
driver.RootPath
|
driver.RootPath
|
||||||
Address string `json:"url" required:"true"`
|
Address string `json:"url" required:"true"`
|
||||||
MetaPassword string `json:"meta_password"`
|
MetaPassword string `json:"meta_password"`
|
||||||
Username string `json:"username"`
|
Username string `json:"username"`
|
||||||
Password string `json:"password"`
|
Password string `json:"password"`
|
||||||
Token string `json:"token"`
|
Token string `json:"token"`
|
||||||
PassUAToUpsteam bool `json:"pass_ua_to_upsteam" default:"true"`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
var config = driver.Config{
|
var config = driver.Config{
|
||||||
Name: "AList V3",
|
Name: "AList V3",
|
||||||
LocalSort: true,
|
LocalSort: true,
|
||||||
DefaultRoot: "/",
|
DefaultRoot: "/",
|
||||||
CheckStatus: true,
|
CheckStatus: true,
|
||||||
ProxyRangeOption: true,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
|
@ -3,6 +3,7 @@ package alist_v3
|
|||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"net/http"
|
"net/http"
|
||||||
|
"time"
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/drivers/base"
|
"github.com/alist-org/alist/v3/drivers/base"
|
||||||
"github.com/alist-org/alist/v3/internal/op"
|
"github.com/alist-org/alist/v3/internal/op"
|
||||||
@ -13,9 +14,6 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
func (d *AListV3) login() error {
|
func (d *AListV3) login() error {
|
||||||
if d.Username == "" {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
var resp common.Resp[LoginResp]
|
var resp common.Resp[LoginResp]
|
||||||
_, err := d.request("/auth/login", http.MethodPost, func(req *resty.Request) {
|
_, err := d.request("/auth/login", http.MethodPost, func(req *resty.Request) {
|
||||||
req.SetResult(&resp).SetBody(base.Json{
|
req.SetResult(&resp).SetBody(base.Json{
|
||||||
@ -59,3 +57,33 @@ func (d *AListV3) request(api, method string, callback base.ReqCallback, retry .
|
|||||||
}
|
}
|
||||||
return res.Body(), nil
|
return res.Body(), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (d *AListV3) requestWithTimeout(api, method string, callback base.ReqCallback, timeout time.Duration, retry ...bool) ([]byte, error) {
|
||||||
|
url := d.Address + "/api" + api
|
||||||
|
client := base.NewRestyClient().SetTimeout(timeout)
|
||||||
|
req := client.R()
|
||||||
|
req.SetHeader("Authorization", d.Token)
|
||||||
|
if callback != nil {
|
||||||
|
callback(req)
|
||||||
|
}
|
||||||
|
res, err := req.Execute(method, url)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
log.Debugf("[alist_v3] response body: %s", res.String())
|
||||||
|
if res.StatusCode() >= 400 {
|
||||||
|
return nil, fmt.Errorf("request failed, status: %s", res.Status())
|
||||||
|
}
|
||||||
|
code := utils.Json.Get(res.Body(), "code").ToInt()
|
||||||
|
if code != 200 {
|
||||||
|
if (code == 401 || code == 403) && !utils.IsBool(retry...) {
|
||||||
|
err = d.login()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return d.requestWithTimeout(api, method, callback, timeout, true)
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("request failed,code: %d, message: %s", code, utils.Json.Get(res.Body(), "message").ToString())
|
||||||
|
}
|
||||||
|
return res.Body(), nil
|
||||||
|
}
|
||||||
|
@ -194,7 +194,7 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, streamer model.Fil
|
|||||||
}
|
}
|
||||||
if d.RapidUpload {
|
if d.RapidUpload {
|
||||||
buf := bytes.NewBuffer(make([]byte, 0, 1024))
|
buf := bytes.NewBuffer(make([]byte, 0, 1024))
|
||||||
utils.CopyWithBufferN(buf, file, 1024)
|
io.CopyN(buf, file, 1024)
|
||||||
reqBody["pre_hash"] = utils.HashData(utils.SHA1, buf.Bytes())
|
reqBody["pre_hash"] = utils.HashData(utils.SHA1, buf.Bytes())
|
||||||
if localFile != nil {
|
if localFile != nil {
|
||||||
if _, err := localFile.Seek(0, io.SeekStart); err != nil {
|
if _, err := localFile.Seek(0, io.SeekStart); err != nil {
|
||||||
|
@ -6,7 +6,7 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type Addition struct {
|
type Addition struct {
|
||||||
DriveType string `json:"drive_type" type:"select" options:"default,resource,backup" default:"resource"`
|
DriveType string `json:"drive_type" type:"select" options:"default,resource,backup" default:"default"`
|
||||||
driver.RootID
|
driver.RootID
|
||||||
RefreshToken string `json:"refresh_token" required:"true"`
|
RefreshToken string `json:"refresh_token" required:"true"`
|
||||||
OrderBy string `json:"order_by" type:"select" options:"name,size,updated_at,created_at"`
|
OrderBy string `json:"order_by" type:"select" options:"name,size,updated_at,created_at"`
|
||||||
|
@ -136,7 +136,7 @@ func (d *AliyundriveOpen) calProofCode(stream model.FileStreamer) (string, error
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
_, err = utils.CopyWithBufferN(buf, reader, length)
|
_, err = io.CopyN(buf, reader, length)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
@ -164,7 +164,7 @@ func (d *AliyundriveOpen) upload(ctx context.Context, dstDir model.Obj, stream m
|
|||||||
count := int(math.Ceil(float64(stream.GetSize()) / float64(partSize)))
|
count := int(math.Ceil(float64(stream.GetSize()) / float64(partSize)))
|
||||||
createData["part_info_list"] = makePartInfos(count)
|
createData["part_info_list"] = makePartInfos(count)
|
||||||
// rapid upload
|
// rapid upload
|
||||||
rapidUpload := !stream.IsForceStreamUpload() && stream.GetSize() > 100*utils.KB && d.RapidUpload
|
rapidUpload := stream.GetSize() > 100*utils.KB && d.RapidUpload
|
||||||
if rapidUpload {
|
if rapidUpload {
|
||||||
log.Debugf("[aliyundrive_open] start cal pre_hash")
|
log.Debugf("[aliyundrive_open] start cal pre_hash")
|
||||||
// read 1024 bytes to calculate pre hash
|
// read 1024 bytes to calculate pre hash
|
||||||
@ -242,16 +242,13 @@ func (d *AliyundriveOpen) upload(ctx context.Context, dstDir model.Obj, stream m
|
|||||||
if remain := stream.GetSize() - offset; length > remain {
|
if remain := stream.GetSize() - offset; length > remain {
|
||||||
length = remain
|
length = remain
|
||||||
}
|
}
|
||||||
rd := utils.NewMultiReadable(io.LimitReader(stream, partSize))
|
//rd := utils.NewMultiReadable(io.LimitReader(stream, partSize))
|
||||||
if rapidUpload {
|
rd, err := stream.RangeRead(http_range.Range{Start: offset, Length: length})
|
||||||
srd, err := stream.RangeRead(http_range.Range{Start: offset, Length: length})
|
if err != nil {
|
||||||
if err != nil {
|
return nil, err
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
rd = utils.NewMultiReadable(srd)
|
|
||||||
}
|
}
|
||||||
err = retry.Do(func() error {
|
err = retry.Do(func() error {
|
||||||
rd.Reset()
|
//rd.Reset()
|
||||||
return d.uploadPart(ctx, rd, createResp.PartInfoList[i])
|
return d.uploadPart(ctx, rd, createResp.PartInfoList[i])
|
||||||
},
|
},
|
||||||
retry.Attempts(3),
|
retry.Attempts(3),
|
||||||
|
@ -22,29 +22,21 @@ import (
|
|||||||
_ "github.com/alist-org/alist/v3/drivers/cloudreve"
|
_ "github.com/alist-org/alist/v3/drivers/cloudreve"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/crypt"
|
_ "github.com/alist-org/alist/v3/drivers/crypt"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/dropbox"
|
_ "github.com/alist-org/alist/v3/drivers/dropbox"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/febbox"
|
|
||||||
_ "github.com/alist-org/alist/v3/drivers/ftp"
|
_ "github.com/alist-org/alist/v3/drivers/ftp"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/google_drive"
|
_ "github.com/alist-org/alist/v3/drivers/google_drive"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/google_photo"
|
_ "github.com/alist-org/alist/v3/drivers/google_photo"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/halalcloud"
|
|
||||||
_ "github.com/alist-org/alist/v3/drivers/ilanzou"
|
_ "github.com/alist-org/alist/v3/drivers/ilanzou"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/ipfs_api"
|
_ "github.com/alist-org/alist/v3/drivers/ipfs_api"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/kodbox"
|
|
||||||
_ "github.com/alist-org/alist/v3/drivers/lanzou"
|
_ "github.com/alist-org/alist/v3/drivers/lanzou"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/lenovonas_share"
|
|
||||||
_ "github.com/alist-org/alist/v3/drivers/local"
|
_ "github.com/alist-org/alist/v3/drivers/local"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/mediatrack"
|
_ "github.com/alist-org/alist/v3/drivers/mediatrack"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/mega"
|
_ "github.com/alist-org/alist/v3/drivers/mega"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/mopan"
|
_ "github.com/alist-org/alist/v3/drivers/mopan"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/netease_music"
|
|
||||||
_ "github.com/alist-org/alist/v3/drivers/onedrive"
|
_ "github.com/alist-org/alist/v3/drivers/onedrive"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/onedrive_app"
|
_ "github.com/alist-org/alist/v3/drivers/onedrive_app"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/onedrive_sharelink"
|
|
||||||
_ "github.com/alist-org/alist/v3/drivers/pikpak"
|
_ "github.com/alist-org/alist/v3/drivers/pikpak"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/pikpak_share"
|
_ "github.com/alist-org/alist/v3/drivers/pikpak_share"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/quark_uc"
|
_ "github.com/alist-org/alist/v3/drivers/quark_uc"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/quark_uc_tv"
|
|
||||||
_ "github.com/alist-org/alist/v3/drivers/quqi"
|
|
||||||
_ "github.com/alist-org/alist/v3/drivers/s3"
|
_ "github.com/alist-org/alist/v3/drivers/s3"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/seafile"
|
_ "github.com/alist-org/alist/v3/drivers/seafile"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/sftp"
|
_ "github.com/alist-org/alist/v3/drivers/sftp"
|
||||||
@ -52,8 +44,6 @@ import (
|
|||||||
_ "github.com/alist-org/alist/v3/drivers/teambition"
|
_ "github.com/alist-org/alist/v3/drivers/teambition"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/terabox"
|
_ "github.com/alist-org/alist/v3/drivers/terabox"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/thunder"
|
_ "github.com/alist-org/alist/v3/drivers/thunder"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/thunder_browser"
|
|
||||||
_ "github.com/alist-org/alist/v3/drivers/thunderx"
|
|
||||||
_ "github.com/alist-org/alist/v3/drivers/trainbit"
|
_ "github.com/alist-org/alist/v3/drivers/trainbit"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/url_tree"
|
_ "github.com/alist-org/alist/v3/drivers/url_tree"
|
||||||
_ "github.com/alist-org/alist/v3/drivers/uss"
|
_ "github.com/alist-org/alist/v3/drivers/uss"
|
||||||
|
@ -165,16 +165,9 @@ func (d *BaiduNetdisk) PutRapid(ctx context.Context, dstDir model.Obj, stream mo
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
// 修复时间,具体原因见 Put 方法注释的 **注意**
|
|
||||||
newFile.Ctime = stream.CreateTime().Unix()
|
|
||||||
newFile.Mtime = stream.ModTime().Unix()
|
|
||||||
return fileToObj(newFile), nil
|
return fileToObj(newFile), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Put
|
|
||||||
//
|
|
||||||
// **注意**: 截至 2024/04/20 百度云盘 api 接口返回的时间永远是当前时间,而不是文件时间。
|
|
||||||
// 而实际上云盘存储的时间是文件时间,所以此处需要覆盖时间,保证缓存与云盘的数据一致
|
|
||||||
func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
|
func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
|
||||||
// rapid upload
|
// rapid upload
|
||||||
if newObj, err := d.PutRapid(ctx, dstDir, stream); err == nil {
|
if newObj, err := d.PutRapid(ctx, dstDir, stream); err == nil {
|
||||||
@ -211,7 +204,7 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
|
|||||||
if i == count {
|
if i == count {
|
||||||
byteSize = lastBlockSize
|
byteSize = lastBlockSize
|
||||||
}
|
}
|
||||||
_, err := utils.CopyWithBufferN(io.MultiWriter(fileMd5H, sliceMd5H, slicemd5H2Write), tempFile, byteSize)
|
_, err := io.CopyN(io.MultiWriter(fileMd5H, sliceMd5H, slicemd5H2Write), tempFile, byteSize)
|
||||||
if err != nil && err != io.EOF {
|
if err != nil && err != io.EOF {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -252,9 +245,9 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
|
|||||||
log.Debugf("%+v", precreateResp)
|
log.Debugf("%+v", precreateResp)
|
||||||
if precreateResp.ReturnType == 2 {
|
if precreateResp.ReturnType == 2 {
|
||||||
//rapid upload, since got md5 match from baidu server
|
//rapid upload, since got md5 match from baidu server
|
||||||
// 修复时间,具体原因见 Put 方法注释的 **注意**
|
if err != nil {
|
||||||
precreateResp.File.Ctime = ctime
|
return nil, err
|
||||||
precreateResp.File.Mtime = mtime
|
}
|
||||||
return fileToObj(precreateResp.File), nil
|
return fileToObj(precreateResp.File), nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -305,9 +298,6 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
// 修复时间,具体原因见 Put 方法注释的 **注意**
|
|
||||||
newFile.Ctime = ctime
|
|
||||||
newFile.Mtime = mtime
|
|
||||||
return fileToObj(newFile), nil
|
return fileToObj(newFile), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -8,16 +8,15 @@ import (
|
|||||||
type Addition struct {
|
type Addition struct {
|
||||||
RefreshToken string `json:"refresh_token" required:"true"`
|
RefreshToken string `json:"refresh_token" required:"true"`
|
||||||
driver.RootPath
|
driver.RootPath
|
||||||
OrderBy string `json:"order_by" type:"select" options:"name,time,size" default:"name"`
|
OrderBy string `json:"order_by" type:"select" options:"name,time,size" default:"name"`
|
||||||
OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
|
OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
|
||||||
DownloadAPI string `json:"download_api" type:"select" options:"official,crack" default:"official"`
|
DownloadAPI string `json:"download_api" type:"select" options:"official,crack" default:"official"`
|
||||||
ClientID string `json:"client_id" required:"true" default:"iYCeC9g08h5vuP9UqvPHKKSVrKFXGa1v"`
|
ClientID string `json:"client_id" required:"true" default:"iYCeC9g08h5vuP9UqvPHKKSVrKFXGa1v"`
|
||||||
ClientSecret string `json:"client_secret" required:"true" default:"jXiFMOPVPCWlO2M5CwWQzffpNPaGTRBG"`
|
ClientSecret string `json:"client_secret" required:"true" default:"jXiFMOPVPCWlO2M5CwWQzffpNPaGTRBG"`
|
||||||
CustomCrackUA string `json:"custom_crack_ua" required:"true" default:"netdisk"`
|
CustomCrackUA string `json:"custom_crack_ua" required:"true" default:"netdisk"`
|
||||||
AccessToken string
|
AccessToken string
|
||||||
UploadThread string `json:"upload_thread" default:"3" help:"1<=thread<=32"`
|
UploadThread string `json:"upload_thread" default:"3" help:"1<=thread<=32"`
|
||||||
UploadAPI string `json:"upload_api" default:"https://d.pcs.baidu.com"`
|
UploadAPI string `json:"upload_api" default:"https://d.pcs.baidu.com"`
|
||||||
CustomUploadPartSize int64 `json:"custom_upload_part_size" type:"number" default:"0" help:"0 for auto"`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
var config = driver.Config{
|
var config = driver.Config{
|
||||||
|
@ -6,7 +6,6 @@ import (
|
|||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/internal/model"
|
"github.com/alist-org/alist/v3/internal/model"
|
||||||
"github.com/alist-org/alist/v3/pkg/utils"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type TokenErrResp struct {
|
type TokenErrResp struct {
|
||||||
@ -56,11 +55,11 @@ func fileToObj(f File) *model.ObjThumb {
|
|||||||
if f.ServerFilename == "" {
|
if f.ServerFilename == "" {
|
||||||
f.ServerFilename = path.Base(f.Path)
|
f.ServerFilename = path.Base(f.Path)
|
||||||
}
|
}
|
||||||
if f.ServerCtime == 0 {
|
if f.LocalCtime == 0 {
|
||||||
f.ServerCtime = f.Ctime
|
f.LocalCtime = f.Ctime
|
||||||
}
|
}
|
||||||
if f.ServerMtime == 0 {
|
if f.LocalMtime == 0 {
|
||||||
f.ServerMtime = f.Mtime
|
f.LocalMtime = f.Mtime
|
||||||
}
|
}
|
||||||
return &model.ObjThumb{
|
return &model.ObjThumb{
|
||||||
Object: model.Object{
|
Object: model.Object{
|
||||||
@ -68,12 +67,12 @@ func fileToObj(f File) *model.ObjThumb {
|
|||||||
Path: f.Path,
|
Path: f.Path,
|
||||||
Name: f.ServerFilename,
|
Name: f.ServerFilename,
|
||||||
Size: f.Size,
|
Size: f.Size,
|
||||||
Modified: time.Unix(f.ServerMtime, 0),
|
Modified: time.Unix(f.LocalMtime, 0),
|
||||||
Ctime: time.Unix(f.ServerCtime, 0),
|
Ctime: time.Unix(f.LocalCtime, 0),
|
||||||
IsFolder: f.Isdir == 1,
|
IsFolder: f.Isdir == 1,
|
||||||
|
|
||||||
// 直接获取的MD5是错误的
|
// 直接获取的MD5是错误的
|
||||||
HashInfo: utils.NewHashInfo(utils.MD5, DecryptMd5(f.Md5)),
|
// HashInfo: utils.NewHashInfo(utils.MD5, f.Md5),
|
||||||
},
|
},
|
||||||
Thumbnail: model.Thumbnail{Thumbnail: f.Thumbs.Url3},
|
Thumbnail: model.Thumbnail{Thumbnail: f.Thumbs.Url3},
|
||||||
}
|
}
|
||||||
|
@ -1,14 +1,11 @@
|
|||||||
package baidu_netdisk
|
package baidu_netdisk
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"encoding/hex"
|
|
||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"net/http"
|
"net/http"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
|
||||||
"time"
|
"time"
|
||||||
"unicode"
|
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/drivers/base"
|
"github.com/alist-org/alist/v3/drivers/base"
|
||||||
"github.com/alist-org/alist/v3/internal/errs"
|
"github.com/alist-org/alist/v3/internal/errs"
|
||||||
@ -156,6 +153,8 @@ func (d *BaiduNetdisk) linkOfficial(file model.Obj, args model.LinkArgs) (*model
|
|||||||
u = res.Header().Get("location")
|
u = res.Header().Get("location")
|
||||||
//}
|
//}
|
||||||
|
|
||||||
|
updateObjMd5(file, "pan.baidu.com", u)
|
||||||
|
|
||||||
return &model.Link{
|
return &model.Link{
|
||||||
URL: u,
|
URL: u,
|
||||||
Header: http.Header{
|
Header: http.Header{
|
||||||
@ -179,6 +178,8 @@ func (d *BaiduNetdisk) linkCrack(file model.Obj, args model.LinkArgs) (*model.Li
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
updateObjMd5(file, d.CustomCrackUA, resp.Info[0].Dlink)
|
||||||
|
|
||||||
return &model.Link{
|
return &model.Link{
|
||||||
URL: resp.Info[0].Dlink,
|
URL: resp.Info[0].Dlink,
|
||||||
Header: http.Header{
|
Header: http.Header{
|
||||||
@ -228,6 +229,19 @@ func joinTime(form map[string]string, ctime, mtime int64) {
|
|||||||
form["local_ctime"] = strconv.FormatInt(ctime, 10)
|
form["local_ctime"] = strconv.FormatInt(ctime, 10)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func updateObjMd5(obj model.Obj, userAgent, u string) {
|
||||||
|
object := model.GetRawObject(obj)
|
||||||
|
if object != nil {
|
||||||
|
req, _ := http.NewRequest(http.MethodHead, u, nil)
|
||||||
|
req.Header.Add("User-Agent", userAgent)
|
||||||
|
resp, _ := base.HttpClient.Do(req)
|
||||||
|
if resp != nil {
|
||||||
|
contentMd5 := resp.Header.Get("Content-Md5")
|
||||||
|
object.HashInfo = utils.NewHashInfo(utils.MD5, contentMd5)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
const (
|
const (
|
||||||
DefaultSliceSize int64 = 4 * utils.MB
|
DefaultSliceSize int64 = 4 * utils.MB
|
||||||
VipSliceSize = 16 * utils.MB
|
VipSliceSize = 16 * utils.MB
|
||||||
@ -235,9 +249,6 @@ const (
|
|||||||
)
|
)
|
||||||
|
|
||||||
func (d *BaiduNetdisk) getSliceSize() int64 {
|
func (d *BaiduNetdisk) getSliceSize() int64 {
|
||||||
if d.CustomUploadPartSize != 0 {
|
|
||||||
return d.CustomUploadPartSize
|
|
||||||
}
|
|
||||||
switch d.vipType {
|
switch d.vipType {
|
||||||
case 1:
|
case 1:
|
||||||
return VipSliceSize
|
return VipSliceSize
|
||||||
@ -253,40 +264,3 @@ func (d *BaiduNetdisk) getSliceSize() int64 {
|
|||||||
// r = strings.ReplaceAll(r, "+", "%20")
|
// r = strings.ReplaceAll(r, "+", "%20")
|
||||||
// return r
|
// return r
|
||||||
// }
|
// }
|
||||||
|
|
||||||
func DecryptMd5(encryptMd5 string) string {
|
|
||||||
if _, err := hex.DecodeString(encryptMd5); err == nil {
|
|
||||||
return encryptMd5
|
|
||||||
}
|
|
||||||
|
|
||||||
var out strings.Builder
|
|
||||||
out.Grow(len(encryptMd5))
|
|
||||||
for i, n := 0, int64(0); i < len(encryptMd5); i++ {
|
|
||||||
if i == 9 {
|
|
||||||
n = int64(unicode.ToLower(rune(encryptMd5[i])) - 'g')
|
|
||||||
} else {
|
|
||||||
n, _ = strconv.ParseInt(encryptMd5[i:i+1], 16, 64)
|
|
||||||
}
|
|
||||||
out.WriteString(strconv.FormatInt(n^int64(15&i), 16))
|
|
||||||
}
|
|
||||||
|
|
||||||
encryptMd5 = out.String()
|
|
||||||
return encryptMd5[8:16] + encryptMd5[:8] + encryptMd5[24:32] + encryptMd5[16:24]
|
|
||||||
}
|
|
||||||
|
|
||||||
func EncryptMd5(originalMd5 string) string {
|
|
||||||
reversed := originalMd5[8:16] + originalMd5[:8] + originalMd5[24:32] + originalMd5[16:24]
|
|
||||||
|
|
||||||
var out strings.Builder
|
|
||||||
out.Grow(len(reversed))
|
|
||||||
for i, n := 0, int64(0); i < len(reversed); i++ {
|
|
||||||
n, _ = strconv.ParseInt(reversed[i:i+1], 16, 64)
|
|
||||||
n ^= int64(15 & i)
|
|
||||||
if i == 9 {
|
|
||||||
out.WriteRune(rune(n) + 'g')
|
|
||||||
} else {
|
|
||||||
out.WriteString(strconv.FormatInt(n, 16))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return out.String()
|
|
||||||
}
|
|
||||||
|
@ -137,19 +137,13 @@ func (d *BaiduPhoto) Link(ctx context.Context, file model.Obj, args model.LinkAr
|
|||||||
case *File:
|
case *File:
|
||||||
return d.linkFile(ctx, file, args)
|
return d.linkFile(ctx, file, args)
|
||||||
case *AlbumFile:
|
case *AlbumFile:
|
||||||
// 处理共享相册
|
f, err := d.CopyAlbumFile(ctx, file)
|
||||||
if d.Uk != file.Uk {
|
if err != nil {
|
||||||
// 有概率无法获取到链接
|
return nil, err
|
||||||
return d.linkAlbum(ctx, file, args)
|
|
||||||
|
|
||||||
// 接口被限制,只能使用cookie
|
|
||||||
// f, err := d.CopyAlbumFile(ctx, file)
|
|
||||||
// if err != nil {
|
|
||||||
// return nil, err
|
|
||||||
// }
|
|
||||||
// return d.linkFile(ctx, f, args)
|
|
||||||
}
|
}
|
||||||
return d.linkFile(ctx, &file.File, args)
|
return d.linkFile(ctx, f, args)
|
||||||
|
// 有概率无法获取到链接
|
||||||
|
//return d.linkAlbum(ctx, file, args)
|
||||||
}
|
}
|
||||||
return nil, errs.NotFile
|
return nil, errs.NotFile
|
||||||
}
|
}
|
||||||
@ -267,7 +261,7 @@ func (d *BaiduPhoto) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
|
|||||||
if i == count {
|
if i == count {
|
||||||
byteSize = lastBlockSize
|
byteSize = lastBlockSize
|
||||||
}
|
}
|
||||||
_, err := utils.CopyWithBufferN(io.MultiWriter(fileMd5H, sliceMd5H, slicemd5H2Write), tempFile, byteSize)
|
_, err := io.CopyN(io.MultiWriter(fileMd5H, sliceMd5H, slicemd5H2Write), tempFile, byteSize)
|
||||||
if err != nil && err != io.EOF {
|
if err != nil && err != io.EOF {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
@ -72,7 +72,7 @@ func (c *File) Thumb() string {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (c *File) GetHash() utils.HashInfo {
|
func (c *File) GetHash() utils.HashInfo {
|
||||||
return utils.NewHashInfo(utils.MD5, DecryptMd5(c.Md5))
|
return utils.NewHashInfo(utils.MD5, c.Md5)
|
||||||
}
|
}
|
||||||
|
|
||||||
/*相册部分*/
|
/*相册部分*/
|
||||||
|
@ -2,12 +2,8 @@ package baiduphoto
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"encoding/hex"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"net/http"
|
"net/http"
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
"unicode"
|
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/drivers/base"
|
"github.com/alist-org/alist/v3/drivers/base"
|
||||||
"github.com/alist-org/alist/v3/internal/errs"
|
"github.com/alist-org/alist/v3/internal/errs"
|
||||||
@ -25,8 +21,8 @@ const (
|
|||||||
FILE_API_URL_V2 = API_URL + "/file/v2"
|
FILE_API_URL_V2 = API_URL + "/file/v2"
|
||||||
)
|
)
|
||||||
|
|
||||||
func (d *BaiduPhoto) Request(client *resty.Client, furl string, method string, callback base.ReqCallback, resp interface{}) (*resty.Response, error) {
|
func (d *BaiduPhoto) Request(furl string, method string, callback base.ReqCallback, resp interface{}) (*resty.Response, error) {
|
||||||
req := client.R().
|
req := base.RestyClient.R().
|
||||||
SetQueryParam("access_token", d.AccessToken)
|
SetQueryParam("access_token", d.AccessToken)
|
||||||
if callback != nil {
|
if callback != nil {
|
||||||
callback(req)
|
callback(req)
|
||||||
@ -92,11 +88,11 @@ func (d *BaiduPhoto) refreshToken() error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (d *BaiduPhoto) Get(furl string, callback base.ReqCallback, resp interface{}) (*resty.Response, error) {
|
func (d *BaiduPhoto) Get(furl string, callback base.ReqCallback, resp interface{}) (*resty.Response, error) {
|
||||||
return d.Request(base.RestyClient, furl, http.MethodGet, callback, resp)
|
return d.Request(furl, http.MethodGet, callback, resp)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *BaiduPhoto) Post(furl string, callback base.ReqCallback, resp interface{}) (*resty.Response, error) {
|
func (d *BaiduPhoto) Post(furl string, callback base.ReqCallback, resp interface{}) (*resty.Response, error) {
|
||||||
return d.Request(base.RestyClient, furl, http.MethodPost, callback, resp)
|
return d.Request(furl, http.MethodPost, callback, resp)
|
||||||
}
|
}
|
||||||
|
|
||||||
// 获取所有文件
|
// 获取所有文件
|
||||||
@ -342,33 +338,24 @@ func (d *BaiduPhoto) linkAlbum(ctx context.Context, file *AlbumFile, args model.
|
|||||||
headers["X-Forwarded-For"] = args.IP
|
headers["X-Forwarded-For"] = args.IP
|
||||||
}
|
}
|
||||||
|
|
||||||
resp, err := d.Request(base.NoRedirectClient, ALBUM_API_URL+"/download", http.MethodHead, func(r *resty.Request) {
|
res, err := base.NoRedirectClient.R().
|
||||||
r.SetContext(ctx)
|
SetContext(ctx).
|
||||||
r.SetHeaders(headers)
|
SetHeaders(headers).
|
||||||
r.SetQueryParams(map[string]string{
|
SetQueryParams(map[string]string{
|
||||||
"fsid": fmt.Sprint(file.Fsid),
|
"access_token": d.AccessToken,
|
||||||
"album_id": file.AlbumID,
|
"fsid": fmt.Sprint(file.Fsid),
|
||||||
"tid": fmt.Sprint(file.Tid),
|
"album_id": file.AlbumID,
|
||||||
"uk": fmt.Sprint(file.Uk),
|
"tid": fmt.Sprint(file.Tid),
|
||||||
})
|
"uk": fmt.Sprint(file.Uk),
|
||||||
}, nil)
|
}).
|
||||||
|
Head(ALBUM_API_URL + "/download")
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if resp.StatusCode() != 302 {
|
|
||||||
return nil, fmt.Errorf("not found 302 redirect")
|
|
||||||
}
|
|
||||||
|
|
||||||
location := resp.Header().Get("Location")
|
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
link := &model.Link{
|
link := &model.Link{
|
||||||
URL: location,
|
URL: res.Header().Get("location"),
|
||||||
Header: http.Header{
|
Header: http.Header{
|
||||||
"User-Agent": []string{headers["User-Agent"]},
|
"User-Agent": []string{headers["User-Agent"]},
|
||||||
"Referer": []string{"https://photo.baidu.com/"},
|
"Referer": []string{"https://photo.baidu.com/"},
|
||||||
@ -388,36 +375,22 @@ func (d *BaiduPhoto) linkFile(ctx context.Context, file *File, args model.LinkAr
|
|||||||
headers["X-Forwarded-For"] = args.IP
|
headers["X-Forwarded-For"] = args.IP
|
||||||
}
|
}
|
||||||
|
|
||||||
// var downloadUrl struct {
|
var downloadUrl struct {
|
||||||
// Dlink string `json:"dlink"`
|
Dlink string `json:"dlink"`
|
||||||
// }
|
}
|
||||||
// _, err := d.Get(FILE_API_URL_V1+"/download", func(r *resty.Request) {
|
_, err := d.Get(FILE_API_URL_V2+"/download", func(r *resty.Request) {
|
||||||
// r.SetContext(ctx)
|
|
||||||
// r.SetHeaders(headers)
|
|
||||||
// r.SetQueryParams(map[string]string{
|
|
||||||
// "fsid": fmt.Sprint(file.Fsid),
|
|
||||||
// })
|
|
||||||
// }, &downloadUrl)
|
|
||||||
|
|
||||||
resp, err := d.Request(base.NoRedirectClient, FILE_API_URL_V1+"/download", http.MethodHead, func(r *resty.Request) {
|
|
||||||
r.SetContext(ctx)
|
r.SetContext(ctx)
|
||||||
r.SetHeaders(headers)
|
r.SetHeaders(headers)
|
||||||
r.SetQueryParams(map[string]string{
|
r.SetQueryParams(map[string]string{
|
||||||
"fsid": fmt.Sprint(file.Fsid),
|
"fsid": fmt.Sprint(file.Fsid),
|
||||||
})
|
})
|
||||||
}, nil)
|
}, &downloadUrl)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if resp.StatusCode() != 302 {
|
|
||||||
return nil, fmt.Errorf("not found 302 redirect")
|
|
||||||
}
|
|
||||||
|
|
||||||
location := resp.Header().Get("Location")
|
|
||||||
link := &model.Link{
|
link := &model.Link{
|
||||||
URL: location,
|
URL: downloadUrl.Dlink,
|
||||||
Header: http.Header{
|
Header: http.Header{
|
||||||
"User-Agent": []string{headers["User-Agent"]},
|
"User-Agent": []string{headers["User-Agent"]},
|
||||||
"Referer": []string{"https://photo.baidu.com/"},
|
"Referer": []string{"https://photo.baidu.com/"},
|
||||||
@ -480,40 +453,3 @@ func (d *BaiduPhoto) uInfo() (*UInfo, error) {
|
|||||||
}
|
}
|
||||||
return &info, nil
|
return &info, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func DecryptMd5(encryptMd5 string) string {
|
|
||||||
if _, err := hex.DecodeString(encryptMd5); err == nil {
|
|
||||||
return encryptMd5
|
|
||||||
}
|
|
||||||
|
|
||||||
var out strings.Builder
|
|
||||||
out.Grow(len(encryptMd5))
|
|
||||||
for i, n := 0, int64(0); i < len(encryptMd5); i++ {
|
|
||||||
if i == 9 {
|
|
||||||
n = int64(unicode.ToLower(rune(encryptMd5[i])) - 'g')
|
|
||||||
} else {
|
|
||||||
n, _ = strconv.ParseInt(encryptMd5[i:i+1], 16, 64)
|
|
||||||
}
|
|
||||||
out.WriteString(strconv.FormatInt(n^int64(15&i), 16))
|
|
||||||
}
|
|
||||||
|
|
||||||
encryptMd5 = out.String()
|
|
||||||
return encryptMd5[8:16] + encryptMd5[:8] + encryptMd5[24:32] + encryptMd5[16:24]
|
|
||||||
}
|
|
||||||
|
|
||||||
func EncryptMd5(originalMd5 string) string {
|
|
||||||
reversed := originalMd5[8:16] + originalMd5[:8] + originalMd5[24:32] + originalMd5[16:24]
|
|
||||||
|
|
||||||
var out strings.Builder
|
|
||||||
out.Grow(len(reversed))
|
|
||||||
for i, n := 0, int64(0); i < len(reversed); i++ {
|
|
||||||
n, _ = strconv.ParseInt(reversed[i:i+1], 16, 64)
|
|
||||||
n ^= int64(15 & i)
|
|
||||||
if i == 9 {
|
|
||||||
out.WriteRune(rune(n) + 'g')
|
|
||||||
} else {
|
|
||||||
out.WriteString(strconv.FormatInt(n, 16))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return out.String()
|
|
||||||
}
|
|
||||||
|
@ -67,9 +67,7 @@ func (d *ChaoXing) Init(ctx context.Context) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (d *ChaoXing) Drop(ctx context.Context) error {
|
func (d *ChaoXing) Drop(ctx context.Context) error {
|
||||||
if d.cron != nil {
|
d.cron.Stop()
|
||||||
d.cron.Stop()
|
|
||||||
}
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -231,7 +229,7 @@ func (d *ChaoXing) Put(ctx context.Context, dstDir model.Obj, stream model.FileS
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
_, err = utils.CopyWithBuffer(filePart, stream)
|
_, err = io.Copy(filePart, stream)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
@ -1,9 +1,7 @@
|
|||||||
package chaoxing
|
package chaoxing
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"strconv"
|
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/internal/model"
|
"github.com/alist-org/alist/v3/internal/model"
|
||||||
@ -90,59 +88,44 @@ type UserAuth struct {
|
|||||||
} `json:"operationAuth"`
|
} `json:"operationAuth"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// 手机端学习通上传的文件的json内容(content字段)与网页端上传的有所不同
|
|
||||||
// 网页端json `"puid": 54321, "size": 12345`
|
|
||||||
// 手机端json `"puid": "54321". "size": "12345"`
|
|
||||||
type int_str int
|
|
||||||
|
|
||||||
// json 字符串数字和纯数字解析
|
|
||||||
func (ios *int_str) UnmarshalJSON(data []byte) error {
|
|
||||||
intValue, err := strconv.Atoi(string(bytes.Trim(data, "\"")))
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
*ios = int_str(intValue)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
type File struct {
|
type File struct {
|
||||||
Cataid int `json:"cataid"`
|
Cataid int `json:"cataid"`
|
||||||
Cfid int `json:"cfid"`
|
Cfid int `json:"cfid"`
|
||||||
Content struct {
|
Content struct {
|
||||||
Cfid int `json:"cfid"`
|
Cfid int `json:"cfid"`
|
||||||
Pid int `json:"pid"`
|
Pid int `json:"pid"`
|
||||||
FolderName string `json:"folderName"`
|
FolderName string `json:"folderName"`
|
||||||
ShareType int `json:"shareType"`
|
ShareType int `json:"shareType"`
|
||||||
Preview string `json:"preview"`
|
Preview string `json:"preview"`
|
||||||
Filetype string `json:"filetype"`
|
Filetype string `json:"filetype"`
|
||||||
PreviewURL string `json:"previewUrl"`
|
PreviewURL string `json:"previewUrl"`
|
||||||
IsImg bool `json:"isImg"`
|
IsImg bool `json:"isImg"`
|
||||||
ParentPath string `json:"parentPath"`
|
ParentPath string `json:"parentPath"`
|
||||||
Icon string `json:"icon"`
|
Icon string `json:"icon"`
|
||||||
Suffix string `json:"suffix"`
|
Suffix string `json:"suffix"`
|
||||||
Duration int `json:"duration"`
|
Duration int `json:"duration"`
|
||||||
Pantype string `json:"pantype"`
|
Pantype string `json:"pantype"`
|
||||||
Puid int_str `json:"puid"`
|
Puid int `json:"puid"`
|
||||||
Filepath string `json:"filepath"`
|
Filepath string `json:"filepath"`
|
||||||
Crc string `json:"crc"`
|
Crc string `json:"crc"`
|
||||||
Isfile bool `json:"isfile"`
|
Isfile bool `json:"isfile"`
|
||||||
Residstr string `json:"residstr"`
|
Residstr string `json:"residstr"`
|
||||||
ObjectID string `json:"objectId"`
|
ObjectID string `json:"objectId"`
|
||||||
Extinfo string `json:"extinfo"`
|
Extinfo string `json:"extinfo"`
|
||||||
Thumbnail string `json:"thumbnail"`
|
Thumbnail string `json:"thumbnail"`
|
||||||
Creator int `json:"creator"`
|
Creator int `json:"creator"`
|
||||||
ResTypeValue int `json:"resTypeValue"`
|
ResTypeValue int `json:"resTypeValue"`
|
||||||
UploadDateFormat string `json:"uploadDateFormat"`
|
UploadDateFormat string `json:"uploadDateFormat"`
|
||||||
DisableOpt bool `json:"disableOpt"`
|
DisableOpt bool `json:"disableOpt"`
|
||||||
DownPath string `json:"downPath"`
|
DownPath string `json:"downPath"`
|
||||||
Sort int `json:"sort"`
|
Sort int `json:"sort"`
|
||||||
Topsort int `json:"topsort"`
|
Topsort int `json:"topsort"`
|
||||||
Restype string `json:"restype"`
|
Restype string `json:"restype"`
|
||||||
Size int_str `json:"size"`
|
Size int `json:"size"`
|
||||||
UploadDate int64 `json:"uploadDate"`
|
UploadDate string `json:"uploadDate"`
|
||||||
FileSize string `json:"fileSize"`
|
FileSize string `json:"fileSize"`
|
||||||
Name string `json:"name"`
|
Name string `json:"name"`
|
||||||
FileID string `json:"fileId"`
|
FileID string `json:"fileId"`
|
||||||
} `json:"content"`
|
} `json:"content"`
|
||||||
CreatorID int `json:"creatorId"`
|
CreatorID int `json:"creatorId"`
|
||||||
DesID string `json:"des_id"`
|
DesID string `json:"des_id"`
|
||||||
@ -191,67 +174,68 @@ type UploadFileDataRsp struct {
|
|||||||
Resid int64 `json:"resid"`
|
Resid int64 `json:"resid"`
|
||||||
Puid int `json:"puid"`
|
Puid int `json:"puid"`
|
||||||
Data struct {
|
Data struct {
|
||||||
DisableOpt bool `json:"disableOpt"`
|
DisableOpt bool `json:"disableOpt"`
|
||||||
Resid int64 `json:"resid"`
|
Resid int64 `json:"resid"`
|
||||||
Crc string `json:"crc"`
|
Crc string `json:"crc"`
|
||||||
Puid int `json:"puid"`
|
Puid int `json:"puid"`
|
||||||
Isfile bool `json:"isfile"`
|
Isfile bool `json:"isfile"`
|
||||||
Pantype string `json:"pantype"`
|
Pantype string `json:"pantype"`
|
||||||
Size int `json:"size"`
|
Size int `json:"size"`
|
||||||
Name string `json:"name"`
|
Name string `json:"name"`
|
||||||
ObjectID string `json:"objectId"`
|
ObjectID string `json:"objectId"`
|
||||||
Restype string `json:"restype"`
|
Restype string `json:"restype"`
|
||||||
UploadDate int64 `json:"uploadDate"`
|
UploadDate time.Time `json:"uploadDate"`
|
||||||
ModifyDate int64 `json:"modifyDate"`
|
ModifyDate time.Time `json:"modifyDate"`
|
||||||
UploadDateFormat string `json:"uploadDateFormat"`
|
UploadDateFormat string `json:"uploadDateFormat"`
|
||||||
Residstr string `json:"residstr"`
|
Residstr string `json:"residstr"`
|
||||||
Suffix string `json:"suffix"`
|
Suffix string `json:"suffix"`
|
||||||
Preview string `json:"preview"`
|
Preview string `json:"preview"`
|
||||||
Thumbnail string `json:"thumbnail"`
|
Thumbnail string `json:"thumbnail"`
|
||||||
Creator int `json:"creator"`
|
Creator int `json:"creator"`
|
||||||
Duration int `json:"duration"`
|
Duration int `json:"duration"`
|
||||||
IsImg bool `json:"isImg"`
|
IsImg bool `json:"isImg"`
|
||||||
PreviewURL string `json:"previewUrl"`
|
PreviewURL string `json:"previewUrl"`
|
||||||
Filetype string `json:"filetype"`
|
Filetype string `json:"filetype"`
|
||||||
Filepath string `json:"filepath"`
|
Filepath string `json:"filepath"`
|
||||||
Sort int `json:"sort"`
|
Sort int `json:"sort"`
|
||||||
Topsort int `json:"topsort"`
|
Topsort int `json:"topsort"`
|
||||||
ResTypeValue int `json:"resTypeValue"`
|
ResTypeValue int `json:"resTypeValue"`
|
||||||
Extinfo string `json:"extinfo"`
|
Extinfo string `json:"extinfo"`
|
||||||
} `json:"data"`
|
} `json:"data"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
type UploadDoneParam struct {
|
type UploadDoneParam struct {
|
||||||
Cataid string `json:"cataid"`
|
Cataid string `json:"cataid"`
|
||||||
Key string `json:"key"`
|
Key string `json:"key"`
|
||||||
Param struct {
|
Param struct {
|
||||||
DisableOpt bool `json:"disableOpt"`
|
DisableOpt bool `json:"disableOpt"`
|
||||||
Resid int64 `json:"resid"`
|
Resid int64 `json:"resid"`
|
||||||
Crc string `json:"crc"`
|
Crc string `json:"crc"`
|
||||||
Puid int `json:"puid"`
|
Puid int `json:"puid"`
|
||||||
Isfile bool `json:"isfile"`
|
Isfile bool `json:"isfile"`
|
||||||
Pantype string `json:"pantype"`
|
Pantype string `json:"pantype"`
|
||||||
Size int `json:"size"`
|
Size int `json:"size"`
|
||||||
Name string `json:"name"`
|
Name string `json:"name"`
|
||||||
ObjectID string `json:"objectId"`
|
ObjectID string `json:"objectId"`
|
||||||
Restype string `json:"restype"`
|
Restype string `json:"restype"`
|
||||||
UploadDate int64 `json:"uploadDate"`
|
UploadDate time.Time `json:"uploadDate"`
|
||||||
ModifyDate int64 `json:"modifyDate"`
|
ModifyDate time.Time `json:"modifyDate"`
|
||||||
UploadDateFormat string `json:"uploadDateFormat"`
|
UploadDateFormat string `json:"uploadDateFormat"`
|
||||||
Residstr string `json:"residstr"`
|
Residstr string `json:"residstr"`
|
||||||
Suffix string `json:"suffix"`
|
Suffix string `json:"suffix"`
|
||||||
Preview string `json:"preview"`
|
Preview string `json:"preview"`
|
||||||
Thumbnail string `json:"thumbnail"`
|
Thumbnail string `json:"thumbnail"`
|
||||||
Creator int `json:"creator"`
|
Creator int `json:"creator"`
|
||||||
Duration int `json:"duration"`
|
Duration int `json:"duration"`
|
||||||
IsImg bool `json:"isImg"`
|
IsImg bool `json:"isImg"`
|
||||||
PreviewURL string `json:"previewUrl"`
|
PreviewURL string `json:"previewUrl"`
|
||||||
Filetype string `json:"filetype"`
|
Filetype string `json:"filetype"`
|
||||||
Filepath string `json:"filepath"`
|
Filepath string `json:"filepath"`
|
||||||
Sort int `json:"sort"`
|
Sort int `json:"sort"`
|
||||||
Topsort int `json:"topsort"`
|
Topsort int `json:"topsort"`
|
||||||
ResTypeValue int `json:"resTypeValue"`
|
ResTypeValue int `json:"resTypeValue"`
|
||||||
Extinfo string `json:"extinfo"`
|
Extinfo string `json:"extinfo"`
|
||||||
} `json:"param"`
|
} `json:"param"`
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -265,7 +249,10 @@ func fileToObj(f File) *model.Object {
|
|||||||
IsFolder: true,
|
IsFolder: true,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
paserTime := time.UnixMilli(f.Content.UploadDate)
|
paserTime, err := time.Parse("2006-01-02 15:04", f.Content.UploadDate)
|
||||||
|
if err != nil {
|
||||||
|
paserTime = time.Now()
|
||||||
|
}
|
||||||
return &model.Object{
|
return &model.Object{
|
||||||
ID: fmt.Sprintf("%d$%s", f.ID, f.Content.FileID),
|
ID: fmt.Sprintf("%d$%s", f.ID, f.Content.FileID),
|
||||||
Name: f.Content.Name,
|
Name: f.Content.Name,
|
||||||
|
@ -79,7 +79,7 @@ func (d *ChaoXing) GetFiles(parent string) ([]File, error) {
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
if resp.Result != 1 {
|
if resp.Result != 1 {
|
||||||
msg := fmt.Sprintf("error code is:%d", resp.Result)
|
msg:=fmt.Sprintf("error code is:%d", resp.Result)
|
||||||
return nil, errors.New(msg)
|
return nil, errors.New(msg)
|
||||||
}
|
}
|
||||||
if len(resp.List) > 0 {
|
if len(resp.List) > 0 {
|
||||||
@ -97,12 +97,8 @@ func (d *ChaoXing) GetFiles(parent string) ([]File, error) {
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
for _, file := range resps.List {
|
if len(resps.List) > 0 {
|
||||||
// 手机端超星上传的文件没有fileID字段,但ObjectID与fileID相同,可代替
|
files = append(files, resps.List...)
|
||||||
if file.Content.FileID == "" {
|
|
||||||
file.Content.FileID = file.Content.ObjectID
|
|
||||||
}
|
|
||||||
files = append(files, file)
|
|
||||||
}
|
}
|
||||||
return files, nil
|
return files, nil
|
||||||
}
|
}
|
||||||
|
@ -4,7 +4,6 @@ import (
|
|||||||
"context"
|
"context"
|
||||||
"io"
|
"io"
|
||||||
"net/http"
|
"net/http"
|
||||||
"path"
|
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
@ -72,9 +71,6 @@ func (d *Cloudreve) Link(ctx context.Context, file model.Obj, args model.LinkArg
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
if strings.HasPrefix(dUrl, "/api") {
|
|
||||||
dUrl = d.Address + dUrl
|
|
||||||
}
|
|
||||||
return &model.Link{
|
return &model.Link{
|
||||||
URL: dUrl,
|
URL: dUrl,
|
||||||
}, nil
|
}, nil
|
||||||
@ -91,7 +87,7 @@ func (d *Cloudreve) MakeDir(ctx context.Context, parentDir model.Obj, dirName st
|
|||||||
func (d *Cloudreve) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
func (d *Cloudreve) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||||
body := base.Json{
|
body := base.Json{
|
||||||
"action": "move",
|
"action": "move",
|
||||||
"src_dir": path.Dir(srcObj.GetPath()),
|
"src_dir": srcObj.GetPath(),
|
||||||
"dst": dstDir.GetPath(),
|
"dst": dstDir.GetPath(),
|
||||||
"src": convertSrc(srcObj),
|
"src": convertSrc(srcObj),
|
||||||
}
|
}
|
||||||
@ -113,7 +109,7 @@ func (d *Cloudreve) Rename(ctx context.Context, srcObj model.Obj, newName string
|
|||||||
|
|
||||||
func (d *Cloudreve) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
func (d *Cloudreve) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||||
body := base.Json{
|
body := base.Json{
|
||||||
"src_dir": path.Dir(srcObj.GetPath()),
|
"src_dir": srcObj.GetPath(),
|
||||||
"dst": dstDir.GetPath(),
|
"dst": dstDir.GetPath(),
|
||||||
"src": convertSrc(srcObj),
|
"src": convertSrc(srcObj),
|
||||||
}
|
}
|
||||||
|
@ -3,6 +3,7 @@ package crypt
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"github.com/alist-org/alist/v3/internal/stream"
|
||||||
"io"
|
"io"
|
||||||
stdpath "path"
|
stdpath "path"
|
||||||
"regexp"
|
"regexp"
|
||||||
@ -13,7 +14,6 @@ import (
|
|||||||
"github.com/alist-org/alist/v3/internal/fs"
|
"github.com/alist-org/alist/v3/internal/fs"
|
||||||
"github.com/alist-org/alist/v3/internal/model"
|
"github.com/alist-org/alist/v3/internal/model"
|
||||||
"github.com/alist-org/alist/v3/internal/op"
|
"github.com/alist-org/alist/v3/internal/op"
|
||||||
"github.com/alist-org/alist/v3/internal/stream"
|
|
||||||
"github.com/alist-org/alist/v3/pkg/http_range"
|
"github.com/alist-org/alist/v3/pkg/http_range"
|
||||||
"github.com/alist-org/alist/v3/pkg/utils"
|
"github.com/alist-org/alist/v3/pkg/utils"
|
||||||
"github.com/alist-org/alist/v3/server/common"
|
"github.com/alist-org/alist/v3/server/common"
|
||||||
@ -160,7 +160,7 @@ func (d *Crypt) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([
|
|||||||
// discarding hash as it's encrypted
|
// discarding hash as it's encrypted
|
||||||
}
|
}
|
||||||
if d.Thumbnail && thumb == "" {
|
if d.Thumbnail && thumb == "" {
|
||||||
thumb = utils.EncodePath(common.GetApiUrl(nil)+stdpath.Join("/d", args.ReqPath, ".thumbnails", name+".webp"), true)
|
thumb = utils.EncodePath(common.GetApiUrl(nil) + stdpath.Join("/d", args.ReqPath, ".thumbnails", name+".webp"), true)
|
||||||
}
|
}
|
||||||
if !ok && !d.Thumbnail {
|
if !ok && !d.Thumbnail {
|
||||||
result = append(result, &objRes)
|
result = append(result, &objRes)
|
||||||
@ -389,11 +389,10 @@ func (d *Crypt) Put(ctx context.Context, dstDir model.Obj, streamer model.FileSt
|
|||||||
Modified: streamer.ModTime(),
|
Modified: streamer.ModTime(),
|
||||||
IsFolder: streamer.IsDir(),
|
IsFolder: streamer.IsDir(),
|
||||||
},
|
},
|
||||||
Reader: wrappedIn,
|
Reader: wrappedIn,
|
||||||
Mimetype: "application/octet-stream",
|
Mimetype: "application/octet-stream",
|
||||||
WebPutAsTask: streamer.NeedStore(),
|
WebPutAsTask: streamer.NeedStore(),
|
||||||
ForceStreamUpload: true,
|
Exist: streamer.GetExist(),
|
||||||
Exist: streamer.GetExist(),
|
|
||||||
}
|
}
|
||||||
err = op.Put(ctx, d.remoteStorage, dstDirActualPath, streamOut, up, false)
|
err = op.Put(ctx, d.remoteStorage, dstDirActualPath, streamOut, up, false)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@ -45,25 +45,7 @@ func (d *Dropbox) Init(ctx context.Context) error {
|
|||||||
if result != query {
|
if result != query {
|
||||||
return fmt.Errorf("failed to check user: %s", string(res))
|
return fmt.Errorf("failed to check user: %s", string(res))
|
||||||
}
|
}
|
||||||
d.RootNamespaceId, err = d.GetRootNamespaceId(ctx)
|
return nil
|
||||||
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Dropbox) GetRootNamespaceId(ctx context.Context) (string, error) {
|
|
||||||
res, err := d.request("/2/users/get_current_account", http.MethodPost, func(req *resty.Request) {
|
|
||||||
req.SetBody(nil)
|
|
||||||
})
|
|
||||||
if err != nil {
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
var currentAccountResp CurrentAccountResp
|
|
||||||
err = utils.Json.Unmarshal(res, ¤tAccountResp)
|
|
||||||
if err != nil {
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
rootNamespaceId := currentAccountResp.RootInfo.RootNamespaceId
|
|
||||||
return rootNamespaceId, nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Dropbox) Drop(ctx context.Context) error {
|
func (d *Dropbox) Drop(ctx context.Context) error {
|
||||||
|
@ -17,8 +17,7 @@ type Addition struct {
|
|||||||
ClientID string `json:"client_id" required:"false" help:"Keep it empty if you don't have one"`
|
ClientID string `json:"client_id" required:"false" help:"Keep it empty if you don't have one"`
|
||||||
ClientSecret string `json:"client_secret" required:"false" help:"Keep it empty if you don't have one"`
|
ClientSecret string `json:"client_secret" required:"false" help:"Keep it empty if you don't have one"`
|
||||||
|
|
||||||
AccessToken string
|
AccessToken string
|
||||||
RootNamespaceId string
|
|
||||||
}
|
}
|
||||||
|
|
||||||
var config = driver.Config{
|
var config = driver.Config{
|
||||||
|
@ -23,13 +23,6 @@ type RefreshTokenErrorResp struct {
|
|||||||
ErrorDescription string `json:"error_description"`
|
ErrorDescription string `json:"error_description"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type CurrentAccountResp struct {
|
|
||||||
RootInfo struct {
|
|
||||||
RootNamespaceId string `json:"root_namespace_id"`
|
|
||||||
HomeNamespaceId string `json:"home_namespace_id"`
|
|
||||||
} `json:"root_info"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type File struct {
|
type File struct {
|
||||||
Tag string `json:".tag"`
|
Tag string `json:".tag"`
|
||||||
Name string `json:"name"`
|
Name string `json:"name"`
|
||||||
|
@ -46,22 +46,12 @@ func (d *Dropbox) refreshToken() error {
|
|||||||
func (d *Dropbox) request(uri, method string, callback base.ReqCallback, retry ...bool) ([]byte, error) {
|
func (d *Dropbox) request(uri, method string, callback base.ReqCallback, retry ...bool) ([]byte, error) {
|
||||||
req := base.RestyClient.R()
|
req := base.RestyClient.R()
|
||||||
req.SetHeader("Authorization", "Bearer "+d.AccessToken)
|
req.SetHeader("Authorization", "Bearer "+d.AccessToken)
|
||||||
if d.RootNamespaceId != "" {
|
if method == http.MethodPost {
|
||||||
apiPathRootJson, err := utils.Json.MarshalToString(map[string]interface{}{
|
req.SetHeader("Content-Type", "application/json")
|
||||||
".tag": "root",
|
|
||||||
"root": d.RootNamespaceId,
|
|
||||||
})
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
req.SetHeader("Dropbox-API-Path-Root", apiPathRootJson)
|
|
||||||
}
|
}
|
||||||
if callback != nil {
|
if callback != nil {
|
||||||
callback(req)
|
callback(req)
|
||||||
}
|
}
|
||||||
if method == http.MethodPost && req.Body != nil {
|
|
||||||
req.SetHeader("Content-Type", "application/json")
|
|
||||||
}
|
|
||||||
var e ErrorResp
|
var e ErrorResp
|
||||||
req.SetError(&e)
|
req.SetError(&e)
|
||||||
res, err := req.Execute(method, d.base+uri)
|
res, err := req.Execute(method, d.base+uri)
|
||||||
|
@ -1,132 +0,0 @@
|
|||||||
package febbox
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"github.com/alist-org/alist/v3/internal/op"
|
|
||||||
"github.com/alist-org/alist/v3/pkg/utils"
|
|
||||||
"golang.org/x/oauth2"
|
|
||||||
"golang.org/x/oauth2/clientcredentials"
|
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/internal/driver"
|
|
||||||
"github.com/alist-org/alist/v3/internal/errs"
|
|
||||||
"github.com/alist-org/alist/v3/internal/model"
|
|
||||||
)
|
|
||||||
|
|
||||||
type FebBox struct {
|
|
||||||
model.Storage
|
|
||||||
Addition
|
|
||||||
accessToken string
|
|
||||||
oauth2Token oauth2.TokenSource
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) Config() driver.Config {
|
|
||||||
return config
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) GetAddition() driver.Additional {
|
|
||||||
return &d.Addition
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) Init(ctx context.Context) error {
|
|
||||||
// 初始化 oauth2Config
|
|
||||||
oauth2Config := &clientcredentials.Config{
|
|
||||||
ClientID: d.ClientID,
|
|
||||||
ClientSecret: d.ClientSecret,
|
|
||||||
AuthStyle: oauth2.AuthStyleInParams,
|
|
||||||
TokenURL: "https://api.febbox.com/oauth/token",
|
|
||||||
}
|
|
||||||
|
|
||||||
d.initializeOAuth2Token(ctx, oauth2Config, d.Addition.RefreshToken)
|
|
||||||
|
|
||||||
token, err := d.oauth2Token.Token()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
d.accessToken = token.AccessToken
|
|
||||||
d.Addition.RefreshToken = token.RefreshToken
|
|
||||||
op.MustSaveDriverStorage(d)
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) Drop(ctx context.Context) error {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
|
||||||
files, err := d.getFilesList(dir.GetID())
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
|
|
||||||
return fileToObj(src), nil
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
|
||||||
var ip string
|
|
||||||
if d.Addition.UserIP != "" {
|
|
||||||
ip = d.Addition.UserIP
|
|
||||||
} else {
|
|
||||||
ip = args.IP
|
|
||||||
}
|
|
||||||
|
|
||||||
url, err := d.getDownloadLink(file.GetID(), ip)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return &model.Link{
|
|
||||||
URL: url,
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
|
|
||||||
err := d.makeDir(parentDir.GetID(), dirName)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
|
|
||||||
err := d.move(srcObj.GetID(), dstDir.GetID())
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
|
|
||||||
err := d.rename(srcObj.GetID(), newName)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
|
|
||||||
err := d.copy(srcObj.GetID(), dstDir.GetID())
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) Remove(ctx context.Context, obj model.Obj) error {
|
|
||||||
err := d.remove(obj.GetID())
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
|
|
||||||
return nil, errs.NotImplement
|
|
||||||
}
|
|
||||||
|
|
||||||
var _ driver.Driver = (*FebBox)(nil)
|
|
@ -1,36 +0,0 @@
package febbox

import (
	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/op"
)

type Addition struct {
	driver.RootID
	ClientID     string `json:"client_id" required:"true" default:""`
	ClientSecret string `json:"client_secret" required:"true" default:""`
	RefreshToken string
	SortRule     string `json:"sort_rule" required:"true" type:"select" options:"size_asc,size_desc,name_asc,name_desc,update_asc,update_desc,ext_asc,ext_desc" default:"name_asc"`
	PageSize     int64  `json:"page_size" required:"true" type:"number" default:"100" help:"list api per page size of FebBox driver"`
	UserIP       string `json:"user_ip" default:"" help:"user ip address for download link which can speed up the download"`
}

var config = driver.Config{
	Name:              "FebBox",
	LocalSort:         false,
	OnlyLocal:         false,
	OnlyProxy:         false,
	NoCache:           false,
	NoUpload:          true,
	NeedMs:            false,
	DefaultRoot:       "0",
	CheckStatus:       false,
	Alert:             "",
	NoOverwriteUpload: false,
}

func init() {
	op.RegisterDriver(func() driver.Driver {
		return &FebBox{}
	})
}
@ -1,88 +0,0 @@
package febbox

import (
	"context"
	"encoding/json"
	"errors"
	"net/http"
	"net/url"
	"strings"
	"time"

	"golang.org/x/oauth2"
	"golang.org/x/oauth2/clientcredentials"
)

type customTokenSource struct {
	config       *clientcredentials.Config
	ctx          context.Context
	refreshToken string
}

func (c *customTokenSource) Token() (*oauth2.Token, error) {
	v := url.Values{}
	if c.refreshToken != "" {
		v.Set("grant_type", "refresh_token")
		v.Set("refresh_token", c.refreshToken)
	} else {
		v.Set("grant_type", "client_credentials")
	}

	v.Set("client_id", c.config.ClientID)
	v.Set("client_secret", c.config.ClientSecret)

	req, err := http.NewRequest("POST", c.config.TokenURL, strings.NewReader(v.Encode()))
	if err != nil {
		return nil, err
	}
	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")

	resp, err := http.DefaultClient.Do(req.WithContext(c.ctx))
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, errors.New("oauth2: cannot fetch token")
	}

	var tokenResp struct {
		Code int    `json:"code"`
		Msg  string `json:"msg"`
		Data struct {
			AccessToken  string `json:"access_token"`
			ExpiresIn    int64  `json:"expires_in"`
			TokenType    string `json:"token_type"`
			Scope        string `json:"scope"`
			RefreshToken string `json:"refresh_token"`
		} `json:"data"`
	}

	if err := json.NewDecoder(resp.Body).Decode(&tokenResp); err != nil {
		return nil, err
	}

	if tokenResp.Code != 1 {
		return nil, errors.New("oauth2: server response error")
	}

	c.refreshToken = tokenResp.Data.RefreshToken

	token := &oauth2.Token{
		AccessToken:  tokenResp.Data.AccessToken,
		TokenType:    tokenResp.Data.TokenType,
		RefreshToken: tokenResp.Data.RefreshToken,
		Expiry:       time.Now().Add(time.Duration(tokenResp.Data.ExpiresIn) * time.Second),
	}

	return token, nil
}

func (d *FebBox) initializeOAuth2Token(ctx context.Context, oauth2Config *clientcredentials.Config, refreshToken string) {
	d.oauth2Token = oauth2.ReuseTokenSource(nil, &customTokenSource{
		config:       oauth2Config,
		ctx:          ctx,
		refreshToken: refreshToken,
	})
}
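
Note (reviewer sketch, not part of the diff): the token source above only pays off once it is wrapped by oauth2.ReuseTokenSource, which initializeOAuth2Token does. A minimal usage sketch, assuming a FebBox driver value d with ClientID/ClientSecret already populated; tokenURL is a hypothetical constant, not something defined in this diff.

	// Sketch only: wire the custom token source into the driver, then let
	// ReuseTokenSource hand out the cached token and refresh it on expiry.
	cfg := &clientcredentials.Config{
		ClientID:     d.ClientID,
		ClientSecret: d.ClientSecret,
		TokenURL:     tokenURL, // hypothetical constant, not defined in this diff
	}
	d.initializeOAuth2Token(ctx, cfg, d.Addition.RefreshToken)

	token, err := d.oauth2Token.Token() // refreshed via customTokenSource when needed
	if err != nil {
		return err
	}
	_ = token.AccessToken
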
@ -1,123 +0,0 @@
package febbox

import (
	"fmt"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/pkg/utils"
	hash_extend "github.com/alist-org/alist/v3/pkg/utils/hash"
	"strconv"
	"time"
)

type ErrResp struct {
	ErrorCode     int64   `json:"code"`
	ErrorMsg      string  `json:"msg"`
	ServerRunTime float64 `json:"server_runtime"`
	ServerName    string  `json:"server_name"`
}

func (e *ErrResp) IsError() bool {
	return e.ErrorCode != 0 || e.ErrorMsg != "" || e.ServerRunTime != 0 || e.ServerName != ""
}

func (e *ErrResp) Error() string {
	return fmt.Sprintf("ErrorCode: %d ,Error: %s ,ServerRunTime: %f ,ServerName: %s", e.ErrorCode, e.ErrorMsg, e.ServerRunTime, e.ServerName)
}

type FileListResp struct {
	Code int    `json:"code"`
	Msg  string `json:"msg"`
	Data struct {
		FileList []File `json:"file_list"`
		ShowType string `json:"show_type"`
	} `json:"data"`
}

type Rules struct {
	AllowCopy     int64 `json:"allow_copy"`
	AllowDelete   int64 `json:"allow_delete"`
	AllowDownload int64 `json:"allow_download"`
	AllowComment  int64 `json:"allow_comment"`
	HideLocation  int64 `json:"hide_location"`
}

type File struct {
	Fid              int64  `json:"fid"`
	UID              int64  `json:"uid"`
	FileSize         int64  `json:"file_size"`
	Path             string `json:"path"`
	FileName         string `json:"file_name"`
	Ext              string `json:"ext"`
	AddTime          int64  `json:"add_time"`
	FileCreateTime   int64  `json:"file_create_time"`
	FileUpdateTime   int64  `json:"file_update_time"`
	ParentID         int64  `json:"parent_id"`
	UpdateTime       int64  `json:"update_time"`
	LastOpenTime     int64  `json:"last_open_time"`
	IsDir            int64  `json:"is_dir"`
	Epub             int64  `json:"epub"`
	IsMusicList      int64  `json:"is_music_list"`
	OssFid           int64  `json:"oss_fid"`
	Faststart        int64  `json:"faststart"`
	HasVideoQuality  int64  `json:"has_video_quality"`
	TotalDownload    int64  `json:"total_download"`
	Status           int64  `json:"status"`
	Remark           string `json:"remark"`
	OldHash          string `json:"old_hash"`
	Hash             string `json:"hash"`
	HashType         string `json:"hash_type"`
	FromUID          int64  `json:"from_uid"`
	FidOrg           int64  `json:"fid_org"`
	ShareID          int64  `json:"share_id"`
	InvitePermission int64  `json:"invite_permission"`
	ThumbSmall       string `json:"thumb_small"`
	ThumbSmallWidth  int64  `json:"thumb_small_width"`
	ThumbSmallHeight int64  `json:"thumb_small_height"`
	Thumb            string `json:"thumb"`
	ThumbWidth       int64  `json:"thumb_width"`
	ThumbHeight      int64  `json:"thumb_height"`
	ThumbBig         string `json:"thumb_big"`
	ThumbBigWidth    int64  `json:"thumb_big_width"`
	ThumbBigHeight   int64  `json:"thumb_big_height"`
	IsCustomThumb    int64  `json:"is_custom_thumb"`
	Photos           int64  `json:"photos"`
	IsAlbum          int64  `json:"is_album"`
	ReadOnly         int64  `json:"read_only"`
	Rules            Rules  `json:"rules"`
	IsShared         int64  `json:"is_shared"`
}

func fileToObj(f File) *model.ObjThumb {
	return &model.ObjThumb{
		Object: model.Object{
			ID:       strconv.FormatInt(f.Fid, 10),
			Name:     f.FileName,
			Size:     f.FileSize,
			Ctime:    time.Unix(f.FileCreateTime, 0),
			Modified: time.Unix(f.FileUpdateTime, 0),
			IsFolder: f.IsDir == 1,
			HashInfo: utils.NewHashInfo(hash_extend.GCID, f.Hash),
		},
		Thumbnail: model.Thumbnail{
			Thumbnail: f.Thumb,
		},
	}
}

type FileDownloadResp struct {
	Code int    `json:"code"`
	Msg  string `json:"msg"`
	Data []struct {
		Error       int    `json:"error"`
		DownloadURL string `json:"download_url"`
		Hash        string `json:"hash"`
		HashType    string `json:"hash_type"`
		Fid         int    `json:"fid"`
		FileName    string `json:"file_name"`
		ParentID    int    `json:"parent_id"`
		FileSize    int    `json:"file_size"`
		Ext         string `json:"ext"`
		Thumb       string `json:"thumb"`
		VipLink     int    `json:"vip_link"`
	} `json:"data"`
}
@ -1,224 +0,0 @@
package febbox

import (
	"encoding/json"
	"errors"
	"github.com/alist-org/alist/v3/drivers/base"
	"github.com/alist-org/alist/v3/internal/op"
	"github.com/go-resty/resty/v2"
	"net/http"
	"strconv"
)

func (d *FebBox) refreshTokenByOAuth2() error {
	token, err := d.oauth2Token.Token()
	if err != nil {
		return err
	}
	d.Status = "work"
	d.accessToken = token.AccessToken
	d.Addition.RefreshToken = token.RefreshToken
	op.MustSaveDriverStorage(d)
	return nil
}

func (d *FebBox) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
	req := base.RestyClient.R()
	// obtain the access_token via oauth2
	token, err := d.oauth2Token.Token()
	if err != nil {
		return nil, err
	}
	req.SetAuthScheme(token.TokenType).SetAuthToken(token.AccessToken)

	if callback != nil {
		callback(req)
	}
	if resp != nil {
		req.SetResult(resp)
	}
	var e ErrResp
	req.SetError(&e)
	res, err := req.Execute(method, url)
	if err != nil {
		return nil, err
	}

	switch e.ErrorCode {
	case 0:
		return res.Body(), nil
	case 1:
		return res.Body(), nil
	case -10001:
		if e.ServerName != "" {
			// access_token expired
			if err = d.refreshTokenByOAuth2(); err != nil {
				return nil, err
			}
			return d.request(url, method, callback, resp)
		} else {
			return nil, errors.New(e.Error())
		}
	default:
		return nil, errors.New(e.Error())
	}
}

func (d *FebBox) getFilesList(id string) ([]File, error) {
	if d.PageSize <= 0 {
		d.PageSize = 100
	}
	res, err := d.listWithLimit(id, d.PageSize)
	if err != nil {
		return nil, err
	}
	return *res, nil
}

func (d *FebBox) listWithLimit(dirID string, pageLimit int64) (*[]File, error) {
	var files []File
	page := int64(1)
	for {
		result, err := d.getFiles(dirID, page, pageLimit)
		if err != nil {
			return nil, err
		}
		files = append(files, *result...)
		if int64(len(*result)) < pageLimit {
			break
		} else {
			page++
		}
	}
	return &files, nil
}

func (d *FebBox) getFiles(dirID string, page, pageLimit int64) (*[]File, error) {
	var fileList FileListResp
	queryParams := map[string]string{
		"module":    "file_list",
		"parent_id": dirID,
		"page":      strconv.FormatInt(page, 10),
		"pagelimit": strconv.FormatInt(pageLimit, 10),
		"order":     d.Addition.SortRule,
	}

	res, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
		req.SetMultipartFormData(queryParams)
	}, &fileList)
	if err != nil {
		return nil, err
	}

	if err = json.Unmarshal(res, &fileList); err != nil {
		return nil, err
	}

	return &fileList.Data.FileList, nil
}

func (d *FebBox) getDownloadLink(id string, ip string) (string, error) {
	var fileDownloadResp FileDownloadResp
	queryParams := map[string]string{
		"module": "file_get_download_url",
		"fids[]": id,
		"ip":     ip,
	}

	res, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
		req.SetMultipartFormData(queryParams)
	}, &fileDownloadResp)
	if err != nil {
		return "", err
	}

	if err = json.Unmarshal(res, &fileDownloadResp); err != nil {
		return "", err
	}

	return fileDownloadResp.Data[0].DownloadURL, nil
}

func (d *FebBox) makeDir(id string, name string) error {
	queryParams := map[string]string{
		"module":    "create_dir",
		"parent_id": id,
		"name":      name,
	}

	_, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
		req.SetMultipartFormData(queryParams)
	}, nil)
	if err != nil {
		return err
	}

	return nil
}

func (d *FebBox) move(id string, id2 string) error {
	queryParams := map[string]string{
		"module": "file_move",
		"fids[]": id,
		"to":     id2,
	}

	_, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
		req.SetMultipartFormData(queryParams)
	}, nil)
	if err != nil {
		return err
	}

	return nil
}

func (d *FebBox) rename(id string, name string) error {
	queryParams := map[string]string{
		"module": "file_rename",
		"fid":    id,
		"name":   name,
	}

	_, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
		req.SetMultipartFormData(queryParams)
	}, nil)
	if err != nil {
		return err
	}

	return nil
}

func (d *FebBox) copy(id string, id2 string) error {
	queryParams := map[string]string{
		"module": "file_copy",
		"fids[]": id,
		"to":     id2,
	}

	_, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
		req.SetMultipartFormData(queryParams)
	}, nil)
	if err != nil {
		return err
	}

	return nil
}

func (d *FebBox) remove(id string) error {
	queryParams := map[string]string{
		"module": "file_delete",
		"fids[]": id,
	}

	_, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
		req.SetMultipartFormData(queryParams)
	}, nil)
	if err != nil {
		return err
	}

	return nil
}
@ -39,7 +39,7 @@ func (d *FTP) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]m
 	if err := d.login(); err != nil {
 		return nil, err
 	}
-	entries, err := d.conn.List(encode(dir.GetPath(), d.Encoding))
+	entries, err := d.conn.List(dir.GetPath())
 	if err != nil {
 		return nil, err
 	}

@ -49,7 +49,7 @@ func (d *FTP) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]m
 			continue
 		}
 		f := model.Object{
-			Name:     decode(entry.Name, d.Encoding),
+			Name:     entry.Name,
 			Size:     int64(entry.Size),
 			Modified: entry.Time,
 			IsFolder: entry.Type == ftp.EntryTypeFolder,

@ -64,7 +64,7 @@ func (d *FTP) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*m
 		return nil, err
 	}

-	r := NewFileReader(d.conn, encode(file.GetPath(), d.Encoding), file.GetSize())
+	r := NewFileReader(d.conn, file.GetPath(), file.GetSize())
 	link := &model.Link{
 		MFile: r,
 	}

@ -75,27 +75,21 @@ func (d *FTP) MakeDir(ctx context.Context, parentDir model.Obj, dirName string)
 	if err := d.login(); err != nil {
 		return err
 	}
-	return d.conn.MakeDir(encode(stdpath.Join(parentDir.GetPath(), dirName), d.Encoding))
+	return d.conn.MakeDir(stdpath.Join(parentDir.GetPath(), dirName))
 }

 func (d *FTP) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
 	if err := d.login(); err != nil {
 		return err
 	}
-	return d.conn.Rename(
-		encode(srcObj.GetPath(), d.Encoding),
-		encode(stdpath.Join(dstDir.GetPath(), srcObj.GetName()), d.Encoding),
-	)
+	return d.conn.Rename(srcObj.GetPath(), stdpath.Join(dstDir.GetPath(), srcObj.GetName()))
 }

 func (d *FTP) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
 	if err := d.login(); err != nil {
 		return err
 	}
-	return d.conn.Rename(
-		encode(srcObj.GetPath(), d.Encoding),
-		encode(stdpath.Join(stdpath.Dir(srcObj.GetPath()), newName), d.Encoding),
-	)
+	return d.conn.Rename(srcObj.GetPath(), stdpath.Join(stdpath.Dir(srcObj.GetPath()), newName))
 }

 func (d *FTP) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {

@ -106,11 +100,10 @@ func (d *FTP) Remove(ctx context.Context, obj model.Obj) error {
 	if err := d.login(); err != nil {
 		return err
 	}
-	path := encode(obj.GetPath(), d.Encoding)
 	if obj.IsDir() {
-		return d.conn.RemoveDirRecur(path)
+		return d.conn.RemoveDirRecur(obj.GetPath())
 	} else {
-		return d.conn.Delete(path)
+		return d.conn.Delete(obj.GetPath())
 	}
 }

@ -119,8 +112,7 @@ func (d *FTP) Put(ctx context.Context, dstDir model.Obj, stream model.FileStream
 		return err
 	}
 	// TODO: support cancel
-	path := stdpath.Join(dstDir.GetPath(), stream.GetName())
-	return d.conn.Stor(encode(path, d.Encoding), stream)
+	return d.conn.Stor(stdpath.Join(dstDir.GetPath(), stream.GetName()), stream)
 }

 var _ driver.Driver = (*FTP)(nil)
@ -3,28 +3,10 @@ package ftp
 import (
 	"github.com/alist-org/alist/v3/internal/driver"
 	"github.com/alist-org/alist/v3/internal/op"
-	"github.com/axgle/mahonia"
 )

-func encode(str string, encoding string) string {
-	if encoding == "" {
-		return str
-	}
-	encoder := mahonia.NewEncoder(encoding)
-	return encoder.ConvertString(str)
-}
-
-func decode(str string, encoding string) string {
-	if encoding == "" {
-		return str
-	}
-	decoder := mahonia.NewDecoder(encoding)
-	return decoder.ConvertString(str)
-}
-
 type Addition struct {
 	Address  string `json:"address" required:"true"`
-	Encoding string `json:"encoding" required:"true"`
 	Username string `json:"username" required:"true"`
 	Password string `json:"password" required:"true"`
 	driver.RootPath
@ -151,7 +151,7 @@ func (d *GooglePhoto) getMedia(id string) (MediaItem, error) {
 	var resp MediaItem

 	query := map[string]string{
-		"fields": "mediaMetadata,baseUrl,mimeType",
+		"fields": "baseUrl,mimeType",
 	}
 	_, err := d.request(fmt.Sprintf("https://photoslibrary.googleapis.com/v1/mediaItems/%s", id), http.MethodGet, func(req *resty.Request) {
 		req.SetQueryParams(query)
@ -1,406 +0,0 @@
package halalcloud

import (
	"context"
	"crypto/sha1"
	"fmt"
	"github.com/alist-org/alist/v3/drivers/base"
	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/internal/op"
	"github.com/alist-org/alist/v3/pkg/http_range"
	"github.com/alist-org/alist/v3/pkg/utils"
	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/credentials"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/s3/s3manager"
	"github.com/city404/v6-public-rpc-proto/go/v6/common"
	pbPublicUser "github.com/city404/v6-public-rpc-proto/go/v6/user"
	pubUserFile "github.com/city404/v6-public-rpc-proto/go/v6/userfile"
	"github.com/rclone/rclone/lib/readers"
	"github.com/zzzhr1990/go-common-entity/userfile"
	"io"
	"net/url"
	"path"
	"strconv"
	"time"
)

type HalalCloud struct {
	*HalalCommon
	model.Storage
	Addition

	uploadThread int
}

func (d *HalalCloud) Config() driver.Config {
	return config
}

func (d *HalalCloud) GetAddition() driver.Additional {
	return &d.Addition
}

func (d *HalalCloud) Init(ctx context.Context) error {
	d.uploadThread, _ = strconv.Atoi(d.UploadThread)
	if d.uploadThread < 1 || d.uploadThread > 32 {
		d.uploadThread, d.UploadThread = 3, "3"
	}

	if d.HalalCommon == nil {
		d.HalalCommon = &HalalCommon{
			Common: &Common{},
			AuthService: &AuthService{
				appID: func() string {
					if d.Addition.AppID != "" {
						return d.Addition.AppID
					}
					return AppID
				}(),
				appVersion: func() string {
					if d.Addition.AppVersion != "" {
						return d.Addition.AppVersion
					}
					return AppVersion
				}(),
				appSecret: func() string {
					if d.Addition.AppSecret != "" {
						return d.Addition.AppSecret
					}
					return AppSecret
				}(),
				tr: &TokenResp{
					RefreshToken: d.Addition.RefreshToken,
				},
			},
			UserInfo: &UserInfo{},
			refreshTokenFunc: func(token string) error {
				d.Addition.RefreshToken = token
				op.MustSaveDriverStorage(d)
				return nil
			},
		}
	}

	// avoid logging in again if a valid session already exists
	if d.Addition.RefreshToken == "" || !d.IsLogin() {
		as, err := d.NewAuthServiceWithOauth()
		if err != nil {
			d.GetStorage().SetStatus(fmt.Sprintf("%+v", err.Error()))
			return err
		}
		d.HalalCommon.AuthService = as
		d.SetTokenResp(as.tr)
		op.MustSaveDriverStorage(d)
	}
	var err error
	d.HalalCommon.serv, err = d.NewAuthService(d.Addition.RefreshToken)
	if err != nil {
		return err
	}

	return nil
}

func (d *HalalCloud) Drop(ctx context.Context) error {
	return nil
}

func (d *HalalCloud) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
	return d.getFiles(ctx, dir)
}

func (d *HalalCloud) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
	return d.getLink(ctx, file, args)
}

func (d *HalalCloud) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
	return d.makeDir(ctx, parentDir, dirName)
}

func (d *HalalCloud) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
	return d.move(ctx, srcObj, dstDir)
}

func (d *HalalCloud) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
	return d.rename(ctx, srcObj, newName)
}

func (d *HalalCloud) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
	return d.copy(ctx, srcObj, dstDir)
}

func (d *HalalCloud) Remove(ctx context.Context, obj model.Obj) error {
	return d.remove(ctx, obj)
}

func (d *HalalCloud) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
	return d.put(ctx, dstDir, stream, up)
}

func (d *HalalCloud) IsLogin() bool {
	if d.AuthService.tr == nil {
		return false
	}
	serv, err := d.NewAuthService(d.Addition.RefreshToken)
	if err != nil {
		return false
	}
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()
	result, err := pbPublicUser.NewPubUserClient(serv.GetGrpcConnection()).Get(ctx, &pbPublicUser.User{
		Identity: "",
	})
	if result == nil || err != nil {
		return false
	}
	d.UserInfo.Identity = result.Identity
	d.UserInfo.CreateTs = result.CreateTs
	d.UserInfo.Name = result.Name
	d.UserInfo.UpdateTs = result.UpdateTs
	return true
}

type HalalCommon struct {
	*Common
	*AuthService // login info
	*UserInfo    // user info
	refreshTokenFunc func(token string) error
	serv             *AuthService
}

func (d *HalalCloud) SetTokenResp(tr *TokenResp) {
	d.Addition.RefreshToken = tr.RefreshToken
}

func (d *HalalCloud) getFiles(ctx context.Context, dir model.Obj) ([]model.Obj, error) {

	files := make([]model.Obj, 0)
	limit := int64(100)
	token := ""
	client := pubUserFile.NewPubUserFileClient(d.HalalCommon.serv.GetGrpcConnection())

	opDir := d.GetCurrentDir(dir)

	for {
		result, err := client.List(ctx, &pubUserFile.FileListRequest{
			Parent: &pubUserFile.File{Path: opDir},
			ListInfo: &common.ScanListRequest{
				Limit: limit,
				Token: token,
			},
		})
		if err != nil {
			return nil, err
		}

		for i := 0; len(result.Files) > i; i++ {
			files = append(files, (*Files)(result.Files[i]))
		}

		if result.ListInfo == nil || result.ListInfo.Token == "" {
			break
		}
		token = result.ListInfo.Token

	}
	return files, nil
}

func (d *HalalCloud) getLink(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {

	client := pubUserFile.NewPubUserFileClient(d.HalalCommon.serv.GetGrpcConnection())
	ctx1, cancelFunc := context.WithCancel(context.Background())
	defer cancelFunc()

	result, err := client.ParseFileSlice(ctx1, (*pubUserFile.File)(file.(*Files)))
	if err != nil {
		return nil, err
	}
	fileAddrs := []*pubUserFile.SliceDownloadInfo{}
	var addressDuration int64

	nodesNumber := len(result.RawNodes)
	nodesIndex := nodesNumber - 1
	startIndex, endIndex := 0, nodesIndex
	for nodesIndex >= 0 {
		if nodesIndex >= 200 {
			endIndex = 200
		} else {
			endIndex = nodesNumber
		}
		for ; endIndex <= nodesNumber; endIndex += 200 {
			if endIndex == 0 {
				endIndex = 1
			}
			sliceAddress, err := client.GetSliceDownloadAddress(ctx, &pubUserFile.SliceDownloadAddressRequest{
				Identity: result.RawNodes[startIndex:endIndex],
				Version:  1,
			})
			if err != nil {
				return nil, err
			}
			addressDuration = sliceAddress.ExpireAt
			fileAddrs = append(fileAddrs, sliceAddress.Addresses...)
			startIndex = endIndex
			nodesIndex -= 200
		}

	}

	size := result.FileSize
	chunks := getChunkSizes(result.Sizes)
	var finalClosers utils.Closers
	resultRangeReader := func(ctx context.Context, httpRange http_range.Range) (io.ReadCloser, error) {
		length := httpRange.Length
		if httpRange.Length >= 0 && httpRange.Start+httpRange.Length >= size {
			length = -1
		}
		if err != nil {
			return nil, fmt.Errorf("open download file failed: %w", err)
		}
		oo := &openObject{
			ctx:     ctx,
			d:       fileAddrs,
			chunk:   &[]byte{},
			chunks:  &chunks,
			skip:    httpRange.Start,
			sha:     result.Sha1,
			shaTemp: sha1.New(),
		}
		finalClosers.Add(oo)

		return readers.NewLimitedReadCloser(oo, length), nil
	}

	var duration time.Duration
	if addressDuration != 0 {
		duration = time.Until(time.UnixMilli(addressDuration))
	} else {
		duration = time.Until(time.Now().Add(time.Hour))
	}

	resultRangeReadCloser := &model.RangeReadCloser{RangeReader: resultRangeReader, Closers: finalClosers}
	return &model.Link{
		RangeReadCloser: resultRangeReadCloser,
		Expiration:      &duration,
	}, nil
}

func (d *HalalCloud) makeDir(ctx context.Context, dir model.Obj, name string) (model.Obj, error) {
	newDir := userfile.NewFormattedPath(d.GetCurrentOpDir(dir, []string{name}, 0)).GetPath()
	_, err := pubUserFile.NewPubUserFileClient(d.HalalCommon.serv.GetGrpcConnection()).Create(ctx, &pubUserFile.File{
		Path: newDir,
	})
	return nil, err
}

func (d *HalalCloud) move(ctx context.Context, obj model.Obj, dir model.Obj) (model.Obj, error) {
	oldDir := userfile.NewFormattedPath(d.GetCurrentDir(obj)).GetPath()
	newDir := userfile.NewFormattedPath(d.GetCurrentDir(dir)).GetPath()
	_, err := pubUserFile.NewPubUserFileClient(d.HalalCommon.serv.GetGrpcConnection()).Move(ctx, &pubUserFile.BatchOperationRequest{
		Source: []*pubUserFile.File{
			{
				Identity: obj.GetID(),
				Path:     oldDir,
			},
		},
		Dest: &pubUserFile.File{
			Identity: dir.GetID(),
			Path:     newDir,
		},
	})
	return nil, err
}

func (d *HalalCloud) rename(ctx context.Context, obj model.Obj, name string) (model.Obj, error) {
	id := obj.GetID()
	newPath := userfile.NewFormattedPath(d.GetCurrentOpDir(obj, []string{name}, 0)).GetPath()

	_, err := pubUserFile.NewPubUserFileClient(d.HalalCommon.serv.GetGrpcConnection()).Rename(ctx, &pubUserFile.File{
		Path:     newPath,
		Identity: id,
		Name:     name,
	})
	return nil, err
}

func (d *HalalCloud) copy(ctx context.Context, obj model.Obj, dir model.Obj) (model.Obj, error) {
	id := obj.GetID()
	sourcePath := userfile.NewFormattedPath(d.GetCurrentDir(obj)).GetPath()
	if len(id) > 0 {
		sourcePath = ""
	}
	dest := &pubUserFile.File{
		Identity: dir.GetID(),
		Path:     userfile.NewFormattedPath(d.GetCurrentDir(dir)).GetPath(),
	}
	_, err := pubUserFile.NewPubUserFileClient(d.HalalCommon.serv.GetGrpcConnection()).Copy(ctx, &pubUserFile.BatchOperationRequest{
		Source: []*pubUserFile.File{
			{
				Path:     sourcePath,
				Identity: id,
			},
		},
		Dest: dest,
	})
	return nil, err
}

func (d *HalalCloud) remove(ctx context.Context, obj model.Obj) error {
	id := obj.GetID()
	newPath := userfile.NewFormattedPath(d.GetCurrentDir(obj)).GetPath()
	//if len(id) > 0 {
	//	newPath = ""
	//}
	_, err := pubUserFile.NewPubUserFileClient(d.HalalCommon.serv.GetGrpcConnection()).Delete(ctx, &pubUserFile.BatchOperationRequest{
		Source: []*pubUserFile.File{
			{
				Path:     newPath,
				Identity: id,
			},
		},
	})
	return err
}

func (d *HalalCloud) put(ctx context.Context, dstDir model.Obj, fileStream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {

	newDir := path.Join(dstDir.GetPath(), fileStream.GetName())

	result, err := pubUserFile.NewPubUserFileClient(d.HalalCommon.serv.GetGrpcConnection()).CreateUploadToken(ctx, &pubUserFile.File{
		Path: newDir,
	})
	if err != nil {
		return nil, err
	}
	u, _ := url.Parse(result.Endpoint)
	u.Host = "s3." + u.Host
	result.Endpoint = u.String()
	s, err := session.NewSession(&aws.Config{
		HTTPClient:       base.HttpClient,
		Credentials:      credentials.NewStaticCredentials(result.AccessKey, result.SecretKey, result.Token),
		Region:           aws.String(result.Region),
		Endpoint:         aws.String(result.Endpoint),
		S3ForcePathStyle: aws.Bool(true),
	})
	if err != nil {
		return nil, err
	}
	uploader := s3manager.NewUploader(s, func(u *s3manager.Uploader) {
		u.Concurrency = d.uploadThread
	})
	if fileStream.GetSize() > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
		uploader.PartSize = fileStream.GetSize() / (s3manager.MaxUploadParts - 1)
	}
	_, err = uploader.UploadWithContext(ctx, &s3manager.UploadInput{
		Bucket: aws.String(result.Bucket),
		Key:    aws.String(result.Key),
		Body:   io.TeeReader(fileStream, driver.NewProgress(fileStream.GetSize(), up)),
	})
	return nil, err

}

var _ driver.Driver = (*HalalCloud)(nil)
@ -1,38 +0,0 @@
package halalcloud

import (
	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/op"
)

type Addition struct {
	// Usually one of two
	driver.RootPath
	// define other
	RefreshToken string `json:"refresh_token" required:"true" help:"login type is refresh_token,this is required"`
	UploadThread string `json:"upload_thread" default:"3" help:"1 <= thread <= 32"`

	AppID      string `json:"app_id" required:"true" default:"alist/10001"`
	AppVersion string `json:"app_version" required:"true" default:"1.0.0"`
	AppSecret  string `json:"app_secret" required:"true" default:"bR4SJwOkvnG5WvVJ"`
}

var config = driver.Config{
	Name:              "HalalCloud",
	LocalSort:         false,
	OnlyLocal:         true,
	OnlyProxy:         true,
	NoCache:           false,
	NoUpload:          false,
	NeedMs:            false,
	DefaultRoot:       "/",
	CheckStatus:       false,
	Alert:             "",
	NoOverwriteUpload: false,
}

func init() {
	op.RegisterDriver(func() driver.Driver {
		return &HalalCloud{}
	})
}
@ -1,52 +0,0 @@
package halalcloud

import "google.golang.org/grpc"

func defaultOptions() halalOptions {
	return halalOptions{
		// onRefreshTokenRefreshed: func(string) {},
		grpcOptions: []grpc.DialOption{
			grpc.WithDefaultCallOptions(grpc.MaxCallRecvMsgSize(1024 * 1024 * 32)),
			// grpc.WithMaxMsgSize(1024 * 1024 * 1024),
		},
	}
}

type HalalOption interface {
	apply(*halalOptions)
}

// halalOptions configure a RPC call. halalOptions are set by the HalalOption
// values passed to Dial.
type halalOptions struct {
	onTokenRefreshed func(accessToken string, accessTokenExpiredAt int64, refreshToken string, refreshTokenExpiredAt int64)
	grpcOptions      []grpc.DialOption
}

// funcDialOption wraps a function that modifies halalOptions into an
// implementation of the DialOption interface.
type funcDialOption struct {
	f func(*halalOptions)
}

func (fdo *funcDialOption) apply(do *halalOptions) {
	fdo.f(do)
}

func newFuncDialOption(f func(*halalOptions)) *funcDialOption {
	return &funcDialOption{
		f: f,
	}
}

func WithRefreshTokenRefreshedCallback(s func(accessToken string, accessTokenExpiredAt int64, refreshToken string, refreshTokenExpiredAt int64)) HalalOption {
	return newFuncDialOption(func(o *halalOptions) {
		o.onTokenRefreshed = s
	})
}

func WithGrpcDialOptions(opts ...grpc.DialOption) HalalOption {
	return newFuncDialOption(func(o *halalOptions) {
		o.grpcOptions = opts
	})
}
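
Note (reviewer sketch, not part of the diff): the helpers above are a standard functional-options pattern. A small sketch of how they compose when building the auth service, with the callback body left as a placeholder:

	// Sketch only: start from the defaults, then apply caller-supplied options.
	opts := defaultOptions()
	for _, o := range []HalalOption{
		WithGrpcDialOptions(grpc.WithDefaultCallOptions(grpc.MaxCallRecvMsgSize(1024 * 1024 * 32))),
		WithRefreshTokenRefreshedCallback(func(accessToken string, accessTokenExpiredAt int64, refreshToken string, refreshTokenExpiredAt int64) {
			// persist the rotated refresh token here (placeholder)
		}),
	} {
		o.apply(&opts)
	}
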
@ -1,101 +0,0 @@
package halalcloud

import (
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/pkg/utils"
	"github.com/city404/v6-public-rpc-proto/go/v6/common"
	pubUserFile "github.com/city404/v6-public-rpc-proto/go/v6/userfile"
	"google.golang.org/grpc"
	"time"
)

type AuthService struct {
	appID          string
	appVersion     string
	appSecret      string
	grpcConnection *grpc.ClientConn
	dopts          halalOptions
	tr             *TokenResp
}

type TokenResp struct {
	AccessToken           string `json:"accessToken,omitempty"`
	AccessTokenExpiredAt  int64  `json:"accessTokenExpiredAt,omitempty"`
	RefreshToken          string `json:"refreshToken,omitempty"`
	RefreshTokenExpiredAt int64  `json:"refreshTokenExpiredAt,omitempty"`
}

type UserInfo struct {
	Identity string `json:"identity,omitempty"`
	UpdateTs int64  `json:"updateTs,omitempty"`
	Name     string `json:"name,omitempty"`
	CreateTs int64  `json:"createTs,omitempty"`
}

type OrderByInfo struct {
	Field string `json:"field,omitempty"`
	Asc   bool   `json:"asc,omitempty"`
}

type ListInfo struct {
	Token   string         `json:"token,omitempty"`
	Limit   int64          `json:"limit,omitempty"`
	OrderBy []*OrderByInfo `json:"order_by,omitempty"`
	Version int32          `json:"version,omitempty"`
}

type FilesList struct {
	Files    []*Files                `json:"files,omitempty"`
	ListInfo *common.ScanListRequest `json:"list_info,omitempty"`
}

var _ model.Obj = (*Files)(nil)

type Files pubUserFile.File

func (f *Files) GetSize() int64 {
	return f.Size
}

func (f *Files) GetName() string {
	return f.Name
}

func (f *Files) ModTime() time.Time {
	return time.UnixMilli(f.UpdateTs)
}

func (f *Files) CreateTime() time.Time {
	return time.UnixMilli(f.UpdateTs)
}

func (f *Files) IsDir() bool {
	return f.Dir
}

func (f *Files) GetHash() utils.HashInfo {
	return utils.HashInfo{}
}

func (f *Files) GetID() string {
	if len(f.Identity) == 0 {
		f.Identity = "/"
	}
	return f.Identity
}

func (f *Files) GetPath() string {
	return f.Path
}

type SteamFile struct {
	file model.File
}

func (s *SteamFile) Read(p []byte) (n int, err error) {
	return s.file.Read(p)
}

func (s *SteamFile) Close() error {
	return s.file.Close()
}
@ -1,385 +0,0 @@
package halalcloud

import (
	"bytes"
	"context"
	"crypto/md5"
	"crypto/tls"
	"encoding/hex"
	"errors"
	"fmt"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/pkg/utils"
	pbPublicUser "github.com/city404/v6-public-rpc-proto/go/v6/user"
	pubUserFile "github.com/city404/v6-public-rpc-proto/go/v6/userfile"
	"github.com/google/uuid"
	"github.com/ipfs/go-cid"
	"google.golang.org/grpc"
	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/credentials"
	"google.golang.org/grpc/metadata"
	"google.golang.org/grpc/status"
	"hash"
	"io"
	"net/http"
	"strconv"
	"strings"
	"sync"
	"time"
)

const (
	AppID      = "alist/10001"
	AppVersion = "1.0.0"
	AppSecret  = "bR4SJwOkvnG5WvVJ"
)

const (
	grpcServer     = "grpcuserapi.2dland.cn:443"
	grpcServerAuth = "grpcuserapi.2dland.cn"
)

func (d *HalalCloud) NewAuthServiceWithOauth(options ...HalalOption) (*AuthService, error) {

	aService := &AuthService{}
	err2 := errors.New("")

	svc := d.HalalCommon.AuthService
	for _, opt := range options {
		opt.apply(&svc.dopts)
	}

	grpcOptions := svc.dopts.grpcOptions
	grpcOptions = append(grpcOptions, grpc.WithAuthority(grpcServerAuth), grpc.WithTransportCredentials(credentials.NewTLS(&tls.Config{})), grpc.WithUnaryInterceptor(func(ctx context.Context, method string, req, reply interface{}, cc *grpc.ClientConn, invoker grpc.UnaryInvoker, opts ...grpc.CallOption) error {
		ctxx := svc.signContext(method, ctx)
		err := invoker(ctxx, method, req, reply, cc, opts...) // invoking RPC method
		return err
	}))

	grpcConnection, err := grpc.NewClient(grpcServer, grpcOptions...)
	if err != nil {
		return nil, err
	}
	defer grpcConnection.Close()
	userClient := pbPublicUser.NewPubUserClient(grpcConnection)
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()
	stateString := uuid.New().String()
	// queryValues.Add("callback", oauthToken.Callback)
	oauthToken, err := userClient.CreateAuthToken(ctx, &pbPublicUser.LoginRequest{
		ReturnType: 2,
		State:      stateString,
		ReturnUrl:  "",
	})
	if err != nil {
		return nil, err
	}
	if len(oauthToken.State) < 1 {
		oauthToken.State = stateString
	}

	if oauthToken.Url != "" {

		return nil, fmt.Errorf(`need verify: <a target="_blank" href="%s">Click Here</a>`, oauthToken.Url)
	}

	return aService, err2

}

func (d *HalalCloud) NewAuthService(refreshToken string, options ...HalalOption) (*AuthService, error) {
	svc := d.HalalCommon.AuthService

	if len(refreshToken) < 1 {
		refreshToken = d.Addition.RefreshToken
	}

	if len(d.tr.AccessToken) > 0 {
		accessTokenExpiredAt := d.tr.AccessTokenExpiredAt
		current := time.Now().UnixMilli()
		if accessTokenExpiredAt < current {
			// access token expired
			d.tr.AccessToken = ""
			d.tr.AccessTokenExpiredAt = 0
		} else {
			svc.tr.AccessTokenExpiredAt = accessTokenExpiredAt
			svc.tr.AccessToken = d.tr.AccessToken
		}
	}

	for _, opt := range options {
		opt.apply(&svc.dopts)
	}

	grpcOptions := svc.dopts.grpcOptions
	grpcOptions = append(grpcOptions, grpc.WithDefaultCallOptions(grpc.MaxCallSendMsgSize(10*1024*1024), grpc.MaxCallRecvMsgSize(10*1024*1024)), grpc.WithAuthority(grpcServerAuth), grpc.WithTransportCredentials(credentials.NewTLS(&tls.Config{})), grpc.WithUnaryInterceptor(func(ctx context.Context, method string, req, reply interface{}, cc *grpc.ClientConn, invoker grpc.UnaryInvoker, opts ...grpc.CallOption) error {
		ctxx := svc.signContext(method, ctx)
		err := invoker(ctxx, method, req, reply, cc, opts...) // invoking RPC method
		if err != nil {
			grpcStatus, ok := status.FromError(err)

			if ok && grpcStatus.Code() == codes.Unauthenticated && strings.Contains(grpcStatus.Err().Error(), "invalid accesstoken") && len(refreshToken) > 0 {
				// refresh token
				refreshResponse, err := pbPublicUser.NewPubUserClient(cc).Refresh(ctx, &pbPublicUser.Token{
					RefreshToken: refreshToken,
				})
				if err != nil {
					return err
				}
				if len(refreshResponse.AccessToken) > 0 {
					svc.tr.AccessToken = refreshResponse.AccessToken
					svc.tr.AccessTokenExpiredAt = refreshResponse.AccessTokenExpireTs
					svc.OnAccessTokenRefreshed(refreshResponse.AccessToken, refreshResponse.AccessTokenExpireTs, refreshResponse.RefreshToken, refreshResponse.RefreshTokenExpireTs)
				}
				// retry
				ctxx := svc.signContext(method, ctx)
				err = invoker(ctxx, method, req, reply, cc, opts...) // invoking RPC method
				if err != nil {
					return err
				} else {
					return nil
				}
			}
		}
		return err
	}))
	grpcConnection, err := grpc.NewClient(grpcServer, grpcOptions...)

	if err != nil {
		return nil, err
	}

	svc.grpcConnection = grpcConnection
	return svc, err
}

func (s *AuthService) OnAccessTokenRefreshed(accessToken string, accessTokenExpiredAt int64, refreshToken string, refreshTokenExpiredAt int64) {
	s.tr.AccessToken = accessToken
	s.tr.AccessTokenExpiredAt = accessTokenExpiredAt
	s.tr.RefreshToken = refreshToken
	s.tr.RefreshTokenExpiredAt = refreshTokenExpiredAt

	if s.dopts.onTokenRefreshed != nil {
		s.dopts.onTokenRefreshed(accessToken, accessTokenExpiredAt, refreshToken, refreshTokenExpiredAt)
	}

}

func (s *AuthService) GetGrpcConnection() *grpc.ClientConn {
	return s.grpcConnection
}

func (s *AuthService) Close() {
	_ = s.grpcConnection.Close()
}

func (s *AuthService) signContext(method string, ctx context.Context) context.Context {
	var kvString []string
	currentTimeStamp := strconv.FormatInt(time.Now().UnixMilli(), 10)
	bufferedString := bytes.NewBufferString(method)
	kvString = append(kvString, "timestamp", currentTimeStamp)
	bufferedString.WriteString(currentTimeStamp)
	kvString = append(kvString, "appid", s.appID)
	bufferedString.WriteString(s.appID)
	kvString = append(kvString, "appversion", s.appVersion)
	bufferedString.WriteString(s.appVersion)
	if s.tr != nil && len(s.tr.AccessToken) > 0 {
		authorization := "Bearer " + s.tr.AccessToken
		kvString = append(kvString, "authorization", authorization)
		bufferedString.WriteString(authorization)
	}
	bufferedString.WriteString(s.appSecret)
	sign := GetMD5Hash(bufferedString.String())
	kvString = append(kvString, "sign", sign)
	return metadata.AppendToOutgoingContext(ctx, kvString...)
}

func (d *HalalCloud) GetCurrentOpDir(dir model.Obj, args []string, index int) string {
	currentDir := dir.GetPath()
	if len(currentDir) == 0 {
		currentDir = "/"
	}
	opPath := currentDir + "/" + args[index]
	if strings.HasPrefix(args[index], "/") {
		opPath = args[index]
	}
	return opPath
}

func (d *HalalCloud) GetCurrentDir(dir model.Obj) string {
	currentDir := dir.GetPath()
	if len(currentDir) == 0 {
		currentDir = "/"
	}
	return currentDir
}

type Common struct {
}

func getRawFiles(addr *pubUserFile.SliceDownloadInfo) ([]byte, error) {

	if addr == nil {
		return nil, errors.New("addr is nil")
	}

	client := http.Client{
		Timeout: time.Duration(60 * time.Second), // Set timeout to 60 seconds
	}
	resp, err := client.Get(addr.DownloadAddress)
	if err != nil {

		return nil, err
	}
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}
	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("bad status: %s, body: %s", resp.Status, body)
	}

	if addr.Encrypt > 0 {
		cd := uint8(addr.Encrypt)
		for idx := 0; idx < len(body); idx++ {
			body[idx] = body[idx] ^ cd
		}
	}

	if addr.StoreType != 10 {

		sourceCid, err := cid.Decode(addr.Identity)
		if err != nil {
			return nil, err
		}
		checkCid, err := sourceCid.Prefix().Sum(body)
		if err != nil {
			return nil, err
		}
		if !checkCid.Equals(sourceCid) {
			return nil, fmt.Errorf("bad cid: %s, body: %s", checkCid.String(), body)
		}
	}

	return body, nil

}

type openObject struct {
	ctx     context.Context
	mu      sync.Mutex
	d       []*pubUserFile.SliceDownloadInfo
	id      int
	skip    int64
	chunk   *[]byte
	chunks  *[]chunkSize
	closed  bool
	sha     string
	shaTemp hash.Hash
}

// get the next chunk
func (oo *openObject) getChunk(ctx context.Context) (err error) {
	if oo.id >= len(*oo.chunks) {
		return io.EOF
	}
	var chunk []byte
	err = utils.Retry(3, time.Second, func() (err error) {
		chunk, err = getRawFiles(oo.d[oo.id])
		return err
	})
	if err != nil {
		return err
	}
	oo.id++
	oo.chunk = &chunk
	return nil
}

// Read reads up to len(p) bytes into p.
func (oo *openObject) Read(p []byte) (n int, err error) {
	oo.mu.Lock()
	defer oo.mu.Unlock()
	if oo.closed {
		return 0, fmt.Errorf("read on closed file")
	}
	// Skip data at the start if requested
	for oo.skip > 0 {
		//size := 1024 * 1024
		_, size, err := oo.ChunkLocation(oo.id)
		if err != nil {
			return 0, err
		}
		if oo.skip < int64(size) {
			break
		}
		oo.id++
		oo.skip -= int64(size)
	}
	if len(*oo.chunk) == 0 {
		err = oo.getChunk(oo.ctx)
		if err != nil {
			return 0, err
		}
		if oo.skip > 0 {
			*oo.chunk = (*oo.chunk)[oo.skip:]
			oo.skip = 0
		}
	}
	n = copy(p, *oo.chunk)
	*oo.chunk = (*oo.chunk)[n:]

	oo.shaTemp.Write(*oo.chunk)

	return n, nil
}

// Close closed the file - MAC errors are reported here
func (oo *openObject) Close() (err error) {
	oo.mu.Lock()
	defer oo.mu.Unlock()
	if oo.closed {
		return nil
	}
	// verify the SHA-1
	if string(oo.shaTemp.Sum(nil)) != oo.sha {
		return fmt.Errorf("failed to finish download: %w", err)
	}

	oo.closed = true
	return nil
}

func GetMD5Hash(text string) string {
	tHash := md5.Sum([]byte(text))
	return hex.EncodeToString(tHash[:])
}

// chunkSize describes a size and position of chunk
type chunkSize struct {
	position int64
	size     int
}

func getChunkSizes(sliceSize []*pubUserFile.SliceSize) (chunks []chunkSize) {
	chunks = make([]chunkSize, 0)
	for _, s := range sliceSize {
		// special-case the last slice
		if s.EndIndex == 0 {
			s.EndIndex = s.StartIndex
		}
		for j := s.StartIndex; j <= s.EndIndex; j++ {
			chunks = append(chunks, chunkSize{position: j, size: int(s.Size)})
		}
	}
	return chunks
}

func (oo *openObject) ChunkLocation(id int) (position int64, size int, err error) {
	if id < 0 || id >= len(*oo.chunks) {
		return 0, 0, errors.New("invalid arguments")
	}

	return (*oo.chunks)[id].position, (*oo.chunks)[id].size, nil
}
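
Note (reviewer sketch, not part of the diff): signContext above signs every gRPC call by hashing the method name, the metadata values, and the app secret. A compact restatement of the scheme, where method, timestamp, appID, appVersion, accessToken, and appSecret stand in for the values the method reads from the AuthService:

	// Sketch only: the "sign" metadata value equals
	// md5(method + timestamp + appID + appVersion + ("Bearer "+accessToken, if logged in) + appSecret),
	// sent alongside the timestamp/appid/appversion/authorization pairs as plain gRPC metadata.
	payload := method + timestamp + appID + appVersion
	if accessToken != "" {
		payload += "Bearer " + accessToken
	}
	payload += appSecret
	sign := GetMD5Hash(payload)
	_ = sign
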
@ -30,12 +30,10 @@ type ILanZou struct {
	userID   string
	account  string
	upClient *resty.Client
-	conf     Conf
-	config   driver.Config
}

func (d *ILanZou) Config() driver.Config {
-	return d.config
+	return config
}

func (d *ILanZou) GetAddition() driver.Additional {
@ -67,28 +65,26 @@ func (d *ILanZou) Drop(ctx context.Context) error {

func (d *ILanZou) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
	offset := 1
+	limit := 60
	var res []ListItem
	for {
		var resp ListResp
		_, err := d.proved("/record/file/list", http.MethodGet, func(req *resty.Request) {
-			params := []string{
-				"offset=" + strconv.Itoa(offset),
-				"limit=60",
-				"folderId=" + dir.GetID(),
-				"type=0",
-			}
-			queryString := strings.Join(params, "&")
-			req.SetQueryString(queryString).SetResult(&resp)
+			req.SetQueryParams(map[string]string{
+				"type":     "0",
+				"folderId": dir.GetID(),
+				"offset":   strconv.Itoa(offset),
+				"limit":    strconv.Itoa(limit),
+			}).SetResult(&resp)
		})
		if err != nil {
			return nil, err
		}
		res = append(res, resp.List...)
-		if resp.Offset < resp.TotalPage {
-			offset++
-		} else {
+		if resp.TotalPage <= resp.Offset {
			break
		}
+		offset++
	}
	return utils.SliceConvert(res, func(f ListItem) (model.Obj, error) {
		updTime, err := time.ParseInLocation("2006-01-02 15:04:05", f.UpdTime, time.Local)
@ -116,53 +112,36 @@ func (d *ILanZou) List(ctx context.Context, dir model.Obj, args model.ListArgs)
}

func (d *ILanZou) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
-	u, err := url.Parse(d.conf.base + "/" + d.conf.unproved + "/file/redirect")
+	u, err := url.Parse("https://api.ilanzou.com/unproved/file/redirect")
	if err != nil {
		return nil, err
	}
-	ts, ts_str, err := getTimestamp(d.conf.secret)
-
-	params := []string{
-		"uuid=" + url.QueryEscape(d.UUID),
-		"devType=6",
-		"devCode=" + url.QueryEscape(d.UUID),
-		"devModel=chrome",
-		"devVersion=" + url.QueryEscape(d.conf.devVersion),
-		"appVersion=",
-		"timestamp=" + ts_str,
-		"appToken=" + url.QueryEscape(d.Token),
-		"enable=0",
-	}
-
-	downloadId, err := mopan.AesEncrypt([]byte(fmt.Sprintf("%s|%s", file.GetID(), d.userID)), d.conf.secret)
+	query := u.Query()
+	query.Set("uuid", d.UUID)
+	query.Set("devType", "6")
+	query.Set("devCode", d.UUID)
+	query.Set("devModel", "chrome")
+	query.Set("devVersion", "120")
+	query.Set("appVersion", "")
+	ts, err := getTimestamp()
	if err != nil {
		return nil, err
	}
-	params = append(params, "downloadId="+url.QueryEscape(hex.EncodeToString(downloadId)))
-
-	auth, err := mopan.AesEncrypt([]byte(fmt.Sprintf("%s|%d", file.GetID(), ts)), d.conf.secret)
+	query.Set("timestamp", ts)
+	//query.Set("appToken", d.Token)
+	query.Set("enable", "1")
+	downloadId, err := mopan.AesEncrypt([]byte(fmt.Sprintf("%s|%s", file.GetID(), d.userID)), AesSecret)
	if err != nil {
		return nil, err
	}
-	params = append(params, "auth="+url.QueryEscape(hex.EncodeToString(auth)))
-
-	u.RawQuery = strings.Join(params, "&")
-	realURL := u.String()
-	// get the url after redirect
-	res, err := base.NoRedirectClient.R().SetHeaders(map[string]string{
-		//"Origin":     d.conf.site,
-		"Referer":    d.conf.site + "/",
-		"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36 Edg/125.0.0.0",
-	}).Get(realURL)
+	query.Set("downloadId", hex.EncodeToString(downloadId))
+	auth, err := mopan.AesEncrypt([]byte(fmt.Sprintf("%s|%d", file.GetID(), time.Now().UnixMilli())), AesSecret)
	if err != nil {
		return nil, err
	}
-	if res.StatusCode() == 302 {
-		realURL = res.Header().Get("location")
-	} else {
-		return nil, fmt.Errorf("redirect failed, status: %d, msg: %s", res.StatusCode(), utils.Json.Get(res.Body(), "msg").ToString())
-	}
-	link := model.Link{URL: realURL}
+	query.Set("auth", hex.EncodeToString(auth))
+	u.RawQuery = query.Encode()
+	link := model.Link{URL: u.String()}
	return &link, nil
}

@ -178,7 +157,7 @@ func (d *ILanZou) MakeDir(ctx context.Context, parentDir model.Obj, dirName stri
		return nil, err
	}
	return &model.Object{
-		ID: utils.Json.Get(res, "list", 0, "id").ToString(),
+		ID: utils.Json.Get(res, "list", "0", "id").ToString(),
		//Path: "",
		Name: dirName,
		Size: 0,
@ -276,7 +255,7 @@ func (d *ILanZou) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
	defer func() {
		_ = tempFile.Close()
	}()
-	if _, err = utils.CopyWithBuffer(h, tempFile); err != nil {
+	if _, err = io.Copy(h, tempFile); err != nil {
		return nil, err
	}
	_, err = tempFile.Seek(0, io.SeekStart)
@ -289,7 +268,7 @@ func (d *ILanZou) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
	req.SetBody(base.Json{
		"fileId":   "",
		"fileName": stream.GetName(),
-		"fileSize": stream.GetSize()/1024 + 1,
+		"fileSize": stream.GetSize() / 1024,
		"folderId": dstDir.GetID(),
		"md5":      etag,
		"type":     1,
@ -302,7 +281,7 @@ func (d *ILanZou) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
	now := time.Now()
	key := fmt.Sprintf("disk/%d/%d/%d/%s/%016d", now.Year(), now.Month(), now.Day(), d.account, now.UnixMilli())
	var token string
-	if stream.GetSize() <= DefaultPartSize {
+	if stream.GetSize() > DefaultPartSize {
		res, err := d.upClient.R().SetMultipartFormData(map[string]string{
			"token": upToken,
			"key":   key,
@ -315,7 +294,7 @@ func (d *ILanZou) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
		token = utils.Json.Get(res.Body(), "token").ToString()
	} else {
		keyBase64 := base64.URLEncoding.EncodeToString([]byte(key))
-		res, err := d.upClient.R().SetHeader("Authorization", "UpToken "+upToken).Post(fmt.Sprintf("https://upload.qiniup.com/buckets/%s/objects/%s/uploads", d.conf.bucket, keyBase64))
+		res, err := d.upClient.R().Post(fmt.Sprintf("https://upload.qiniup.com/buckets/wpanstore-lanzou/objects/%s/uploads", keyBase64))
		if err != nil {
			return nil, err
		}
@ -323,8 +302,8 @@ func (d *ILanZou) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
		parts := make([]Part, 0)
		partNum := (stream.GetSize() + DefaultPartSize - 1) / DefaultPartSize
		for i := 1; i <= int(partNum); i++ {
-			u := fmt.Sprintf("https://upload.qiniup.com/buckets/%s/objects/%s/uploads/%s/%d", d.conf.bucket, keyBase64, uploadId, i)
-			res, err = d.upClient.R().SetHeader("Authorization", "UpToken "+upToken).SetBody(io.LimitReader(tempFile, DefaultPartSize)).Put(u)
+			u := fmt.Sprintf("https://upload.qiniup.com/buckets/wpanstore-lanzou/objects/%s/uploads/%s/%d", keyBase64, uploadId, i)
+			res, err = d.upClient.R().SetBody(io.LimitReader(tempFile, DefaultPartSize)).Put(u)
			if err != nil {
				return nil, err
			}
@ -334,10 +313,10 @@ func (d *ILanZou) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
				ETag: etag,
			})
		}
-		res, err = d.upClient.R().SetHeader("Authorization", "UpToken "+upToken).SetBody(base.Json{
+		res, err = d.upClient.R().SetBody(base.Json{
			"fnmae": stream.GetName(),
			"parts": parts,
-		}).Post(fmt.Sprintf("https://upload.qiniup.com/buckets/%s/objects/%s/uploads/%s", d.conf.bucket, keyBase64, uploadId))
+		}).Post(fmt.Sprintf("https://upload.qiniup.com/buckets/wpanstore-lanzou/objects/%s/uploads/%s", keyBase64, uploadId))
		if err != nil {
			return nil, err
		}
@ -347,12 +326,10 @@ func (d *ILanZou) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
	var resp UploadResultResp
	for i := 0; i < 10; i++ {
		_, err = d.unproved("/7n/results", http.MethodPost, func(req *resty.Request) {
-			params := []string{
-				"tokenList=" + token,
-				"tokenTime=" + time.Now().Format("Mon Jan 02 2006 15:04:05 GMT-0700 (MST)"),
-			}
-			queryString := strings.Join(params, "&")
-			req.SetQueryString(queryString).SetResult(&resp)
+			req.SetQueryParams(map[string]string{
+				"tokenList": token,
+				"tokenTime": time.Now().Format("Mon Jan 02 2006 15:04:05 GMT-0700 (MST)"),
+			}).SetResult(&resp)
		})
		if err != nil {
			return nil, err
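For readers comparing the two Link implementations above, the sketch below strips the pattern down to the query assembly: a downloadId derived from "fileID|userID" and an auth value derived from "fileID|timestamp" are hex-encoded into the redirect URL. The encrypt helper, file ID, user ID, timestamp and secret here are all placeholders; the real driver uses the mopan AES helper and the values shown in the diff.

package main

import (
	"encoding/hex"
	"fmt"
	"net/url"
)

// encrypt stands in for the driver's AES helper; its cipher details are not
// shown in the diff, so this placeholder just echoes its input.
func encrypt(plain, secret []byte) []byte { return plain }

func main() {
	fileID, userID, ts := "123456", "7890", int64(1700000000000)
	secret := []byte("example-secret")

	u, _ := url.Parse("https://api.ilanzou.com/unproved/file/redirect")
	q := u.Query()
	q.Set("downloadId", hex.EncodeToString(encrypt([]byte(fmt.Sprintf("%s|%s", fileID, userID)), secret)))
	q.Set("auth", hex.EncodeToString(encrypt([]byte(fmt.Sprintf("%s|%d", fileID, ts)), secret)))
	u.RawQuery = q.Encode()
	fmt.Println(u.String())
}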
|
@ -14,67 +14,22 @@ type Addition struct {
	UUID string
}

-type Conf struct {
-	base       string
-	secret     []byte
-	bucket     string
-	unproved   string
-	proved     string
-	devVersion string
-	site       string
+var config = driver.Config{
+	Name:              "ILanZou",
+	LocalSort:         false,
+	OnlyLocal:         false,
+	OnlyProxy:         false,
+	NoCache:           false,
+	NoUpload:          false,
+	NeedMs:            false,
+	DefaultRoot:       "0",
+	CheckStatus:       false,
+	Alert:             "",
+	NoOverwriteUpload: false,
}

func init() {
	op.RegisterDriver(func() driver.Driver {
-		return &ILanZou{
-			config: driver.Config{
-				Name:              "ILanZou",
-				LocalSort:         false,
-				OnlyLocal:         false,
-				OnlyProxy:         false,
-				NoCache:           false,
-				NoUpload:          false,
-				NeedMs:            false,
-				DefaultRoot:       "0",
-				CheckStatus:       false,
-				Alert:             "",
-				NoOverwriteUpload: false,
-			},
-			conf: Conf{
-				base:       "https://api.ilanzou.com",
-				secret:     []byte("lanZouY-disk-app"),
-				bucket:     "wpanstore-lanzou",
-				unproved:   "unproved",
-				proved:     "proved",
-				devVersion: "125",
-				site:       "https://www.ilanzou.com",
-			},
-		}
-	})
-	op.RegisterDriver(func() driver.Driver {
-		return &ILanZou{
-			config: driver.Config{
-				Name:              "FeijiPan",
-				LocalSort:         false,
-				OnlyLocal:         false,
-				OnlyProxy:         false,
-				NoCache:           false,
-				NoUpload:          false,
-				NeedMs:            false,
-				DefaultRoot:       "0",
-				CheckStatus:       false,
-				Alert:             "",
-				NoOverwriteUpload: false,
-			},
-			conf: Conf{
-				base:       "https://api.feijipan.com",
-				secret:     []byte("dingHao-disk-app"),
-				bucket:     "wpanstore",
-				unproved:   "ws",
-				proved:     "app",
-				devVersion: "125",
-				site:       "https://www.feijipan.com",
-			},
-		}
+		return &ILanZou{}
	})
}
|
@ -4,9 +4,7 @@ import (
	"encoding/hex"
	"fmt"
	"net/http"
-	"net/url"
	"strconv"
-	"strings"
	"time"

	"github.com/alist-org/alist/v3/drivers/base"
@ -16,6 +14,14 @@ import (
	log "github.com/sirupsen/logrus"
)

+const (
+	Base = "https://api.ilanzou.com"
+)
+
+var (
+	AesSecret = []byte("lanZouY-disk-app")
+)
+
func (d *ILanZou) login() error {
	res, err := d.unproved("/login", http.MethodPost, func(req *resty.Request) {
		req.SetBody(base.Json{
@ -33,52 +39,40 @@ func (d *ILanZou) login() error {
	return nil
}

-func getTimestamp(secret []byte) (int64, string, error) {
+func getTimestamp() (string, error) {
	ts := time.Now().UnixMilli()
	tsStr := strconv.FormatInt(ts, 10)
-	res, err := mopan.AesEncrypt([]byte(tsStr), secret)
+	res, err := mopan.AesEncrypt([]byte(tsStr), AesSecret)
	if err != nil {
-		return 0, "", err
+		return "", err
	}
-	return ts, hex.EncodeToString(res), nil
+	return hex.EncodeToString(res), nil
}

func (d *ILanZou) request(pathname, method string, callback base.ReqCallback, proved bool, retry ...bool) ([]byte, error) {
-	_, ts_str, err := getTimestamp(d.conf.secret)
+	req := base.RestyClient.R()
+	ts, err := getTimestamp()
	if err != nil {
		return nil, err
	}
-
-	params := []string{
-		"uuid=" + url.QueryEscape(d.UUID),
-		"devType=6",
-		"devCode=" + url.QueryEscape(d.UUID),
-		"devModel=chrome",
-		"devVersion=" + url.QueryEscape(d.conf.devVersion),
-		"appVersion=",
-		"timestamp=" + ts_str,
-	}
-
-	if proved {
-		params = append(params, "appToken="+url.QueryEscape(d.Token))
-	}
-
-	params = append(params, "extra=2")
-
-	queryString := strings.Join(params, "&")
-
-	req := base.RestyClient.R()
-	req.SetHeaders(map[string]string{
-		"Origin":     d.conf.site,
-		"Referer":    d.conf.site + "/",
-		"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36 Edg/125.0.0.0",
+	req.SetQueryParams(map[string]string{
+		"uuid":       d.UUID,
+		"devType":    "6",
+		"devCode":    d.UUID,
+		"devModel":   "chrome",
+		"devVersion": "120",
+		"appVersion": "",
+		"timestamp":  ts,
+		//"appToken":  d.Token,
+		"extra":      "2",
	})
+	if proved {
+		req.SetQueryParam("appToken", d.Token)
+	}
	if callback != nil {
		callback(req)
	}
-	res, err := req.Execute(method, d.conf.base+pathname+"?"+queryString)
+	res, err := req.Execute(method, Base+pathname)
	if err != nil {
		if res != nil {
			log.Errorf("[iLanZou] request error: %s", res.String())
@ -103,9 +97,9 @@ func (d *ILanZou) request(pathname, method string, callback base.ReqCallback, pr
}

func (d *ILanZou) unproved(pathname, method string, callback base.ReqCallback) ([]byte, error) {
-	return d.request("/"+d.conf.unproved+pathname, method, callback, false)
+	return d.request("/unproved"+pathname, method, callback, false)
}

func (d *ILanZou) proved(pathname, method string, callback base.ReqCallback) ([]byte, error) {
-	return d.request("/"+d.conf.proved+pathname, method, callback, true)
+	return d.request("/proved"+pathname, method, callback, true)
}
|
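The recurring change in this file is the move between hand-joined query strings and a parameter map. A minimal standard-library comparison of the two styles (the device ID is invented), mirroring what SetQueryString and SetQueryParams do in the hunks above:

package main

import (
	"fmt"
	"net/url"
	"strings"
)

func main() {
	// Old style in the left column: hand-joined "key=value" pairs.
	params := []string{"uuid=" + url.QueryEscape("dev-uuid"), "devType=6", "extra=2"}
	fmt.Println(strings.Join(params, "&"))

	// New style in the right column: url.Values handles escaping and joining.
	q := url.Values{}
	q.Set("uuid", "dev-uuid")
	q.Set("devType", "6")
	q.Set("extra", "2")
	fmt.Println(q.Encode())
}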
@ -62,7 +62,7 @@ func (d *IPFS) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]
	for _, file := range dirs {
		gateurl := *d.gateURL
		gateurl.Path = "ipfs/" + file.Hash
-		gateurl.RawQuery = "filename=" + url.PathEscape(file.Name)
+		gateurl.RawQuery = "filename=" + file.Name
		objlist = append(objlist, &model.ObjectURL{
			Object: model.Object{ID: file.Hash, Name: file.Name, Size: int64(file.Size), IsFolder: file.Type == 1},
			Url:    model.Url{Url: gateurl.String()},
@ -73,7 +73,7 @@ func (d *IPFS) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]
}

func (d *IPFS) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
-	link := d.Gateway + "/ipfs/" + file.GetID() + "/?filename=" + url.PathEscape(file.GetName())
+	link := d.Gateway + "/ipfs/" + file.GetID() + "/?filename=" + file.GetName()
	return &model.Link{URL: link}, nil
}

|
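The IPFS change above drops url.PathEscape around the file name. A tiny sketch of why escaping matters for names containing reserved characters (the file name here is invented):

package main

import (
	"fmt"
	"net/url"
)

func main() {
	name := "my report & notes.pdf"

	// Raw concatenation (right column) leaves '&' and spaces in the query,
	// which can split the parameter or produce an invalid URL.
	fmt.Println("filename=" + name)

	// Escaping (left column) keeps the whole name as a single query value.
	fmt.Println("filename=" + url.PathEscape(name))
}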
@ -1,273 +0,0 @@
package kodbox

import (
	"context"
	"fmt"
	"github.com/alist-org/alist/v3/pkg/utils"
	"github.com/go-resty/resty/v2"
	"net/http"
	"path/filepath"
	"strings"
	"time"

	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/model"
)

type KodBox struct {
	model.Storage
	Addition
	authorization string
}

func (d *KodBox) Config() driver.Config {
	return config
}

func (d *KodBox) GetAddition() driver.Additional {
	return &d.Addition
}

func (d *KodBox) Init(ctx context.Context) error {
	d.Address = strings.TrimSuffix(d.Address, "/")
	d.RootFolderPath = strings.TrimPrefix(utils.FixAndCleanPath(d.RootFolderPath), "/")
	return d.getToken()
}

func (d *KodBox) Drop(ctx context.Context) error {
	return nil
}

func (d *KodBox) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
	var (
		resp         *CommonResp
		listPathData *ListPathData
	)

	_, err := d.request(http.MethodPost, "/?explorer/list/path", func(req *resty.Request) {
		req.SetResult(&resp).SetFormData(map[string]string{
			"path": dir.GetPath(),
		})
	}, true)
	if err != nil {
		return nil, err
	}

	dataBytes, err := utils.Json.Marshal(resp.Data)
	if err != nil {
		return nil, err
	}

	err = utils.Json.Unmarshal(dataBytes, &listPathData)
	if err != nil {
		return nil, err
	}
	FolderAndFiles := append(listPathData.FolderList, listPathData.FileList...)

	return utils.SliceConvert(FolderAndFiles, func(f FolderOrFile) (model.Obj, error) {
		return &model.ObjThumb{
			Object: model.Object{
				Path:     f.Path,
				Name:     f.Name,
				Ctime:    time.Unix(f.CreateTime, 0),
				Modified: time.Unix(f.ModifyTime, 0),
				Size:     f.Size,
				IsFolder: f.Type == "folder",
			},
			//Thumbnail: model.Thumbnail{},
		}, nil
	})
}

func (d *KodBox) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
	path := file.GetPath()
	return &model.Link{
		URL: fmt.Sprintf("%s/?explorer/index/fileOut&path=%s&download=1&accessToken=%s",
			d.Address,
			path,
			d.authorization)}, nil
}

func (d *KodBox) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
	var resp *CommonResp
	newDirPath := filepath.Join(parentDir.GetPath(), dirName)

	_, err := d.request(http.MethodPost, "/?explorer/index/mkdir", func(req *resty.Request) {
		req.SetResult(&resp).SetFormData(map[string]string{
			"path": newDirPath,
		})
	})
	if err != nil {
		return nil, err
	}
	code := resp.Code.(bool)
	if !code {
		return nil, fmt.Errorf("%s", resp.Data)
	}

	return &model.ObjThumb{
		Object: model.Object{
			Path:     resp.Info.(string),
			Name:     dirName,
			IsFolder: true,
			Modified: time.Now(),
			Ctime:    time.Now(),
		},
	}, nil
}

func (d *KodBox) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
	var resp *CommonResp
	_, err := d.request(http.MethodPost, "/?explorer/index/pathCuteTo", func(req *resty.Request) {
		req.SetResult(&resp).SetFormData(map[string]string{
			"dataArr": fmt.Sprintf("[{\"path\": \"%s\", \"name\": \"%s\"}]",
				srcObj.GetPath(),
				srcObj.GetName()),
			"path": dstDir.GetPath(),
		})
	}, true)
	if err != nil {
		return nil, err
	}
	code := resp.Code.(bool)
	if !code {
		return nil, fmt.Errorf("%s", resp.Data)
	}

	return &model.ObjThumb{
		Object: model.Object{
			Path:     srcObj.GetPath(),
			Name:     srcObj.GetName(),
			IsFolder: srcObj.IsDir(),
			Modified: srcObj.ModTime(),
			Ctime:    srcObj.CreateTime(),
		},
	}, nil
}

func (d *KodBox) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
	var resp *CommonResp
	_, err := d.request(http.MethodPost, "/?explorer/index/pathRename", func(req *resty.Request) {
		req.SetResult(&resp).SetFormData(map[string]string{
			"path":    srcObj.GetPath(),
			"newName": newName,
		})
	}, true)
	if err != nil {
		return nil, err
	}
	code := resp.Code.(bool)
	if !code {
		return nil, fmt.Errorf("%s", resp.Data)
	}
	return &model.ObjThumb{
		Object: model.Object{
			Path:     srcObj.GetPath(),
			Name:     newName,
			IsFolder: srcObj.IsDir(),
			Modified: time.Now(),
			Ctime:    srcObj.CreateTime(),
		},
	}, nil
}

func (d *KodBox) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
	var resp *CommonResp
	_, err := d.request(http.MethodPost, "/?explorer/index/pathCopyTo", func(req *resty.Request) {
		req.SetResult(&resp).SetFormData(map[string]string{
			"dataArr": fmt.Sprintf("[{\"path\": \"%s\", \"name\": \"%s\"}]",
				srcObj.GetPath(),
				srcObj.GetName()),
			"path": dstDir.GetPath(),
		})
	})
	if err != nil {
		return nil, err
	}
	code := resp.Code.(bool)
	if !code {
		return nil, fmt.Errorf("%s", resp.Data)
	}

	path := resp.Info.([]interface{})[0].(string)
	objectName, err := d.getFileOrFolderName(ctx, path)
	if err != nil {
		return nil, err
	}
	return &model.ObjThumb{
		Object: model.Object{
			Path:     path,
			Name:     *objectName,
			IsFolder: srcObj.IsDir(),
			Modified: time.Now(),
			Ctime:    time.Now(),
		},
	}, nil
}

func (d *KodBox) Remove(ctx context.Context, obj model.Obj) error {
	var resp *CommonResp
	_, err := d.request(http.MethodPost, "/?explorer/index/pathDelete", func(req *resty.Request) {
		req.SetResult(&resp).SetFormData(map[string]string{
			"dataArr": fmt.Sprintf("[{\"path\": \"%s\", \"name\": \"%s\"}]",
				obj.GetPath(),
				obj.GetName()),
			"shiftDelete": "1",
		})
	})
	if err != nil {
		return err
	}
	code := resp.Code.(bool)
	if !code {
		return fmt.Errorf("%s", resp.Data)
	}
	return nil
}

func (d *KodBox) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
	var resp *CommonResp
	_, err := d.request(http.MethodPost, "/?explorer/upload/fileUpload", func(req *resty.Request) {
		req.SetFileReader("file", stream.GetName(), stream).
			SetResult(&resp).
			SetFormData(map[string]string{
				"path": dstDir.GetPath(),
			})
	})
	if err != nil {
		return nil, err
	}
	code := resp.Code.(bool)
	if !code {
		return nil, fmt.Errorf("%s", resp.Data)
	}
	return &model.ObjThumb{
		Object: model.Object{
			Path:     resp.Info.(string),
			Name:     stream.GetName(),
			Size:     stream.GetSize(),
			IsFolder: false,
			Modified: time.Now(),
			Ctime:    time.Now(),
		},
	}, nil
}

func (d *KodBox) getFileOrFolderName(ctx context.Context, path string) (*string, error) {
	var resp *CommonResp
	_, err := d.request(http.MethodPost, "/?explorer/index/pathInfo", func(req *resty.Request) {
		req.SetResult(&resp).SetFormData(map[string]string{
			"dataArr": fmt.Sprintf("[{\"path\": \"%s\"}]", path)})
	})
	if err != nil {
		return nil, err
	}
	code := resp.Code.(bool)
	if !code {
		return nil, fmt.Errorf("%s", resp.Data)
	}
	folderOrFileName := resp.Data.(map[string]any)["name"].(string)
	return &folderOrFileName, nil
}

var _ driver.Driver = (*KodBox)(nil)
@ -1,25 +0,0 @@
package kodbox

import (
	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/op"
)

type Addition struct {
	driver.RootPath

	Address  string `json:"address" required:"true"`
	UserName string `json:"username" required:"false"`
	Password string `json:"password" required:"false"`
}

var config = driver.Config{
	Name:        "KodBox",
	DefaultRoot: "",
}

func init() {
	op.RegisterDriver(func() driver.Driver {
		return &KodBox{}
	})
}
@ -1,24 +0,0 @@
package kodbox

type CommonResp struct {
	Code    any    `json:"code"`
	TimeUse string `json:"timeUse"`
	TimeNow string `json:"timeNow"`
	Data    any    `json:"data"`
	Info    any    `json:"info"`
}

type ListPathData struct {
	FolderList []FolderOrFile `json:"folderList"`
	FileList   []FolderOrFile `json:"fileList"`
}

type FolderOrFile struct {
	Name       string `json:"name"`
	Path       string `json:"path"`
	Type       string `json:"type"`
	Ext        string `json:"ext,omitempty"` // file-only field
	Size       int64  `json:"size"`
	CreateTime int64  `json:"createTime"`
	ModifyTime int64  `json:"modifyTime"`
}
@ -1,86 +0,0 @@
package kodbox

import (
	"fmt"
	"github.com/alist-org/alist/v3/drivers/base"
	"github.com/alist-org/alist/v3/pkg/utils"
	"github.com/go-resty/resty/v2"
	"strings"
)

func (d *KodBox) getToken() error {
	var authResp CommonResp
	res, err := base.RestyClient.R().
		SetResult(&authResp).
		SetQueryParams(map[string]string{
			"name":     d.UserName,
			"password": d.Password,
		}).
		Post(d.Address + "/?user/index/loginSubmit")
	if err != nil {
		return err
	}
	if res.StatusCode() >= 400 {
		return fmt.Errorf("get token failed: %s", res.String())
	}

	if res.StatusCode() == 200 && authResp.Code.(bool) == false {
		return fmt.Errorf("get token failed: %s", res.String())
	}

	d.authorization = fmt.Sprintf("%s", authResp.Info)
	return nil
}

func (d *KodBox) request(method string, pathname string, callback base.ReqCallback, noRedirect ...bool) ([]byte, error) {
	full := pathname
	if !strings.HasPrefix(pathname, "http") {
		full = d.Address + pathname
	}
	req := base.RestyClient.R()
	if len(noRedirect) > 0 && noRedirect[0] {
		req = base.NoRedirectClient.R()
	}
	req.SetFormData(map[string]string{
		"accessToken": d.authorization,
	})
	callback(req)

	var (
		res        *resty.Response
		commonResp *CommonResp
		err        error
		skip       bool
	)
	for i := 0; i < 2; i++ {
		if skip {
			break
		}
		res, err = req.Execute(method, full)
		if err != nil {
			return nil, err
		}

		err := utils.Json.Unmarshal(res.Body(), &commonResp)
		if err != nil {
			return nil, err
		}

		switch commonResp.Code.(type) {
		case bool:
			skip = true
		case string:
			if commonResp.Code.(string) == "10001" {
				err = d.getToken()
				if err != nil {
					return nil, err
				}
				req.SetFormData(map[string]string{"accessToken": d.authorization})
			}
		}
	}
	if commonResp.Code.(bool) == false {
		return nil, fmt.Errorf("request failed: %s", commonResp.Data)
	}
	return res.Body(), nil
}
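The request helper above retries at most once: when the server answers with code "10001" it refreshes the access token and replays the request. A generic sketch of that shape, with invented names (call, refresh, errTokenExpired) standing in for the KodBox specifics:

package main

import (
	"errors"
	"fmt"
)

var errTokenExpired = errors.New("token expired") // stand-in for the "10001" code

// call runs fn once, refreshes the token on an auth error, and retries once,
// mirroring the two-pass loop in the KodBox request helper.
func call(fn func(token string) error, refresh func() (string, error), token string) error {
	for i := 0; i < 2; i++ {
		err := fn(token)
		if err == nil {
			return nil
		}
		if !errors.Is(err, errTokenExpired) || i == 1 {
			return err
		}
		var rerr error
		if token, rerr = refresh(); rerr != nil {
			return rerr
		}
	}
	return nil
}

func main() {
	attempts := 0
	err := call(func(token string) error {
		attempts++
		if token != "fresh" {
			return errTokenExpired
		}
		return nil
	}, func() (string, error) { return "fresh", nil }, "stale")
	fmt.Println(attempts, err) // 2 <nil>
}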
@ -30,9 +30,6 @@ func (d *LanZou) GetAddition() driver.Additional {
}

func (d *LanZou) Init(ctx context.Context) (err error) {
-	if d.UserAgent == "" {
-		d.UserAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.39 (KHTML, like Gecko) Chrome/89.0.4389.111 Safari/537.39"
-	}
	switch d.Type {
	case "account":
		_, err := d.Login()
@ -16,8 +16,7 @@ type Addition struct {
	driver.RootID
	SharePassword string `json:"share_password"`
	BaseUrl string `json:"baseUrl" required:"true" default:"https://pc.woozooo.com" help:"basic URL for file operation"`
-	ShareUrl string `json:"shareUrl" required:"true" default:"https://pan.lanzoui.com" help:"used to get the sharing page"`
-	UserAgent string `json:"user_agent" required:"true" default:"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.39 (KHTML, like Gecko) Chrome/89.0.4389.111 Safari/537.39"`
+	ShareUrl string `json:"shareUrl" required:"true" default:"https://pan.lanzouo.com" help:"used to get the sharing page"`
	RepairFileInfo bool `json:"repair_file_info" help:"To use webdav, you need to enable it"`
}

@ -106,8 +106,7 @@ func (d *LanZou) request(url string, method string, callback base.ReqCallback, u
	}

	req.SetHeaders(map[string]string{
		"Referer":    "https://pc.woozooo.com",
-		"User-Agent": d.UserAgent,
	})

	if d.Cookie != "" {
@ -1,8 +0,0 @@
// +build linux darwin windows
// +build amd64 arm64

package drivers

import (
	_ "github.com/alist-org/alist/v3/drivers/lark"
)
@ -1,397 +0,0 @@
package lark

import (
	"context"
	"errors"
	"fmt"
	"io"
	"net/http"
	"strconv"
	"strings"
	"time"

	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/errs"
	"github.com/alist-org/alist/v3/internal/model"
	lark "github.com/larksuite/oapi-sdk-go/v3"
	larkcore "github.com/larksuite/oapi-sdk-go/v3/core"
	larkdrive "github.com/larksuite/oapi-sdk-go/v3/service/drive/v1"
	"golang.org/x/time/rate"
)

type Lark struct {
	model.Storage
	Addition

	client          *lark.Client
	rootFolderToken string
}

func (c *Lark) Config() driver.Config {
	return config
}

func (c *Lark) GetAddition() driver.Additional {
	return &c.Addition
}

func (c *Lark) Init(ctx context.Context) error {
	c.client = lark.NewClient(c.AppId, c.AppSecret, lark.WithTokenCache(newTokenCache()))

	paths := strings.Split(c.RootFolderPath, "/")
	token := ""

	var ok bool
	var file *larkdrive.File
	for _, p := range paths {
		if p == "" {
			token = ""
			continue
		}

		resp, err := c.client.Drive.File.ListByIterator(ctx, larkdrive.NewListFileReqBuilder().FolderToken(token).Build())
		if err != nil {
			return err
		}

		for {
			ok, file, err = resp.Next()
			if !ok {
				return errs.ObjectNotFound
			}

			if err != nil {
				return err
			}

			if *file.Type == "folder" && *file.Name == p {
				token = *file.Token
				break
			}
		}
	}

	c.rootFolderToken = token

	return nil
}

func (c *Lark) Drop(ctx context.Context) error {
	return nil
}

func (c *Lark) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
	token, ok := c.getObjToken(ctx, dir.GetPath())
	if !ok {
		return nil, errs.ObjectNotFound
	}

	if token == emptyFolderToken {
		return nil, nil
	}

	resp, err := c.client.Drive.File.ListByIterator(ctx, larkdrive.NewListFileReqBuilder().FolderToken(token).Build())
	if err != nil {
		return nil, err
	}

	ok = false
	var file *larkdrive.File
	var res []model.Obj

	for {
		ok, file, err = resp.Next()
		if !ok {
			break
		}

		if err != nil {
			return nil, err
		}

		modifiedUnix, _ := strconv.ParseInt(*file.ModifiedTime, 10, 64)
		createdUnix, _ := strconv.ParseInt(*file.CreatedTime, 10, 64)

		f := model.Object{
			ID:       *file.Token,
			Path:     strings.Join([]string{c.RootFolderPath, dir.GetPath(), *file.Name}, "/"),
			Name:     *file.Name,
			Size:     0,
			Modified: time.Unix(modifiedUnix, 0),
			Ctime:    time.Unix(createdUnix, 0),
			IsFolder: *file.Type == "folder",
		}
		res = append(res, &f)
	}

	return res, nil
}

func (c *Lark) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
	token, ok := c.getObjToken(ctx, file.GetPath())
	if !ok {
		return nil, errs.ObjectNotFound
	}

	resp, err := c.client.GetTenantAccessTokenBySelfBuiltApp(ctx, &larkcore.SelfBuiltTenantAccessTokenReq{
		AppID:     c.AppId,
		AppSecret: c.AppSecret,
	})

	if err != nil {
		return nil, err
	}

	if !c.ExternalMode {
		accessToken := resp.TenantAccessToken

		url := fmt.Sprintf("https://open.feishu.cn/open-apis/drive/v1/files/%s/download", token)

		req, err := http.NewRequest(http.MethodGet, url, nil)
		if err != nil {
			return nil, err
		}

		req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", accessToken))
		req.Header.Set("Range", "bytes=0-1")

		ar, err := http.DefaultClient.Do(req)
		if err != nil {
			return nil, err
		}

		if ar.StatusCode != http.StatusPartialContent {
			return nil, errors.New("failed to get download link")
		}

		return &model.Link{
			URL: url,
			Header: http.Header{
				"Authorization": []string{fmt.Sprintf("Bearer %s", accessToken)},
			},
		}, nil
	} else {
		url := strings.Join([]string{c.TenantUrlPrefix, "file", token}, "/")

		return &model.Link{
			URL: url,
		}, nil
	}
}

func (c *Lark) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
	token, ok := c.getObjToken(ctx, parentDir.GetPath())
	if !ok {
		return nil, errs.ObjectNotFound
	}

	body, err := larkdrive.NewCreateFolderFilePathReqBodyBuilder().FolderToken(token).Name(dirName).Build()
	if err != nil {
		return nil, err
	}

	resp, err := c.client.Drive.File.CreateFolder(ctx,
		larkdrive.NewCreateFolderFileReqBuilder().Body(body).Build())
	if err != nil {
		return nil, err
	}

	if !resp.Success() {
		return nil, errors.New(resp.Error())
	}

	return &model.Object{
		ID:       *resp.Data.Token,
		Path:     strings.Join([]string{c.RootFolderPath, parentDir.GetPath(), dirName}, "/"),
		Name:     dirName,
		Size:     0,
		IsFolder: true,
	}, nil
}

func (c *Lark) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
	srcToken, ok := c.getObjToken(ctx, srcObj.GetPath())
	if !ok {
		return nil, errs.ObjectNotFound
	}

	dstDirToken, ok := c.getObjToken(ctx, dstDir.GetPath())
	if !ok {
		return nil, errs.ObjectNotFound
	}

	req := larkdrive.NewMoveFileReqBuilder().
		Body(larkdrive.NewMoveFileReqBodyBuilder().
			Type("file").
			FolderToken(dstDirToken).
			Build()).FileToken(srcToken).
		Build()

	// send the request
	resp, err := c.client.Drive.File.Move(ctx, req)
	if err != nil {
		return nil, err
	}

	if !resp.Success() {
		return nil, errors.New(resp.Error())
	}

	return nil, nil
}

func (c *Lark) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
	// TODO rename obj, optional
	return nil, errs.NotImplement
}

func (c *Lark) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
	srcToken, ok := c.getObjToken(ctx, srcObj.GetPath())
	if !ok {
		return nil, errs.ObjectNotFound
	}

	dstDirToken, ok := c.getObjToken(ctx, dstDir.GetPath())
	if !ok {
		return nil, errs.ObjectNotFound
	}

	req := larkdrive.NewCopyFileReqBuilder().
		Body(larkdrive.NewCopyFileReqBodyBuilder().
			Name(srcObj.GetName()).
			Type("file").
			FolderToken(dstDirToken).
			Build()).FileToken(srcToken).
		Build()

	// send the request
	resp, err := c.client.Drive.File.Copy(ctx, req)
	if err != nil {
		return nil, err
	}

	if !resp.Success() {
		return nil, errors.New(resp.Error())
	}

	return nil, nil
}

func (c *Lark) Remove(ctx context.Context, obj model.Obj) error {
	token, ok := c.getObjToken(ctx, obj.GetPath())
	if !ok {
		return errs.ObjectNotFound
	}

	req := larkdrive.NewDeleteFileReqBuilder().
		FileToken(token).
		Type("file").
		Build()

	// send the request
	resp, err := c.client.Drive.File.Delete(ctx, req)
	if err != nil {
		return err
	}

	if !resp.Success() {
		return errors.New(resp.Error())
	}

	return nil
}

var uploadLimit = rate.NewLimiter(rate.Every(time.Second), 5)

func (c *Lark) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
	token, ok := c.getObjToken(ctx, dstDir.GetPath())
	if !ok {
		return nil, errs.ObjectNotFound
	}

	// prepare
	req := larkdrive.NewUploadPrepareFileReqBuilder().
		FileUploadInfo(larkdrive.NewFileUploadInfoBuilder().
			FileName(stream.GetName()).
			ParentType(`explorer`).
			ParentNode(token).
			Size(int(stream.GetSize())).
			Build()).
		Build()

	// send the request
	uploadLimit.Wait(ctx)
	resp, err := c.client.Drive.File.UploadPrepare(ctx, req)
	if err != nil {
		return nil, err
	}

	if !resp.Success() {
		return nil, errors.New(resp.Error())
	}

	uploadId := *resp.Data.UploadId
	blockSize := *resp.Data.BlockSize
	blockCount := *resp.Data.BlockNum

	// upload
	for i := 0; i < blockCount; i++ {
		length := int64(blockSize)
		if i == blockCount-1 {
			length = stream.GetSize() - int64(i*blockSize)
		}

		reader := io.LimitReader(stream, length)

		req := larkdrive.NewUploadPartFileReqBuilder().
			Body(larkdrive.NewUploadPartFileReqBodyBuilder().
				UploadId(uploadId).
				Seq(i).
				Size(int(length)).
				File(reader).
				Build()).
			Build()

		// send the request
		uploadLimit.Wait(ctx)
		resp, err := c.client.Drive.File.UploadPart(ctx, req)

		if err != nil {
			return nil, err
		}

		if !resp.Success() {
			return nil, errors.New(resp.Error())
		}

		up(float64(i) / float64(blockCount))
	}

	//close
	closeReq := larkdrive.NewUploadFinishFileReqBuilder().
		Body(larkdrive.NewUploadFinishFileReqBodyBuilder().
			UploadId(uploadId).
			BlockNum(blockCount).
			Build()).
		Build()

	// send the request
	closeResp, err := c.client.Drive.File.UploadFinish(ctx, closeReq)
	if err != nil {
		return nil, err
	}

	if !closeResp.Success() {
		return nil, errors.New(closeResp.Error())
	}

	return &model.Object{
		ID: *closeResp.Data.FileToken,
	}, nil
}

//func (d *Lark) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
//	return nil, errs.NotSupport
//}

var _ driver.Driver = (*Lark)(nil)
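The upload loop in Put above slices the stream into blockCount pieces of blockSize bytes, with the remainder going to the last block. A small sketch of just that arithmetic (the sizes here are made up; in the driver blockSize and the block count come from the UploadPrepare response):

package main

import "fmt"

func main() {
	// Every block is blockSize bytes except the last, which gets the remainder.
	total := int64(10_500_000)
	blockSize := 4_194_304
	blockCount := int((total + int64(blockSize) - 1) / int64(blockSize))

	for i := 0; i < blockCount; i++ {
		length := int64(blockSize)
		if i == blockCount-1 {
			length = total - int64(i*blockSize)
		}
		fmt.Println(i, length)
	}
	// prints: 0 4194304, 1 4194304, 2 2111392
}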
@ -1,36 +0,0 @@
package lark

import (
	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/op"
)

type Addition struct {
	// Usually one of two
	driver.RootPath
	// define other
	AppId           string `json:"app_id" type:"text" help:"app id"`
	AppSecret       string `json:"app_secret" type:"text" help:"app secret"`
	ExternalMode    bool   `json:"external_mode" type:"bool" help:"external mode"`
	TenantUrlPrefix string `json:"tenant_url_prefix" type:"text" help:"tenant url prefix"`
}

var config = driver.Config{
	Name:              "Lark",
	LocalSort:         false,
	OnlyLocal:         false,
	OnlyProxy:         false,
	NoCache:           false,
	NoUpload:          false,
	NeedMs:            false,
	DefaultRoot:       "/",
	CheckStatus:       false,
	Alert:             "",
	NoOverwriteUpload: true,
}

func init() {
	op.RegisterDriver(func() driver.Driver {
		return &Lark{}
	})
}
@ -1,32 +0,0 @@
package lark

import (
	"context"
	"github.com/Xhofe/go-cache"
	"time"
)

type TokenCache struct {
	cache.ICache[string]
}

func (t *TokenCache) Set(_ context.Context, key string, value string, expireTime time.Duration) error {
	t.ICache.Set(key, value, cache.WithEx[string](expireTime))

	return nil
}

func (t *TokenCache) Get(_ context.Context, key string) (string, error) {
	v, ok := t.ICache.Get(key)
	if ok {
		return v, nil
	}

	return "", nil
}

func newTokenCache() *TokenCache {
	c := cache.NewMemCache[string]()

	return &TokenCache{c}
}
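A short usage sketch for the adapter above (the key and token values are placeholders): values round-trip through Set/Get, and the same instance is handed to the SDK via lark.WithTokenCache, exactly as the deleted driver's Init does.

package lark

import (
	"context"
	"fmt"
	"time"

	lark "github.com/larksuite/oapi-sdk-go/v3"
)

// exampleTokenCacheUsage is an illustrative sketch, not part of the driver.
func exampleTokenCacheUsage() {
	tc := newTokenCache()
	_ = tc.Set(context.Background(), "tenant_access_token", "t-example", time.Minute)
	v, _ := tc.Get(context.Background(), "tenant_access_token")
	fmt.Println(v) // t-example

	// The SDK caches refreshed tenant tokens in this in-memory store.
	client := lark.NewClient("app-id", "app-secret", lark.WithTokenCache(tc))
	_ = client
}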
@ -1,66 +0,0 @@
package lark

import (
	"context"
	"github.com/Xhofe/go-cache"
	larkdrive "github.com/larksuite/oapi-sdk-go/v3/service/drive/v1"
	log "github.com/sirupsen/logrus"
	"path"
	"time"
)

const objTokenCacheDuration = 5 * time.Minute
const emptyFolderToken = "empty"

var objTokenCache = cache.NewMemCache[string]()
var exOpts = cache.WithEx[string](objTokenCacheDuration)

func (c *Lark) getObjToken(ctx context.Context, folderPath string) (string, bool) {
	if token, ok := objTokenCache.Get(folderPath); ok {
		return token, true
	}

	dir, name := path.Split(folderPath)
	// strip the last slash of dir if it exists
	if len(dir) > 0 && dir[len(dir)-1] == '/' {
		dir = dir[:len(dir)-1]
	}
	if name == "" {
		return c.rootFolderToken, true
	}

	var parentToken string
	var found bool
	parentToken, found = c.getObjToken(ctx, dir)
	if !found {
		return emptyFolderToken, false
	}

	req := larkdrive.NewListFileReqBuilder().FolderToken(parentToken).Build()
	resp, err := c.client.Drive.File.ListByIterator(ctx, req)

	if err != nil {
		log.WithError(err).Error("failed to list files")
		return emptyFolderToken, false
	}

	var file *larkdrive.File
	for {
		found, file, err = resp.Next()
		if !found {
			break
		}

		if err != nil {
			log.WithError(err).Error("failed to get next file")
			break
		}

		if *file.Name == name {
			objTokenCache.Set(folderPath, *file.Token, exOpts)
			return *file.Token, true
		}
	}

	return emptyFolderToken, false
}
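getObjToken above resolves a path to a folder token by recursing on the parent directory. The sketch below shows only the path.Split behaviour it relies on: the directory part keeps its trailing slash, which the function strips before recursing, and the recursion bottoms out at an empty name, where the configured root folder token is returned.

package main

import (
	"fmt"
	"path"
)

func main() {
	dir, name := path.Split("/docs/reports")
	fmt.Println(dir, name) // "/docs/" "reports"

	dir = dir[:len(dir)-1] // strip trailing slash: "/docs"
	dir, name = path.Split(dir)
	fmt.Println(dir, name) // "/" "docs"

	// One more split reaches the empty name, where the recursion stops.
	dir = dir[:len(dir)-1] // ""
	dir, name = path.Split(dir)
	fmt.Println(dir == "", name == "") // true true
}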